Compare commits
310_plus ... mkts_backu (153 commits)

Commit SHA1s:

cc50932c4f  c62d3dd82c  7d664c55ff  024d3661a0  9befc1fb1a  54a1397d2c
08d7f925b9  25891c6e51  3a3baca9bc  768d2d997f  98da4342e7  dd6e2604d3
e2d91f274f  addb0a4928  098c4f25fc  9c88b26d85  8686cf99fe  f9ec00e1ae
25a3a123ec  54466db554  6f06f646cf  65d4c317c6  97439e882c  b5d566fed5
d3adb6dff7  22c81eb5bf  41a8c23e44  6bb1f06813  72de184c08  319a6fb66a
e7b1d77b08  3c5a799e97  6e86904032  544c6c3180  4d2b5f9196  4aaf5a1f8b
0cb05ef868  0676f3271c  34635c21a9  129ec9fc19  d2b42a46e6  fac1f86891
36b13012b4  9fcb1d3501  96182c37f1  7f350569df  48ed07aa99  7e5c8f4417
01f06976ed  8b89ba6111  ba797fcbee  3b96b52474  4af941566a  01b594e828
197cad17a2  fb2f1fa488  532da9c590  e8c261279d  e9e76e0626  df6f9b1c17
8e8c1c14ce  4c6e5598f2  7a959e756d  c0d1facf3b  d03cd23571  a8cb6c2056
e9ed070cbf  cf457112dd  fa8e4f7c27  990417b172  5d09d8258f  3e72b59658
a3b282dffe  23a368b5e5  a4dd6c81dc  a2ef955690  6d9a94065d  c976bff40c
11bda4f9b4  803c65bc88  cf7163194c  afe41236ff  b4d35496f7  c5be35dad4
e33d0aac15  02ba7b6b96  5775c5fe71  820dfff08a  cf589c840d  bbaba71465
73aebdfa16  d9862a4962  de599233af  855d02ef5a  7fbd4a95e3  847c95d277
8af76322c9  eb5a4f7eeb  e008f69505  6c8b79906b  40e62c1a38  bed47d3ae6
f60d9dd79c  4402b2dc73  6e37ab6bf9  88411a6a26  a0c3d5f32f  236df4b6d6
a3ec0c16c6  51ced95962  3487f76147  fa69fca311  57b3d2f7e4  f9b799b53d
35f7c3409a  9c5f7a6bb9  86337430d8  8d09d63095  df04ccb845  ad0ace2528
edd273d5d8  cfc77a0a66  69b3120444  8662cde7ca  73b3f7ead8  c3509e7f93
fea645423e  d215a69049  a11cee82d0  aba50515df  1b1bf07f54  77a7b73260
4ad06e4cc0  3da081c67a  d56d1fc4c1  544578c67d  01ea2b3110  2f02f71610
b318ebc221  d737adb1b8  75d7314493  d7d824030d  28436bcb2b  692e310a98
c60d523428  00d7bb089f  3dc87e0426  49531a2da6  53641abc4b  b0e236fadf
ef0516a84b  bcd0895a12  81c69c54ec
@@ -1,6 +1,5 @@
 name: CI
-
 
 on:
   # Triggers the workflow on push or pull request events but only for the master branch
   push:
@@ -11,21 +10,41 @@ on:
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
 
 
 jobs:
 
-  testing:
-    name: 'install + test-suite'
+  basic_install:
+    name: 'pip install'
     runs-on: ubuntu-latest
     steps:
 
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v2
+        with:
+          ref: master
 
       - name: Setup python
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v2
         with:
-          python-version: '3.10'
+          python-version: '3.9'
 
+      - name: Install dependencies
+        run: pip install -e . --upgrade-strategy eager -r requirements.txt
+
+      - name: Run piker cli
+        run: piker
+
+  testing:
+    name: 'test suite'
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v2
+
+      - name: Setup python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.9'
+
       - name: Install dependencies
         run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
README.rst  (61 changes)

@@ -74,57 +74,23 @@ for a development install::
 
 
 install for tinas
 *****************
-for windows peeps you can start by installing all the prerequisite software:
-
-- install git with all default settings - https://git-scm.com/download/win
-- install anaconda all default settings - https://www.anaconda.com/products/individual
-- install microsoft build tools (check the box for Desktop development for C++, you might be able to uncheck some optional downloads) - https://visualstudio.microsoft.com/visual-cpp-build-tools/
-- install visual studio code default settings - https://code.visualstudio.com/download
-
+for windows peeps you can start by getting `conda installed`_
+and the `C++ build toolz`_ on your system.
 
 then, `crack a conda shell`_ and run the following commands::
 
-    mkdir code # create code directory
-    cd code # change directory to code
-    git clone https://github.com/pikers/piker.git # downloads piker installation package from github
-    cd piker # change directory to piker
-
-    conda create -n pikonda # creates conda environment named pikonda
-    conda activate pikonda # activates pikonda
-
-    conda install -c conda-forge python-levenshtein # in case it is not already installed
-    conda install pip # may already be installed
-    pip # will show if pip is installed
-
-    pip install -e . -r requirements.txt # install piker in editable mode
-
-test Piker to see if it is working::
-
-    piker -b binance chart btcusdt.binance # formatting for loading a chart
-    piker -b kraken -b binance chart xbtusdt.kraken
-    piker -b kraken -b binance -b ib chart qqq.nasdaq.ib
-    piker -b ib chart tsla.nasdaq.ib
-
-potential error::
-
-    FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\user\\AppData\\Roaming\\piker\\brokers.toml'
-
-solution:
-
-- navigate to file directory above (may be different on your machine, location should be listed in the error code)
-- copy and paste file from 'C:\\Users\\user\\code\\data/brokers.toml' or create a blank file using notepad at the location above
-
-Visual Studio Code setup:
-
-- now that piker is installed we can set up vscode as the default terminal for running piker and editing the code
-- open Visual Studio Code
-- file --> Add Folder to Workspace --> C:\Users\user\code\piker (adds piker directory where all piker files are located)
-- file --> Save Workspace As --> save it wherever you want and call it whatever you want, this is going to be your default workspace for running and editing piker code
-- ctrl + shift + p --> start typing Python: Select Interpreter --> when the option comes up select it --> Select at the workspace level --> select the one that shows ('pikonda')
-- change the default terminal to cmd.exe instead of powershell (default)
-- now when you create a new terminal VScode should automatically activate your conda env so that piker can be run as the first command after a new terminal is created
-
-also, try out fancyzones as part of powertoyz for a decent tiling windows manager to manage all the cool new software you are going to be running.
+    conda create piker --python=3.9
+    conda activate piker
+    conda install pip
+    pip install --upgrade setuptools
+    cd dIreCToRieZ\oF\cODez\piker\
+    pip install -r requirements -e .
+
+in order to look coolio in front of all ur tina friends (and maybe
+want to help us with testin, hackzing or configgin), install
+`vscode`_ and `setup a coolio tiled wm console`_ so you can start
+living the life of the tech literate..
 
 .. _conda installed: https://
 .. _C++ build toolz: https://
@@ -138,7 +104,7 @@ provider support
 ****************
 for live data feeds the in-progress set of supported brokers is:
 
-- IB_ via ``ib_insync``, also see our `container docs`_
+- IB_ via ``ib_insync``
 - binance_ and kraken_ for crypto over their public websocket API
 - questrade_ (ish) which comes with effectively free L1
 
@@ -150,7 +116,6 @@ coming soon...
 if you want your broker supported and they have an API let us know.
 
 .. _IB: https://interactivebrokers.github.io/tws-api/index.html
-.. _container docs: https://github.com/pikers/piker/tree/master/dockering/ib
 .. _questrade: https://www.questrade.com/api/documentation
 .. _kraken: https://www.kraken.com/features/api#public-market-data
 .. _binance: https://github.com/pikers/piker/pull/182
@@ -8,45 +8,20 @@ expires_at = 1616095326.355846
 
 [kraken]
 key_descr = "api_0"
-api_key = ""
-secret = ""
+public_key = ""
+private_key = ""
 
 [ib]
-hosts = [
-    "127.0.0.1",
-]
-# XXX: the order in which ports will be scanned
-# (by the `brokerd` daemon-actor)
-# is determined # by the line order here.
-# TODO: when we eventually spawn gateways in our
-# container, we can just dynamically allocate these
-# using IBC.
-ports = [
-    4002,  # gw
-    7497,  # tws
-]
-
-# XXX: for a paper account the flex web query service
-# is not supported so you have to manually download
-# and XML report and put it in a location that can be
-# accessed by the ``brokerd.ib`` backend code for parsing.
-flex_token = '666666666666666666666666'
-flex_trades_query_id = '666666' # live account
-
-# when clients are being scanned this determines
-# which clients are preferred to be used for data
-# feeds based on the order of account names, if
-# detected as active on an API client.
-prefer_data_account = [
-    'paper',
-    'margin',
-    'ira',
-]
-
-[ib.accounts]
-# the order in which accounts will be selectable
-# in the order mode UI (if found via clients during
-# API-app scanning) when a new symbol is loaded.
-paper = "XX0000000"
-margin = "X0000000"
-ira = "X0000000"
+host = "127.0.0.1"
+
+ports.gw = 4002
+ports.tws = 7497
+ports.order = ["gw", "tws",]
+
+accounts.margin = "X0000000"
+accounts.ira = "X0000000"
+accounts.paper = "XX0000000"
+
+# the order in which accounts will be selected (if found through
+# `brokerd`) when a new symbol is loaded
+accounts_order = ['paper', 'margin', 'ira']
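For reference, a minimal sketch of reading the right-hand (new-style) ``[ib]`` layout above, assuming the third-party ``toml`` package; the section and key names come straight from the hunk, while the script itself is illustrative::

    import toml  # third-party TOML parser

    conf = toml.loads('''
    [ib]
    host = "127.0.0.1"

    ports.gw = 4002
    ports.tws = 7497
    ports.order = ["gw", "tws",]

    accounts.paper = "XX0000000"
    accounts_order = ['paper', 'margin', 'ira']
    ''')

    ib = conf['ib']
    # dotted keys parse to nested tables: ib['ports'] == {'gw': 4002, 'tws': 7497, ...}
    for name in ib['ports']['order']:
        print(name, '->', ib['ports'][name])  # gw -> 4002, tws -> 7497

Note that the dotted-key form keeps the scan order as an explicit ``ports.order`` list instead of relying on source-line order the way the left-hand ``ports = [...]`` comment describes.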
@@ -1,30 +0,0 @@
-running ``ib`` gateway in ``docker``
-------------------------------------
-We have a config based on the (now defunct)
-image from "waytrade":
-
-https://github.com/waytrade/ib-gateway-docker
-
-To startup this image with our custom settings
-simply run the command::
-
-    docker compose up
-
-And you should have the following socket-available services:
-
-- ``x11vnc1@127.0.0.1:3003``
-- ``ib-gw@127.0.0.1:4002``
-
-You can attach to the container via a VNC client
-without password auth.
-
-SECURITY STUFF!?!?!
--------------------
-Though "``ib``" claims they host filter connections outside
-localhost (aka ``127.0.0.1``) it's probably better if you filter
-the socket at the OS level using a stateless firewall rule::
-
-    ip rule add not unicast iif lo to 0.0.0.0/0 dport 4002
-
-We will soon have this baked into our own custom image but for
-now you'll have to do it urself dawgy.
@@ -1,64 +0,0 @@
-# rework from the original @
-# https://github.com/waytrade/ib-gateway-docker/blob/master/docker-compose.yml
-version: "3.5"
-
-services:
-  ib-gateway:
-    # other image tags available:
-    # https://github.com/waytrade/ib-gateway-docker#supported-tags
-    image: waytrade/ib-gateway:981.3j
-    restart: always
-    network_mode: 'host'
-
-    volumes:
-      - type: bind
-        source: ./jts.ini
-        target: /root/Jts/jts.ini
-        # don't let IBC clobber this file for
-        # the main reason of not having a stupid
-        # timezone set..
-        read_only: true
-
-      # force our own IBC config
-      - type: bind
-        source: ./ibc.ini
-        target: /root/ibc/config.ini
-
-      # force our noop script - socat isn't needed in host mode.
-      - type: bind
-        source: ./fork_ports_delayed.sh
-        target: /root/scripts/fork_ports_delayed.sh
-
-      # force our noop script - socat isn't needed in host mode.
-      - type: bind
-        source: ./run_x11_vnc.sh
-        target: /root/scripts/run_x11_vnc.sh
-        read_only: true
-
-    # NOTE: to fill these out, define an `.env` file in the same dir as
-    # this compose file which looks something like:
-    # TWS_USERID='myuser'
-    # TWS_PASSWORD='guest'
-    # TRADING_MODE=paper (or live)
-    # VNC_SERVER_PASSWORD='diggity'
-
-    environment:
-      TWS_USERID: ${TWS_USERID}
-      TWS_PASSWORD: ${TWS_PASSWORD}
-      TRADING_MODE: ${TRADING_MODE:-paper}
-      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}
-
-    # ports:
-    #   - target: 4002
-    #     host_ip: 127.0.0.1
-    #     published: 4002
-    #     protocol: tcp
-
-    # original mappings for use in non-host-mode
-    # which we won't really need going forward since
-    # ideally we just pick the port to have ib-gw listen
-    # on **when** we spawn the container - i.e. everything
-    # will be driven by a ``brokers.toml`` def.
-    # - "127.0.0.1:4001:4001"
-    # - "127.0.0.1:4002:4002"
-    # - "127.0.0.1:5900:5900"
@@ -1,6 +0,0 @@
-#!/bin/sh
-
-# we now just set this to a noop script
-# since we can just run the container in
-# `network_mode: 'host'` and get literally
-# the exact same behaviour XD
@@ -1,711 +0,0 @@
-# Note that in the comments in this file, TWS refers to both the Trader
-# Workstation and the IB Gateway, unless explicitly stated otherwise.
-#
-# When referred to below, the default value for a setting is the value
-# assumed if either the setting is included but no value is specified, or
-# the setting is not included at all.
-#
-# IBC may also be used to start the FIX CTCI Gateway. All settings
-# relating to this have names prefixed with FIX.
-#
-# The IB API Gateway and the FIX CTCI Gateway share the same code. Which
-# gateway actually runs is governed by an option on the initial gateway
-# login screen. The FIX setting described under IBC Startup
-# Settings below controls this.
-
-
-# =============================================================================
-# 1. IBC Startup Settings
-# =============================================================================
-
-# IBC may be used to start the IB Gateway for the FIX CTCI. This
-# setting must be set to 'yes' if you want to run the FIX CTCI gateway. The
-# default is 'no'.
-
-FIX=no
-
-
-# =============================================================================
-# 2. Authentication Settings
-# =============================================================================
-
-# TWS and the IB API gateway require a single username and password.
-# You may specify the username and password using the following settings:
-#
-#   IbLoginId
-#   IbPassword
-#
-# Alternatively, you can specify the username and password in the command
-# files used to start TWS or the Gateway, but this is not recommended for
-# security reasons.
-#
-# If you don't specify them, you will be prompted for them in the usual
-# login dialog when TWS starts (but whatever you have specified will be
-# included in the dialog automatically: for example you may specify the
-# username but not the password, and then you will be prompted for the
-# password via the login dialog). Note that if you specify either
-# the username or the password (or both) in the command file, then
-# IbLoginId and IbPassword settings defined in this file are ignored.
-#
-#
-# The FIX CTCI gateway requires one username and password for FIX order
-# routing, and optionally a separate username and password for market
-# data connections. You may specify the usernames and passwords using
-# the following settings:
-#
-#   FIXLoginId
-#   FIXPassword
-#   IbLoginId  (optional - for market data connections)
-#   IbPassword (optional - for market data connections)
-#
-# Alternatively you can specify the FIX username and password in the
-# command file used to start the FIX CTCI Gateway, but this is not
-# recommended for security reasons.
-#
-# If you don't specify them, you will be prompted for them in the usual
-# login dialog when FIX CTCI gateway starts (but whatever you have
-# specified will be included in the dialog automatically: for example
-# you may specify the usernames but not the passwords, and then you will
-# be prompted for the passwords via the login dialog). Note that if you
-# specify either the FIX username or the FIX password (or both) on the
-# command line, then FIXLoginId and FIXPassword settings defined in this
-# file are ignored; the same applies to the market data username and
-# password.
-
-# IB API Authentication Settings
-# ------------------------------
-
-# Your TWS username:
-
-IbLoginId=
-
-# Your TWS password:
-
-IbPassword=
-
-# FIX CTCI Authentication Settings
-# --------------------------------
-
-# Your FIX CTCI username:
-
-FIXLoginId=
-
-# Your FIX CTCI password:
-
-FIXPassword=
-
-# Second Factor Authentication Settings
-# -------------------------------------
-
-# If you have enabled more than one second factor authentication
-# device, TWS presents a list from which you must select the device
-# you want to use for this login. You can use this setting to
-# instruct IBC to select a particular item in the list on your
-# behalf. Note that you must spell this value exactly as it appears
-# in the list. If no value is set, you must manually select the
-# relevant list entry.
-
-SecondFactorDevice=
-
-# If you use the IBKR Mobile app for second factor authentication,
-# and you fail to complete the process before the time limit imposed
-# by IBKR, you can use this setting to tell IBC to exit: arrangements
-# can then be made to automatically restart IBC in order to initiate
-# the login sequence afresh. Otherwise, manual intervention at TWS's
-# Second Factor Authentication dialog is needed to complete the
-# login.
-#
-# Permitted values are 'yes' and 'no'. The default is 'no'.
-#
-# Note that the scripts provided with the IBC zips for Windows and
-# Linux provide options to automatically restart in these
-# circumstances, but only if this setting is also set to 'yes'.
-
-ExitAfterSecondFactorAuthenticationTimeout=no
-
-# This setting is only relevant if
-# ExitAfterSecondFactorAuthenticationTimeout is set to 'yes'.
-#
-# It controls how long (in seconds) IBC waits for login to complete
-# after the user acknowledges the second factor authentication
-# alert at the IBKR Mobile app. If login has not completed after
-# this time, IBC terminates.
-# The default value is 40.
-
-SecondFactorAuthenticationExitInterval=
-
-# Trading Mode
-# ------------
-#
-# TWS 955 introduced a new Trading Mode combo box on its login
-# dialog. This indicates whether the live account or the paper
-# trading account corresponding to the supplied credentials is
-# to be used. The allowed values are 'live' (the default) and
-# 'paper'. For earlier versions of TWS this setting has no
-# effect.
-
-TradingMode=
-
-# Paper-trading Account Warning
-# -----------------------------
-#
-# Logging in to a paper-trading account results in TWS displaying
-# a dialog asking the user to confirm that they are aware that this
-# is not a brokerage account. Until this dialog has been accepted,
-# TWS will not allow API connections to succeed. Setting this
-# to 'yes' (the default) will cause IBC to automatically
-# confirm acceptance. Setting it to 'no' will leave the dialog
-# on display, and the user will have to deal with it manually.
-
-AcceptNonBrokerageAccountWarning=yes
-
-# Login Dialog Display Timeout
-# ----------------------------
-#
-# In some circumstances, starting TWS may result in failure to display
-# the login dialog. Restarting TWS may help to resolve this situation,
-# and IBC does this automatically.
-#
-# This setting controls how long (in seconds) IBC waits for the login
-# dialog to appear before restarting TWS.
-#
-# Note that in normal circumstances with a reasonably specified
-# computer the time to displaying the login dialog is typically less
-# than 20 seconds, and frequently much less. However many factors can
-# influence this, and it is unwise to set this value too low.
-#
-# The default value is 60.
-
-LoginDialogDisplayTimeout = 60
-
-
-# =============================================================================
-# 3. TWS Startup Settings
-# =============================================================================
-
-# Path to settings store
-# ----------------------
-#
-# Path to the directory where TWS should store its settings. This is
-# normally the folder in which TWS is installed. However you may set
-# it to some other location if you wish (for example if you want to
-# run multiple instances of TWS with different settings).
-#
-# It is recommended for clarity that you use an absolute path. The
-# effect of using a relative path is undefined.
-#
-# Linux and macOS users should use the appropriate path syntax.
-#
-# Note that, for Windows users, you MUST use double separator
-# characters to separate the elements of the folder path: for
-# example, IbDir=C:\\IBLiveSettings is valid, but
-# IbDir=C:\IBLiveSettings is NOT valid and will give unexpected
-# results. Linux and macOS users need not use double separators,
-# but they are acceptable.
-#
-# The default is the current working directory when IBC is
-# started.
-
-IbDir=/root/Jts
-
-# Store settings on server
-# ------------------------
-#
-# If you wish to store a copy of your TWS settings on IB's
-# servers as well as locally on your computer, set this to
-# 'yes': this enables you to run TWS on different computers
-# with the same configuration, market data lines, etc. If set
-# to 'no', running TWS on different computers will not share the
-# same settings. If no value is specified, TWS will obtain its
-# settings from the same place as the last time this user logged
-# in (whether manually or using IBC).
-
-StoreSettingsOnServer=
-
-# Minimize TWS on startup
-# -----------------------
-#
-# Set to 'yes' to minimize TWS when it starts:
-
-MinimizeMainWindow=no
-
-# Existing Session Detected Action
-# --------------------------------
-#
-# When a user logs on to an IBKR account for trading purposes by any means, the
-# IBKR account server checks to see whether the account is already logged in
-# elsewhere. If so, a dialog is displayed to both the users that enables them
-# to determine what happens next. The 'ExistingSessionDetectedAction' setting
-# instructs TWS how to proceed when it displays this dialog:
-#
-# * If the new TWS session is set to 'secondary', the existing session continues
-#   and the new session terminates. Thus a secondary TWS session can never
-#   override any other session.
-#
-# * If the existing TWS session is set to 'primary', the existing session
-#   continues and the new session terminates (even if the new session is also
-#   set to primary). Thus a primary TWS session can never be overridden by
-#   any new session).
-#
-# * If both the existing and the new TWS sessions are set to 'primaryoverride',
-#   the existing session terminates and the new session proceeds.
-#
-# * If the existing TWS session is set to 'manual', the user must handle the
-#   dialog.
-#
-# The difference between 'primary' and 'primaryoverride' is that a
-# 'primaryoverride' session can be overridden by a new 'primary' session,
-# but a 'primary' session cannot be overridden by any other session.
-#
-# When set to 'primary', if another TWS session is started and manually told to
-# end the 'primary' session, the 'primary' session is automatically reconnected.
-#
-# The default is 'manual'.
-
-ExistingSessionDetectedAction=primary
-
-# Override TWS API Port Number
-# ----------------------------
-#
-# If OverrideTwsApiPort is set to an integer, IBC changes the
-# 'Socket port' in TWS's API configuration to that number shortly
-# after startup. Leaving the setting blank will make no change to
-# the current setting. This setting is only intended for use in
-# certain specialized situations where the port number needs to
-# be set dynamically at run-time: most users will never need it,
-# so don't use it unless you know you need it.
-
-OverrideTwsApiPort=4002
-
-# Read-only Login
-# ---------------
-#
-# If ReadOnlyLogin is set to 'yes', and the user is enrolled in IB's
-# account security programme, the user will not be asked to perform
-# the second factor authentication action, and login to TWS will
-# occur automatically in read-only mode: in this mode, placing or
-# managing orders is not allowed. If set to 'no', and the user is
-# enrolled in IB's account security programme, the user must perform
-# the relevant second factor authentication action to complete the
-# login.
-
-# If the user is not enrolled in IB's account security programme,
-# this setting is ignored. The default is 'no'.
-
-ReadOnlyLogin=no
-
-# Read-only API
-# -------------
-#
-# If ReadOnlyApi is set to 'yes', API programs cannot submit, modify
-# or cancel orders. If set to 'no', API programs can do these things.
-# If not set, the existing TWS/Gateway configuration is unchanged.
-# NB: this setting is really only supplied for the benefit of new TWS
-# or Gateway instances that are being automatically installed and
-# started without user intervention (eg Docker containers). Where
-# a user is involved, they should use the Global Configuration to
-# set the relevant checkbox (this only needs to be done once) and
-# not provide a value for this setting.
-
-ReadOnlyApi=no
-
-# Market data size for US stocks - lots or shares
-# -----------------------------------------------
-#
-# Since IB introduced the option of market data for US stocks showing
-# bid, ask and last sizes in shares rather than lots, TWS and Gateway
-# display a dialog immediately after login notifying the user about
-# this and requiring user input before allowing market data to be
-# accessed. The user can request that the dialog not be shown again.
-#
-# It is recommended that the user should handle this dialog manually
-# rather than using these settings, which are provided for situations
-# where the user interface is not easily accessible, or where user
-# settings are not preserved between sessions (eg some Docker images).
-#
-# - If this setting is set to 'accept', the dialog will be handled
-#   automatically and the option to not show it again will be
-#   selected.
-#
-#   Note that in this case, the only way to allow the dialog to be
-#   displayed again is to manually enable the 'Bid, Ask and Last
-#   Size Display Update' message in the 'Messages' section of the TWS
-#   configuration dialog. So you should only use 'Accept' if you are
-#   sure you really don't want the dialog to be displayed again, or
-#   you have easy access to the user interface.
-#
-# - If set to 'defer', the dialog will be handled automatically (so
-#   that market data will start), but the option to not show it again
-#   will not be selected, and it will be shown again after the next
-#   login.
-#
-# - If set to 'ignore', the user has to deal with the dialog manually.
-#
-# The default value is 'ignore'.
-#
-# Note if set to 'accept' or 'defer', TWS also automatically sets
-# the API settings checkbox labelled 'Send market data in lots for
-# US stocks for dual-mode API clients'. IBC cannot prevent this.
-# However you can change this immediately by setting
-# SendMarketDataInLotsForUSstocks (see below) to 'no'.
-
-AcceptBidAskLastSizeDisplayUpdateNotification=accept
-
-# This setting determines whether the API settings checkbox labelled
-# 'Send market data in lots for US stocks for dual-mode API clients'
-# is set or cleared. If set to 'yes', the checkbox is set. If set to
-# 'no' the checkbox is cleared. If defaulted, the checkbox is
-# unchanged.
-
-SendMarketDataInLotsForUSstocks=
-
-
-# =============================================================================
-# 4. TWS Auto-Closedown
-# =============================================================================
-#
-# IMPORTANT NOTE: Starting with TWS 974, this setting no longer
-# works properly, because IB have changed the way TWS handles its
-# autologoff mechanism.
-#
-# You should now configure the TWS autologoff time to something
-# convenient for you, and restart IBC each day.
-#
-# Alternatively, discontinue use of IBC and use the auto-relogin
-# mechanism within TWS 974 and later versions (note that the
-# auto-relogin mechanism provided by IB is not available if you
-# use IBC).
-
-# Set to yes or no (lower case).
-#
-# yes means allow TWS to shut down automatically at its
-# specified shutdown time, which is set via the TWS
-# configuration menu.
-#
-# no means TWS never shuts down automatically.
-#
-# NB: IB recommends that you do not keep TWS running
-# continuously. If you set this setting to 'no', you may
-# experience incorrect TWS operation.
-#
-# NB: the default for this setting is 'no'. Since this will
-# only work properly with TWS versions earlier than 974, you
-# should explicitly set this to 'yes' for version 974 and later.
-
-IbAutoClosedown=yes
-
-
-# =============================================================================
-# 5. TWS Tidy Closedown Time
-# =============================================================================
-#
-# NB: starting with TWS 974 this is no longer a useful option
-# because both TWS and Gateway now have the same auto-logoff
-# mechanism, and IBC can no longer avoid this.
-#
-# Note that giving this setting a value does not change TWS's
-# auto-logoff in any way: any setting will be additional to the
-# TWS auto-logoff.
-#
-# To tell IBC to tidily close TWS at a specified time every
-# day, set this value to <hh:mm>, for example:
-# ClosedownAt=22:00
-#
-# To tell IBC to tidily close TWS at a specified day and time
-# each week, set this value to <dayOfWeek hh:mm>, for example:
-# ClosedownAt=Friday 22:00
-#
-# Note that the day of the week must be specified using your
-# default locale. Also note that Java will only accept
-# characters encoded to ISO 8859-1 (Latin-1). This means that
-# if the day name in your default locale uses any non-Latin-1
-# characters you need to encode them using Unicode escapes
-# (see http://java.sun.com/docs/books/jls/third_edition/html/lexical.html#3.3
-# for details). For example, to tidily close TWS at 12:00 on
-# Saturday where the default locale is Simplified Chinese,
-# use the following:
-# #ClosedownAt=\u661F\u671F\u516D 12:00
-
-ClosedownAt=
-
-
-# =============================================================================
-# 6. Other TWS Settings
-# =============================================================================
-
-# Accept Incoming Connection
-# --------------------------
-#
-# If set to 'accept', IBC automatically accepts incoming
-# API connection dialogs. If set to 'reject', IBC
-# automatically rejects incoming API connection dialogs. If
-# set to 'manual', the user must decide whether to accept or reject
-# incoming API connection dialogs. The default is 'manual'.
-# NB: it is recommended to set this to 'reject', and to explicitly
-# configure which IP addresses can connect to the API in TWS's API
-# configuration page, as this is much more secure (in this case, no
-# incoming API connection dialogs will occur for those IP addresses).
-
-AcceptIncomingConnectionAction=reject
-
-# Allow Blind Trading
-# -------------------
-#
-# If you attempt to place an order for a contract for which
-# you have no market data subscription, TWS displays a dialog
-# to warn you against such blind trading.
-#
-# yes means the dialog is dismissed as though the user had
-# clicked the 'Ok' button: this means that you accept
-# the risk and want the order to be submitted.
-#
-# no means the dialog remains on display and must be
-# handled by the user.
-
-AllowBlindTrading=yes
-
-# Save Settings on a Schedule
-# ---------------------------
-#
-# You can tell TWS to automatically save its settings on a schedule
-# of your choosing. You can specify one or more specific times,
-# like this:
-#
-# SaveTwsSettingsAt=HH:MM [ HH:MM]...
-#
-# for example:
-# SaveTwsSettingsAt=08:00 12:30 17:30
-#
-# Or you can specify an interval at which settings are to be saved,
-# optionally starting at a specific time and continuing until another
-# time, like this:
-#
-#SaveTwsSettingsAt=Every n [{mins | hours}] [hh:mm] [hh:mm]
-#
-# where the first hh:mm is the start time and the second is the end
-# time. If you don't specify the end time, settings are saved regularly
-# from the start time till midnight. If you don't specify the start time,
-# settings are saved regularly all day, beginning at 00:00. Note that
-# settings will always be saved at the end time, even if that is not
-# exactly one interval later than the previous time. If neither 'mins'
-# nor 'hours' is specified, 'mins' is assumed. Examples:
-#
-# To save every 30 minutes all day starting at 00:00
-#SaveTwsSettingsAt=Every 30
-#SaveTwsSettingsAt=Every 30 mins
-#
-# To save every hour starting at 08:00 and ending at midnight
-#SaveTwsSettingsAt=Every 1 hours 08:00
-#SaveTwsSettingsAt=Every 1 hours 08:00 00:00
-#
-# To save every 90 minutes starting at 08:00 up to and including 17:43
-#SaveTwsSettingsAt=Every 90 08:00 17:43
-
-SaveTwsSettingsAt=
-
-
-# =============================================================================
-# 7. Settings Specific to Indian Versions of TWS
-# =============================================================================
-
-# Indian versions of TWS may display a password expiry
-# notification dialog and a NSE Compliance dialog. These can be
-# dismissed by setting the following to yes. By default the
-# password expiry notice is not dismissed, but the NSE Compliance
-# notice is dismissed.
-
-# Warning: setting DismissPasswordExpiryWarning=yes will mean
-# you will not be notified when your password is about to expire.
-# You must then take other measures to ensure that your password
-# is changed within the expiry period, otherwise IBC will
-# not be able to login successfully.
-
-DismissPasswordExpiryWarning=no
-DismissNSEComplianceNotice=yes
-
-
-# =============================================================================
-# 8. IBC Command Server Settings
-# =============================================================================
-
-# Do NOT CHANGE THE FOLLOWING SETTINGS unless you
-# intend to issue commands to IBC (for example
-# using telnet). Note that these settings have nothing to
-# do with running programs that use the TWS API.
-
-# Command Server Port Number
-# --------------------------
-#
-# The port number that IBC listens on for commands
-# such as "STOP". DO NOT set this to the port number
-# used for TWS API connections. There is no good reason
-# to change this setting unless the port is used by
-# some other application (typically another instance of
-# IBC). The default value is 0, which tells IBC not to
-# start the command server
-
-#CommandServerPort=7462
-
-# Permitted Command Sources
-# -------------------------
-#
-# A comma separated list of IP addresses, or host names,
-# which are allowed addresses for sending commands to
-# IBC. Commands can always be sent from the
-# same host as IBC is running on.
-
-ControlFrom=127.0.0.1
-
-# Address for Receiving Commands
-# ------------------------------
-#
-# Specifies the IP address on which the Command Server
-# is to listen. For a multi-homed host, this can be used
-# to specify that connection requests are only to be
-# accepted on the specified address. The default is to
-# accept connection requests on all local addresses.
-
-BindAddress=127.0.0.1
-
-# Command Prompt
-# --------------
-#
-# The specified string is output by the server when
-# the connection is first opened and after the completion
-# of each command. This can be useful if sending commands
-# using an interactive program such as telnet. The default
-# is that no prompt is output.
-# For example:
-#
-# CommandPrompt=>
-
-CommandPrompt=
-
-# Suppress Command Server Info Messages
-# -------------------------------------
-#
-# Some commands can return intermediate information about
-# their progress. This setting controls whether such
-# information is sent. The default is that such information
-# is not sent.
-
-SuppressInfoMessages=no
-
-
-# =============================================================================
-# 9. Diagnostic Settings
-# =============================================================================
-#
-# IBC can log information about the structure of windows
-# displayed by TWS. This information is useful when adding
-# new features to IBC or when behaviour is not as expected.
-#
-# The logged information shows the hierarchical organisation
-# of all the components of the window, and includes the
-# current values of text boxes and labels.
-#
-# Note that this structure logging has a small performance
-# impact, and depending on the settings can cause the logfile
-# size to be significantly increased. It is therefore
-# recommended that the LogStructureWhen setting be set to
-# 'never' (the default) unless there is a specific reason
-# that this information is needed.
-
-# Scope of Structure Logging
-# --------------------------
-#
-# The LogStructureScope setting indicates which windows are
-# eligible for structure logging:
-#
-# - if set to 'known', only windows that IBC recognizes
-#   are eligible - these are windows that IBC has some
-#   interest in monitoring, usually to take some action
-#   on the user's behalf;
-#
-# - if set to 'unknown', only windows that IBC does not
-#   recognize are eligible. Most windows displayed by
-#   TWS fall into this category;
-#
-# - if set to 'untitled', only windows that IBC does not
-#   recognize and that have no title are eligible. These
-#   are usually message boxes or similar small windows.
-#
-# - if set to 'all', then every window displayed by TWS
-#   is eligible.
-#
-# The default value is 'known'.
-
-LogStructureScope=all
-
-# When to Log Window Structure
-# ----------------------------
-#
-# The LogStructureWhen setting specifies the circumstances
-# when eligible TWS windows have their structure logged:
-#
-# - if set to 'open' or 'yes' or 'true', IBC logs the
-#   structure of an eligible window the first time it
-#   is encountered;
-#
-# - if set to 'activate', the structure is logged every
-#   time an eligible window is made active;
-#
-# - if set to 'never' or 'no' or 'false', structure
-#   information is never logged.
-#
-# The default value is 'never'.
-
-LogStructureWhen=never
-
-# DEPRECATED SETTING
-# ------------------
-#
-# LogComponents - THIS SETTING WILL BE REMOVED IN A FUTURE
-# RELEASE
-#
-# If LogComponents is set to any value, this is equivalent
-# to setting LogStructureWhen to that same value and
-# LogStructureScope to 'all': the actual values of those
-# settings are ignored. The default is that the values
-# of LogStructureScope and LogStructureWhen are honoured.
-
-#LogComponents=
@@ -1,33 +0,0 @@
-[IBGateway]
-ApiOnly=true
-LocalServerPort=4002
-# NOTE: must be set if using IBC's "reject" mode
-TrustedIPs=127.0.0.1
-; RemoteHostOrderRouting=ndc1.ibllc.com
-; WriteDebug=true
-; RemotePortOrderRouting=4001
-; useRemoteSettings=false
-; tradingMode=p
-; Steps=8
-; colorPalletName=dark
-
-# window geo, this may be useful for sending `xdotool` commands?
-; MainWindow.Width=1986
-; screenHeight=3960
-
-
-[Logon]
-Locale=en
-# most markets are oriented around this zone
-# so might as well hard code it.
-TimeZone=America/New_York
-UseSSL=true
-displayedproxymsg=1
-os_titlebar=true
-s3store=true
-useRemoteSettings=false
-
-[Communication]
-ctciAutoEncrypt=true
-Region=usr
-; Peer=cdc1.ibllc.com:4001
@@ -1,16 +0,0 @@
-#!/bin/sh
-
-# start VNC server
-x11vnc \
-    -ncache_cr \
-    -listen localhost \
-    -display :1 \
-    -forever \
-    -shared \
-    -logappend /var/log/x11vnc.log \
-    -bg \
-    -noipv6 \
-    -autoport 3003 \
-    # can't use this because of ``asyncvnc`` issue:
-    # https://github.com/barneygale/asyncvnc/issues/1
-    # -passwd 'ibcansmbz'
@@ -35,7 +35,7 @@ log = get_logger(__name__)
 
 _root_dname = 'pikerd'
 
-_registry_addr = ('127.0.0.1', 6116)
+_registry_addr = ('127.0.0.1', 1616)
 _tractor_kwargs: dict[str, Any] = {
     # use a different registry addr then tractor's default
     'arbiter_addr': _registry_addr
@@ -426,19 +426,9 @@ async def spawn_brokerd(
 
     # ask `pikerd` to spawn a new sub-actor and manage it under its
     # actor nursery
-    modpath = brokermod.__name__
-    broker_enable = [modpath]
-    for submodname in getattr(
-        brokermod,
-        '__enable_modules__',
-        [],
-    ):
-        subpath = f'{modpath}.{submodname}'
-        broker_enable.append(subpath)
-
     portal = await _services.actor_n.start_actor(
         dname,
-        enable_modules=_data_mods + broker_enable,
+        enable_modules=_data_mods + [brokermod.__name__],
         loglevel=loglevel,
        debug_mode=_services.debug_mode,
        **tractor_kwargs
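For context on the second hunk: the left-hand (``-``) side lets a backend opt extra submodules into the spawned ``brokerd`` actor by exposing an ``__enable_modules__`` list, which gets expanded to dotted paths via ``f'{modpath}.{submodname}'``. A hypothetical backend package illustrating that convention (the package name and entries are made up)::

    # piker/brokers/somebroker/__init__.py  (hypothetical backend)
    __enable_modules__ = [
        'api',   # expands to 'piker.brokers.somebroker.api'
        'feed',  # expands to 'piker.brokers.somebroker.feed'
    ]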
@@ -24,7 +24,7 @@ from functools import wraps
 # NOTE: you can pass a flag to enable this:
 # ``piker chart <args> --profile``.
 _pg_profile: bool = False
-ms_slower_then: float = 0
+ms_slower_then: float = 10
 
 
 def pg_profile_enabled() -> bool:
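The right-hand value of ``10`` means only code paths slower than 10 ms get profiling output, where the left-hand ``0`` reports everything. A standalone sketch of that gating pattern (only the ``ms_slower_then`` name and value come from the hunk; the helper is illustrative)::

    import time

    ms_slower_then: float = 10  # threshold from the hunk above

    def report_if_slow(label: str, start: float) -> None:
        # illustrative helper: emit a timing only past the threshold
        elapsed_ms = (time.perf_counter() - start) * 1000
        if elapsed_ms > ms_slower_then:
            print(f'{label} took {elapsed_ms:.1f} ms')

    t0 = time.perf_counter()
    sum(range(1_000_000))  # stand-in for a profiled chart redraw
    report_if_slow('redraw', t0)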
@@ -33,41 +33,7 @@ class SymbolNotFound(BrokerError):
 
 
 class NoData(BrokerError):
-    '''
-    Symbol data not permitted or no data
-    for time range found.
-
-    '''
-    def __init__(
-        self,
-        *args,
-        frame_size: int = 1000,
-
-    ) -> None:
-        super().__init__(*args)
-
-        # when raised, machinery can check if the backend
-        # set a "frame size" for doing datetime calcs.
-        self.frame_size: int = 1000
-
-
-class DataUnavailable(BrokerError):
-    '''
-    Signal storage requests to terminate.
-
-    '''
-    # TODO: add in a reason that can be displayed in the
-    # UI (for eg. `kraken` is bs and you should complain
-    # to them that you can't pull more OHLC data..)
-
-
-class DataThrottle(BrokerError):
-    '''
-    Broker throttled request rate for data.
-
-    '''
-    # TODO: add in throttle metrics/feedback
+    "Symbol data not permitted"
 
 
 def resproc(
@@ -84,12 +50,12 @@ def resproc(
     if not resp.status_code == 200:
         raise BrokerError(resp.body)
     try:
-        msg = resp.json()
+        json = resp.json()
     except json.decoder.JSONDecodeError:
         log.exception(f"Failed to process {resp}:\n{resp.text}")
         raise BrokerError(resp.text)
 
     if log_resp:
-        log.debug(f"Received json contents:\n{colorize_json(msg)}")
+        log.debug(f"Received json contents:\n{colorize_json(json)}")
 
-    return msg if return_json else resp
+    return json if return_json else resp
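One detail worth flagging in the second hunk: the ``msg`` naming on the ``-`` side avoids shadowing the stdlib ``json`` module, which the unchanged ``except json.decoder.JSONDecodeError`` clause still needs; with the ``+`` side's local ``json`` binding, a decode failure raises ``UnboundLocalError`` instead of reaching the handler. A minimal sketch of the pitfall, with an illustrative ``FakeResp`` standing in for a real response object::

    import json

    class FakeResp:
        text = 'not json'
        def json(self):
            # inside the method, `json` still refers to the module
            return json.loads(self.text)  # raises json.decoder.JSONDecodeError

    def buggy(resp):
        try:
            json = resp.json()  # this assignment makes `json` local to buggy()
        except json.decoder.JSONDecodeError:
            # never reached: evaluating `json` here hits the unbound local
            return None

    buggy(FakeResp())  # -> UnboundLocalError, not the intended error path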
@ -19,7 +19,6 @@ Binance backend
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from contextlib import asynccontextmanager as acm
|
from contextlib import asynccontextmanager as acm
|
||||||
from datetime import datetime
|
|
||||||
from typing import (
|
from typing import (
|
||||||
Any, Union, Optional,
|
Any, Union, Optional,
|
||||||
AsyncGenerator, Callable,
|
AsyncGenerator, Callable,
|
||||||
|
@ -28,7 +27,7 @@ import time
|
||||||
|
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
from trio_typing import TaskStatus
|
||||||
import pendulum
|
import arrow
|
||||||
import asks
|
import asks
|
||||||
from fuzzywuzzy import process as fuzzy
|
from fuzzywuzzy import process as fuzzy
|
||||||
import numpy as np
|
import numpy as np
|
||||||
|
@ -133,7 +132,7 @@ class OHLC:
|
||||||
bar_wap: float = 0.0
|
bar_wap: float = 0.0
|
||||||
|
|
||||||
|
|
||||||
# convert datetime obj timestamp to unixtime in milliseconds
|
# convert arrow timestamp to unixtime in miliseconds
|
||||||
def binance_timestamp(when):
|
def binance_timestamp(when):
|
||||||
return int((when.timestamp() * 1000) + (when.microsecond / 1000))
|
return int((when.timestamp() * 1000) + (when.microsecond / 1000))
|
||||||
|
|
||||||
@@ -222,22 +221,20 @@ class Client:
     async def bars(
         self,
         symbol: str,
-        start_dt: Optional[datetime] = None,
-        end_dt: Optional[datetime] = None,
+        start_time: int = None,
+        end_time: int = None,
         limit: int = 1000,  # <- max allowed per query
         as_np: bool = True,

     ) -> dict:

-        if end_dt is None:
-            end_dt = pendulum.now('UTC')
+        if start_time is None:
+            start_time = binance_timestamp(
+                arrow.utcnow().floor('minute').shift(minutes=-limit)
+            )

-        if start_dt is None:
-            start_dt = end_dt.start_of(
-                'minute').subtract(minutes=limit)
-
-        start_time = binance_timestamp(start_dt)
-        end_time = binance_timestamp(end_dt)
+        if end_time is None:
+            end_time = binance_timestamp(arrow.utcnow())

         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
         bars = await self._api(
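Both variants default the query window to the last `limit` minutes ending "now"; roughly, a standalone sketch of what the pendulum-flavored side computes (`limit` shown at its default):

    import pendulum

    limit = 1000  # max klines per Binance query
    end_dt = pendulum.now('UTC')
    start_dt = end_dt.start_of('minute').subtract(minutes=limit)
    assert (end_dt - start_dt).in_minutes() >= limit  # ~1000 one-minute bars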
@@ -382,27 +379,7 @@ async def open_history_client(

     # TODO implement history getter for the new storage layer.
     async with open_cached_client('binance') as client:
-
-        async def get_ohlc(
-            end_dt: Optional[datetime] = None,
-            start_dt: Optional[datetime] = None,
-
-        ) -> tuple[
-            np.ndarray,
-            datetime,  # start
-            datetime,  # end
-        ]:
-
-            array = await client.bars(
-                symbol,
-                start_dt=start_dt,
-                end_dt=end_dt,
-            )
-            start_dt = pendulum.from_timestamp(array[0]['time'])
-            end_dt = pendulum.from_timestamp(array[-1]['time'])
-            return array, start_dt, end_dt
-
-        yield get_ohlc, {'erlangs': 3, 'rate': 3}
+        yield client


 async def backfill_bars(
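For context, the removed `get_ohlc` closure is what the newer history layer consumes: the endpoint yields a frame-fetcher plus throttle hints rather than a bare client. A sketch of a consumer, assuming the `(fetcher, limits)` pair shown in the removed code:

    # hedged sketch of the assumed protocol, symbol purely illustrative:
    async with open_history_client('btcusdt') as (get_ohlc, limits):
        array, start_dt, end_dt = await get_ohlc()  # most recent frame
        print(f'{len(array)} bars {start_dt} -> {end_dt}, rate={limits["rate"]}')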
@@ -452,8 +429,8 @@ async def stream_quotes(

         # XXX: after manually inspecting the response format we
         # just directly pick out the info we need
-        si['price_tick_size'] = float(syminfo.filters[0]['tickSize'])
-        si['lot_tick_size'] = float(syminfo.filters[2]['stepSize'])
+        si['price_tick_size'] = syminfo.filters[0]['tickSize']
+        si['lot_tick_size'] = syminfo.filters[2]['stepSize']
         si['asset_type'] = 'crypto'

         symbol = symbols[0]
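The `float()` casts being dropped here matter because Binance's exchange-info filters deliver numeric fields as JSON strings; without the cast, downstream tick-size math gets `str` operands. For example:

    # shape of a Binance PRICE_FILTER entry (values are strings on the wire):
    filt = {'filterType': 'PRICE_FILTER', 'tickSize': '0.01000000'}
    assert isinstance(filt['tickSize'], str)
    price_tick_size = float(filt['tickSize'])  # 0.01 -- what the casts preserve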
@@ -23,6 +23,7 @@ from operator import attrgetter
 from operator import itemgetter

 import click
+import pandas as pd
 import trio
 import tractor
@@ -46,10 +47,8 @@ _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
 @click.argument('kwargs', nargs=-1)
 @click.pass_obj
 def api(config, meth, kwargs, keys):
-    '''
-    Make a broker-client API method call
-
-    '''
+    """Make a broker-client API method call
+    """
     # global opts
     broker = config['brokers'][0]
@@ -80,13 +79,13 @@ def api(config, meth, kwargs, keys):


 @cli.command()
+@click.option('--df-output', '-df', flag_value=True,
+              help='Output in `pandas.DataFrame` format')
 @click.argument('tickers', nargs=-1, required=True)
 @click.pass_obj
-def quote(config, tickers):
-    '''
-    Print symbol quotes to the console
-
-    '''
+def quote(config, tickers, df_output):
+    """Print symbol quotes to the console
+    """
     # global opts
     brokermod = config['brokermods'][0]
@@ -101,19 +100,28 @@ def quote(config, tickers):
         if ticker not in syms:
             brokermod.log.warn(f"Could not find symbol {ticker}?")

-    click.echo(colorize_json(quotes))
+    if df_output:
+        cols = next(filter(bool, quotes)).copy()
+        cols.pop('symbol')
+        df = pd.DataFrame(
+            (quote or {} for quote in quotes),
+            columns=cols,
+        )
+        click.echo(df)
+    else:
+        click.echo(colorize_json(quotes))

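A note on the restored branch: `cols` ends up being a quote `dict`, and passing a dict as `columns=` works only because iterating a dict yields its keys. A hypothetical shell invocation of the restored flag (the symbol is purely illustrative):

    piker quote xbtusd.kraken --df-output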
 @cli.command()
+@click.option('--df-output', '-df', flag_value=True,
+              help='Output in `pandas.DataFrame` format')
 @click.option('--count', '-c', default=1000,
               help='Number of bars to retrieve')
 @click.argument('symbol', required=True)
 @click.pass_obj
-def bars(config, symbol, count):
-    '''
-    Retreive 1m bars for symbol and print on the console
-
-    '''
+def bars(config, symbol, count, df_output):
+    """Retreive 1m bars for symbol and print on the console
+    """
     # global opts
     brokermod = config['brokermods'][0]
@@ -125,7 +133,7 @@ def bars(config, symbol, count):
             brokermod,
             symbol,
             count=count,
-            as_np=False,
+            as_np=df_output
         )
     )
@@ -133,7 +141,10 @@ def bars(config, symbol, count):
         log.error(f"No quotes could be found for {symbol}?")
         return

-    click.echo(colorize_json(bars))
+    if df_output:
+        click.echo(pd.DataFrame(bars))
+    else:
+        click.echo(colorize_json(bars))


 @cli.command()
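`as_np=df_output` means the broker returns a numpy structured array exactly when a DataFrame was requested, since pandas lifts the dtype field names straight into columns. A small check of that behavior (field names illustrative):

    import numpy as np
    import pandas as pd

    bars = np.array(
        [(1640995200, 46200.0, 520.5)],
        dtype=[('time', 'i8'), ('close', 'f8'), ('volume', 'f8')],
    )
    df = pd.DataFrame(bars)
    print(list(df.columns))  # ['time', 'close', 'volume']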
@@ -145,10 +156,8 @@ def bars(config, symbol, count):
 @click.argument('name', nargs=1, required=True)
 @click.pass_obj
 def record(config, rate, name, dhost, filename):
-    '''
-    Record client side quotes to a file on disk
-
-    '''
+    """Record client side quotes to a file on disk
+    """
     # global opts
     brokermod = config['brokermods'][0]
     loglevel = config['loglevel']
@@ -186,10 +195,8 @@ def record(config, rate, name, dhost, filename):
 @click.argument('symbol', required=True)
 @click.pass_context
 def contracts(ctx, loglevel, broker, symbol, ids):
-    '''
-    Get list of all option contracts for symbol
-
-    '''
+    """Get list of all option contracts for symbol
+    """
     brokermod = get_brokermod(broker)
     get_console_log(loglevel)
@@ -206,14 +213,14 @@ def contracts(ctx, loglevel, broker, symbol, ids):


 @cli.command()
+@click.option('--df-output', '-df', flag_value=True,
+              help='Output in `pandas.DataFrame` format')
 @click.option('--date', '-d', help='Contracts expiry date')
 @click.argument('symbol', required=True)
 @click.pass_obj
-def optsquote(config, symbol, date):
-    '''
-    Retreive symbol option quotes on the console
-
-    '''
+def optsquote(config, symbol, df_output, date):
+    """Retreive symbol option quotes on the console
+    """
     # global opts
     brokermod = config['brokermods'][0]
@@ -226,17 +233,22 @@ def optsquote(config, symbol, date):
         log.error(f"No option quotes could be found for {symbol}?")
         return

-    click.echo(colorize_json(quotes))
+    if df_output:
+        df = pd.DataFrame(
+            (quote.values() for quote in quotes),
+            columns=quotes[0].keys(),
+        )
+        click.echo(df)
+    else:
+        click.echo(colorize_json(quotes))

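The `quote.values()` / `quotes[0].keys()` pairing silently assumes every quote dict was built with the same key insertion order; that holds for uniform API responses but is worth knowing. The pattern in isolation:

    import pandas as pd

    quotes = [
        {'symbol': 'xyz', 'bid': 1.0, 'ask': 1.2},
        {'symbol': 'abc', 'bid': 9.0, 'ask': 9.1},
    ]
    df = pd.DataFrame(
        (q.values() for q in quotes),   # rows: values in insertion order
        columns=quotes[0].keys(),       # headers from the first quote
    )
    print(df)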

 @cli.command()
 @click.argument('tickers', nargs=-1, required=True)
 @click.pass_obj
 def symbol_info(config, tickers):
-    '''
-    Print symbol quotes to the console
-
-    '''
+    """Print symbol quotes to the console
+    """
     # global opts
     brokermod = config['brokermods'][0]
@@ -258,10 +270,8 @@ def symbol_info(config, tickers):
 @click.argument('pattern', required=True)
 @click.pass_obj
 def search(config, pattern):
-    '''
-    Search for symbols from broker backend(s).
-
-    '''
+    """Search for symbols from broker backend(s).
+    """
     # global opts
     brokermods = config['brokermods']


[File diff suppressed because it is too large]

@@ -1,67 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-"""
-Interactive Brokers API backend.
-
-Sub-modules within break into the core functionalities:
-
-- ``broker.py`` part for orders / trading endpoints
-- ``data.py`` for real-time data feed endpoints
-
-- ``client.py`` for the core API machinery which is ``trio``-ized
-  wrapping around ``ib_insync``.
-
-- ``report.py`` for the hackery to build manual pp calcs
-  to avoid ib's absolute bullshit FIFO style position
-  tracking..
-
-"""
-from .api import (
-    get_client,
-)
-from .feed import (
-    open_history_client,
-    open_symbol_search,
-    stream_quotes,
-)
-from .broker import trades_dialogue
-
-__all__ = [
-    'get_client',
-    'trades_dialogue',
-    'open_history_client',
-    'open_symbol_search',
-    'stream_quotes',
-]
-
-
-# tractor RPC enable arg
-__enable_modules__: list[str] = [
-    'api',
-    'feed',
-    'broker',
-]
-
-# passed to ``tractor.ActorNursery.start_actor()``
-_spawn_kwargs = {
-    'infect_asyncio': True,
-}
-
-# annotation to let backend agnostic code
-# know if ``brokerd`` should be spawned with
-# ``tractor``'s aio mode.
-_infect_asyncio: bool = True
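For orientation: `__enable_modules__` is the allow-list of submodules a parent actor exposes over `tractor`'s RPC when it spawns `brokerd`, and `_spawn_kwargs` rides along to the nursery call. A rough sketch of that wiring (the spawn function here is hypothetical, not piker's actual daemon code):

    import tractor

    async def spawn_ib_brokerd(an: tractor.ActorNursery) -> tractor.Portal:
        return await an.start_actor(
            'brokerd.ib',
            enable_modules=['piker.brokers.ib.api', 'piker.brokers.ib.feed'],
            infect_asyncio=True,  # run ib_insync on an embedded asyncio loop
        )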
[File diff suppressed because it is too large]

@@ -1,590 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-"""
-Order and trades endpoints for use with ``piker``'s EMS.
-
-"""
-from __future__ import annotations
-from dataclasses import asdict
-from functools import partial
-from pprint import pformat
-import time
-from typing import (
-    Any,
-    Optional,
-    AsyncIterator,
-)
-
-import trio
-from trio_typing import TaskStatus
-import tractor
-from ib_insync.contract import (
-    Contract,
-    Option,
-)
-from ib_insync.order import (
-    Trade,
-    OrderStatus,
-)
-from ib_insync.objects import (
-    Fill,
-    Execution,
-)
-from ib_insync.objects import Position
-
-from piker import config
-from piker.log import get_console_log
-from piker.clearing._messages import (
-    BrokerdOrder,
-    BrokerdOrderAck,
-    BrokerdStatus,
-    BrokerdPosition,
-    BrokerdCancel,
-    BrokerdFill,
-    BrokerdError,
-)
-from .api import (
-    _accounts2clients,
-    _adhoc_futes_set,
-    log,
-    get_config,
-    open_client_proxies,
-    Client,
-)
-
-
-def pack_position(
-    pos: Position
-
-) -> dict[str, Any]:
-    con = pos.contract
-
-    if isinstance(con, Option):
-        # TODO: option symbol parsing and sane display:
-        symbol = con.localSymbol.replace(' ', '')
-
-    else:
-        # TODO: lookup fqsn even for derivs.
-        symbol = con.symbol.lower()
-
-    exch = (con.primaryExchange or con.exchange).lower()
-    symkey = '.'.join((symbol, exch))
-    if not exch:
-        # attempt to lookup the symbol from our
-        # hacked set..
-        for sym in _adhoc_futes_set:
-            if symbol in sym:
-                symkey = sym
-                break
-
-    expiry = con.lastTradeDateOrContractMonth
-    if expiry:
-        symkey += f'.{expiry}'
-
-    # TODO: options contracts into a sane format..
-
-    return BrokerdPosition(
-        broker='ib',
-        account=pos.account,
-        symbol=symkey,
-        currency=con.currency,
-        size=float(pos.position),
-        avg_price=float(pos.avgCost) / float(con.multiplier or 1.0),
-    )
-
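A quick arithmetic note on `avg_price`: IB reports `avgCost` scaled by the contract multiplier for derivatives, so dividing by `multiplier or 1.0` recovers a per-unit price (the `or 1.0` also guards stocks, where the multiplier is empty). With purely illustrative numbers:

    avgCost, multiplier = 35298.0, 2.0   # values illustrative only
    avg_price = float(avgCost) / float(multiplier or 1.0)
    print(avg_price)  # 17649.0 -> per-point entry price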
-
-async def handle_order_requests(
-
-    ems_order_stream: tractor.MsgStream,
-    accounts_def: dict[str, str],
-
-) -> None:
-
-    request_msg: dict
-    async for request_msg in ems_order_stream:
-        log.info(f'Received order request {request_msg}')
-
-        action = request_msg['action']
-        account = request_msg['account']
-
-        acct_number = accounts_def.get(account)
-        if not acct_number:
-            log.error(
-                f'An IB account number for name {account} is not found?\n'
-                'Make sure you have all TWS and GW instances running.'
-            )
-            await ems_order_stream.send(BrokerdError(
-                oid=request_msg['oid'],
-                symbol=request_msg['symbol'],
-                reason=f'No account found: `{account}` ?',
-            ).dict())
-            continue
-
-        client = _accounts2clients.get(account)
-        if not client:
-            log.error(
-                f'An IB client for account name {account} is not found.\n'
-                'Make sure you have all TWS and GW instances running.'
-            )
-            await ems_order_stream.send(BrokerdError(
-                oid=request_msg['oid'],
-                symbol=request_msg['symbol'],
-                reason=f'No api client loaded for account: `{account}` ?',
-            ).dict())
-            continue
-
-        if action in {'buy', 'sell'}:
-            # validate
-            order = BrokerdOrder(**request_msg)
-
-            # call our client api to submit the order
-            reqid = client.submit_limit(
-                oid=order.oid,
-                symbol=order.symbol,
-                price=order.price,
-                action=order.action,
-                size=order.size,
-                account=acct_number,
-
-                # XXX: by default 0 tells ``ib_insync`` methods that
-                # there is no existing order so ask the client to create
-                # a new one (which it seems to do by allocating an int
-                # counter - collision prone..)
-                reqid=order.reqid,
-            )
-            if reqid is None:
-                await ems_order_stream.send(BrokerdError(
-                    oid=request_msg['oid'],
-                    symbol=request_msg['symbol'],
-                    reason='Order already active?',
-                ).dict())
-
-            # deliver ack that order has been submitted to broker routing
-            await ems_order_stream.send(
-                BrokerdOrderAck(
-                    # ems order request id
-                    oid=order.oid,
-                    # broker specific request id
-                    reqid=reqid,
-                    time_ns=time.time_ns(),
-                    account=account,
-                ).dict()
-            )
-
-        elif action == 'cancel':
-            msg = BrokerdCancel(**request_msg)
-            client.submit_cancel(reqid=msg.reqid)
-
-        else:
-            log.error(f'Unknown order command: {request_msg}')
-
-
-async def recv_trade_updates(
-
-    client: Client,
-    to_trio: trio.abc.SendChannel,
-
-) -> None:
-    """Stream a ticker using the std L1 api.
-    """
-    client.inline_errors(to_trio)
-
-    # sync with trio task
-    to_trio.send_nowait(None)
-
-    def push_tradesies(eventkit_obj, obj, fill=None):
-        """Push events to trio task.
-
-        """
-        if fill is not None:
-            # execution details event
-            item = ('fill', (obj, fill))
-
-        elif eventkit_obj.name() == 'positionEvent':
-            item = ('position', obj)
-
-        else:
-            item = ('status', obj)
-
-        log.info(f'eventkit event ->\n{pformat(item)}')
-
-        try:
-            to_trio.send_nowait(item)
-        except trio.BrokenResourceError:
-            log.exception(f'Disconnected from {eventkit_obj} updates')
-            eventkit_obj.disconnect(push_tradesies)
-
-    # hook up to the weird eventkit object - event stream api
-    for ev_name in [
-        'orderStatusEvent',  # all order updates
-        'execDetailsEvent',  # all "fill" updates
-        'positionEvent',  # avg price updates per symbol per account
-
-        # 'commissionReportEvent',
-        # XXX: ugh, it is a separate event from IB and it's
-        # emitted as follows:
-        # self.ib.commissionReportEvent.emit(trade, fill, report)
-
-        # XXX: not sure yet if we need these
-        # 'updatePortfolioEvent',
-
-        # XXX: these all seem to be weird ib_insync intrernal
-        # events that we probably don't care that much about
-        # given the internal design is wonky af..
-        # 'newOrderEvent',
-        # 'orderModifyEvent',
-        # 'cancelOrderEvent',
-        # 'openOrderEvent',
-    ]:
-        eventkit_obj = getattr(client.ib, ev_name)
-        handler = partial(push_tradesies, eventkit_obj)
-        eventkit_obj.connect(handler)
-
-    # let the engine run and stream
-    await client.ib.disconnectedEvent
-
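The `partial(push_tradesies, eventkit_obj)` trick above is how one callback serves several emitters while still knowing which one fired; the same pattern reduced to a toy emitter (all names below invented for the demo):

    from functools import partial

    class Emitter:
        def __init__(self, name: str):
            self._name, self._subs = name, []
        def name(self) -> str:
            return self._name
        def connect(self, fn):
            self._subs.append(fn)
        def emit(self, obj):
            for fn in self._subs:
                fn(obj)

    def on_event(emitter: Emitter, obj) -> None:
        print(emitter.name(), '->', obj)

    ev = Emitter('orderStatusEvent')
    ev.connect(partial(on_event, ev))  # bind the emitter as the first arg
    ev.emit({'status': 'Filled'})      # prints: orderStatusEvent -> {...}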
-
-@tractor.context
-async def trades_dialogue(
-
-    ctx: tractor.Context,
-    loglevel: str = None,
-
-) -> AsyncIterator[dict[str, Any]]:
-
-    # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
-
-    accounts_def = config.load_accounts(['ib'])
-
-    global _client_cache
-
-    # deliver positions to subscriber before anything else
-    all_positions = []
-    accounts = set()
-    clients: list[tuple[Client, trio.MemoryReceiveChannel]] = []
-
-    async with (
-        trio.open_nursery() as nurse,
-        open_client_proxies() as (proxies, aioclients),
-    ):
-        for account, proxy in proxies.items():
-
-            client = aioclients[account]
-
-            async def open_stream(
-                task_status: TaskStatus[
-                    trio.abc.ReceiveChannel
-                ] = trio.TASK_STATUS_IGNORED,
-            ):
-                # each api client has a unique event stream
-                async with tractor.to_asyncio.open_channel_from(
-                    recv_trade_updates,
-                    client=client,
-                ) as (first, trade_event_stream):
-
-                    task_status.started(trade_event_stream)
-                    await trio.sleep_forever()
-
-            trade_event_stream = await nurse.start(open_stream)
-
-            clients.append((client, trade_event_stream))
-
-            assert account in accounts_def
-            accounts.add(account)
-
-        for client in aioclients.values():
-            for pos in client.positions():
-
-                msg = pack_position(pos)
-                msg.account = accounts_def.inverse[msg.account]
-
-                assert msg.account in accounts, (
-                    f'Position for unknown account: {msg.account}')
-
-                all_positions.append(msg.dict())
-
-        trades: list[dict] = []
-        for proxy in proxies.values():
-            trades.append(await proxy.trades())
-
-        log.info(f'Loaded {len(trades)} from this session')
-        # TODO: write trades to local ``trades.toml``
-        # - use above per-session trades data and write to local file
-        # - get the "flex reports" working and pull historical data and
-        #   also save locally.
-
-        await ctx.started((
-            all_positions,
-            tuple(name for name in accounts_def if name in accounts),
-        ))
-
-        async with (
-            ctx.open_stream() as ems_stream,
-            trio.open_nursery() as n,
-        ):
-            # start order request handler **before** local trades event loop
-            n.start_soon(handle_order_requests, ems_stream, accounts_def)
-
-            # allocate event relay tasks for each client connection
-            for client, stream in clients:
-                n.start_soon(
-                    deliver_trade_events,
-                    stream,
-                    ems_stream,
-                    accounts_def
-                )
-
-            # block until cancelled
-            await trio.sleep_forever()
-
-
-async def deliver_trade_events(
-
-    trade_event_stream: trio.MemoryReceiveChannel,
-    ems_stream: tractor.MsgStream,
-    accounts_def: dict[str, str],
-
-) -> None:
-    '''Format and relay all trade events for a given client to the EMS.
-
-    '''
-    action_map = {'BOT': 'buy', 'SLD': 'sell'}
-
-    # TODO: for some reason we can receive a ``None`` here when the
-    # ib-gw goes down? Not sure exactly how that's happening looking
-    # at the eventkit code above but we should probably handle it...
-    async for event_name, item in trade_event_stream:
-
-        log.info(f'ib sending {event_name}:\n{pformat(item)}')
-
-        # TODO: templating the ib statuses in comparison with other
-        # brokers is likely the way to go:
-        # https://interactivebrokers.github.io/tws-api/interfaceIBApi_1_1EWrapper.html#a17f2a02d6449710b6394d0266a353313
-        # short list:
-        # - PendingSubmit
-        # - PendingCancel
-        # - PreSubmitted (simulated orders)
-        # - ApiCancelled (cancelled by client before submission
-        #   to routing)
-        # - Cancelled
-        # - Filled
-        # - Inactive (reject or cancelled but not by trader)
-
-        # XXX: here's some other sucky cases from the api
-        # - short-sale but securities haven't been located, in this
-        #   case we should probably keep the order in some kind of
-        #   weird state or cancel it outright?
-
-        # status='PendingSubmit', message=''),
-        # status='Cancelled', message='Error 404,
-        #   reqId 1550: Order held while securities are located.'),
-        # status='PreSubmitted', message='')],
-
-        if event_name == 'status':
-
-            # XXX: begin normalization of nonsense ib_insync internal
-            # object-state tracking representations...
-
-            # unwrap needed data from ib_insync internal types
-            trade: Trade = item
-            status: OrderStatus = trade.orderStatus
-
-            # skip duplicate filled updates - we get the deats
-            # from the execution details event
-            msg = BrokerdStatus(
-
-                reqid=trade.order.orderId,
-                time_ns=time.time_ns(),  # cuz why not
-                account=accounts_def.inverse[trade.order.account],
-
-                # everyone doin camel case..
-                status=status.status.lower(),  # force lower case
-
-                filled=status.filled,
-                reason=status.whyHeld,
-
-                # this seems to not be necessarily up to date in the
-                # execDetails event.. so we have to send it here I guess?
-                remaining=status.remaining,
-
-                broker_details={'name': 'ib'},
-            )
-
-        elif event_name == 'fill':
-
-            # for wtv reason this is a separate event type
-            # from IB, not sure why it's needed other then for extra
-            # complexity and over-engineering :eyeroll:.
-            # we may just end up dropping these events (or
-            # translating them to ``Status`` msgs) if we can
-            # show the equivalent status events are no more latent.
-
-            # unpack ib_insync types
-            # pep-0526 style:
-            # https://www.python.org/dev/peps/pep-0526/#global-and-local-variable-annotations
-            trade: Trade
-            fill: Fill
-            trade, fill = item
-            execu: Execution = fill.execution
-
-            # TODO: normalize out commissions details?
-            details = {
-                'contract': asdict(fill.contract),
-                'execution': asdict(fill.execution),
-                'commissions': asdict(fill.commissionReport),
-                'broker_time': execu.time,  # supposedly server fill time
-                'name': 'ib',
-            }
-
-            msg = BrokerdFill(
-                # should match the value returned from `.submit_limit()`
-                reqid=execu.orderId,
-                time_ns=time.time_ns(),  # cuz why not
-
-                action=action_map[execu.side],
-                size=execu.shares,
-                price=execu.price,
-
-                broker_details=details,
-                # XXX: required by order mode currently
-                broker_time=details['broker_time'],
-
-            )
-
-        elif event_name == 'error':
-
-            err: dict = item
-
-            # f$#$% gawd dammit insync..
-            con = err['contract']
-            if isinstance(con, Contract):
-                err['contract'] = asdict(con)
-
-            if err['reqid'] == -1:
-                log.error(f'TWS external order error:\n{pformat(err)}')
-
-            # TODO: what schema for this msg if we're going to make it
-            # portable across all backends?
-            # msg = BrokerdError(**err)
-            continue
-
-        elif event_name == 'position':
-            msg = pack_position(item)
-            msg.account = accounts_def.inverse[msg.account]
-
-        elif event_name == 'event':
-
-            # it's either a general system status event or an external
-            # trade event?
-            log.info(f"TWS system status: \n{pformat(item)}")
-
-            # TODO: support this again but needs parsing at the callback
-            # level...
-            # reqid = item.get('reqid', 0)
-            # if getattr(msg, 'reqid', 0) < -1:
-            # log.info(f"TWS triggered trade\n{pformat(msg.dict())}")
-
-            continue
-
-            # msg.reqid = 'tws-' + str(-1 * reqid)
-
-            # mark msg as from "external system"
-            # TODO: probably something better then this.. and start
-            # considering multiplayer/group trades tracking
-            # msg.broker_details['external_src'] = 'tws'
-
-        # XXX: we always serialize to a dict for msgpack
-        # translations, ideally we can move to an msgspec (or other)
-        # encoder # that can be enabled in ``tractor`` ahead of
-        # time so we can pass through the message types directly.
-        await ems_stream.send(msg.dict())
-
-
-def load_flex_trades(
-    path: Optional[str] = None,
-
-) -> dict[str, str]:
-
-    from pprint import pprint
-    from ib_insync import flexreport, util
-
-    conf = get_config()
-
-    if not path:
-        # load ``brokers.toml`` and try to get the flex
-        # token and query id that must be previously defined
-        # by the user.
-        token = conf.get('flex_token')
-        if not token:
-            raise ValueError(
-                'You must specify a ``flex_token`` field in your'
-                '`brokers.toml` in order load your trade log, see our'
-                'intructions for how to set this up here:\n'
-                'PUT LINK HERE!'
-            )
-
-        qid = conf['flex_trades_query_id']
-
-        # TODO: hack this into our logging
-        # system like we do with the API client..
-        util.logToConsole()
-
-        # TODO: rewrite the query part of this with async..httpx?
-        report = flexreport.FlexReport(
-            token=token,
-            queryId=qid,
-        )
-
-    else:
-        # XXX: another project we could potentially look at,
-        # https://pypi.org/project/ibflex/
-        report = flexreport.FlexReport(path=path)
-
-    trade_entries = report.extract('Trade')
-    trades = {
-        # XXX: LOL apparently ``toml`` has a bug
-        # where a section key error will show up in the write
-        # if you leave this as an ``int``?
-        str(t.__dict__['tradeID']): t.__dict__
-        for t in trade_entries
-    }
-
-    ln = len(trades)
-    log.info(f'Loaded {ln} trades from flex query')
-
-    trades_by_account = {}
-    for tid, trade in trades.items():
-        trades_by_account.setdefault(
-            # oddly for some so-called "BookTrade" entries
-            # this field seems to be blank, no cuckin clue.
-            # trade['ibExecID']
-            str(trade['accountId']), {}
-        )[tid] = trade
-
-    section = {'ib': trades_by_account}
-    pprint(section)
-
-    # TODO: load the config first and append in
-    # the new trades loaded here..
-    try:
-        config.write(section, 'trades')
-    except KeyError:
-        import pdbpp; pdbpp.set_trace()  # noqa
-
-
-if __name__ == '__main__':
-    load_flex_trades()
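One detail in `load_flex_trades()` that's easy to miss: trade ids are `str()`-ed before being used as keys because TOML table keys must be strings, and the `toml` package can fail confusingly on `int` keys (hence the "LOL apparently ``toml`` has a bug" comment). A quick sketch (values illustrative):

    import toml

    trades = {str(123456): {'accountId': 'U1234567', 'size': 1}}
    print(toml.dumps({'ib': trades}))  # str keys serialize cleanly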
@@ -1,938 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-"""
-Data feed endpoints pre-wrapped and ready for use with ``tractor``/``trio``.
-
-"""
-from __future__ import annotations
-import asyncio
-from contextlib import asynccontextmanager as acm
-from dataclasses import asdict
-from datetime import datetime
-from math import isnan
-import time
-from typing import (
-    Callable,
-    Optional,
-    Awaitable,
-)
-
-from async_generator import aclosing
-from fuzzywuzzy import process as fuzzy
-import numpy as np
-import pendulum
-import tractor
-import trio
-from trio_typing import TaskStatus
-
-from piker.data._sharedmem import ShmArray
-from .._util import SymbolNotFound, NoData
-from .api import (
-    _adhoc_futes_set,
-    log,
-    load_aio_clients,
-    ibis,
-    MethodProxy,
-    open_client_proxies,
-    get_preferred_data_client,
-    Ticker,
-    RequestError,
-    Contract,
-)
-
-
-# https://interactivebrokers.github.io/tws-api/tick_types.html
-tick_types = {
-    77: 'trade',
-
-    # a "utrade" aka an off exchange "unreportable" (dark) vlm:
-    # https://interactivebrokers.github.io/tws-api/tick_types.html#rt_volume
-    48: 'dark_trade',
-
-    # standard L1 ticks
-    0: 'bsize',
-    1: 'bid',
-    2: 'ask',
-    3: 'asize',
-    4: 'last',
-    5: 'size',
-    8: 'volume',
-
-    # ``ib_insync`` already packs these into
-    # quotes under the following fields.
-    # 55: 'trades_per_min',  # `'tradeRate'`
-    # 56: 'vlm_per_min',  # `'volumeRate'`
-    # 89: 'shortable',  # `'shortableShares'`
-}
-
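The `tick_types` table is keyed by IB's numeric `tickType` codes; `normalize()` further down swaps them for readable labels on each tick dict, e.g.:

    tick_types = {77: 'trade', 48: 'dark_trade', 1: 'bid', 2: 'ask'}
    raw = {'tickType': 48, 'price': 4412.25, 'size': 3}
    raw['type'] = tick_types.get(raw['tickType'], 'n/a')
    print(raw['type'])  # 'dark_trade' -- an off-exchange ("dark") trade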
-
-@acm
-async def open_data_client() -> MethodProxy:
-    '''
-    Open the first found preferred "data client" as defined in the
-    user's ``brokers.toml`` in the ``ib.prefer_data_account`` variable
-    and deliver that client wrapped in a ``MethodProxy``.
-
-    '''
-    async with (
-        open_client_proxies() as (proxies, clients),
-    ):
-        account_name, client = get_preferred_data_client(clients)
-        proxy = proxies.get(f'ib.{account_name}')
-        if not proxy:
-            raise ValueError(
-                f'No preferred data client could be found for {account_name}!'
-            )
-
-        yield proxy
-
-
-@acm
-async def open_history_client(
-    symbol: str,
-
-) -> tuple[Callable, int]:
-    '''
-    History retreival endpoint - delivers a historical frame callble
-    that takes in ``pendulum.datetime`` and returns ``numpy`` arrays.
-
-    '''
-    async with open_data_client() as proxy:
-
-        async def get_hist(
-            end_dt: Optional[datetime] = None,
-            start_dt: Optional[datetime] = None,
-
-        ) -> tuple[np.ndarray, str]:
-
-            out, fails = await get_bars(proxy, symbol, end_dt=end_dt)
-
-            # TODO: add logic here to handle tradable hours and only grab
-            # valid bars in the range
-            if out is None:
-                # could be trying to retreive bars over weekend
-                log.error(f"Can't grab bars starting at {end_dt}!?!?")
-                raise NoData(
-                    f'{end_dt}',
-                    frame_size=2000,
-                )
-
-            bars, bars_array, first_dt, last_dt = out
-
-            # volume cleaning since there's -ve entries,
-            # wood luv to know what crookery that is..
-            vlm = bars_array['volume']
-            vlm[vlm < 0] = 0
-
-            return bars_array, first_dt, last_dt
-
-        # TODO: it seems like we can do async queries for ohlc
-        # but getting the order right still isn't working and I'm not
-        # quite sure why.. needs some tinkering and probably
-        # a lookthrough of the ``ib_insync`` machinery, for eg. maybe
-        # we have to do the batch queries on the `asyncio` side?
-        yield get_hist, {'erlangs': 1, 'rate': 6}
-
-
-_pacing: str = (
-    'Historical Market Data Service error '
-    'message:Historical data request pacing violation'
-)
-
-
-async def get_bars(
-
-    proxy: MethodProxy,
-    fqsn: str,
-
-    # blank to start which tells ib to look up the latest datum
-    end_dt: str = '',
-
-) -> (dict, np.ndarray):
-    '''
-    Retrieve historical data from a ``trio``-side task using
-    a ``MethoProxy``.
-
-    '''
-    fails = 0
-    bars: Optional[list] = None
-    first_dt: datetime = None
-    last_dt: datetime = None
-
-    if end_dt:
-        last_dt = pendulum.from_timestamp(end_dt.timestamp())
-
-    for _ in range(10):
-        try:
-            out = await proxy.bars(
-                fqsn=fqsn,
-                end_dt=end_dt,
-            )
-            if out:
-                bars, bars_array = out
-
-            else:
-                await tractor.breakpoint()
-
-            if bars_array is None:
-                raise SymbolNotFound(fqsn)
-
-            first_dt = pendulum.from_timestamp(
-                bars[0].date.timestamp())
-
-            last_dt = pendulum.from_timestamp(
-                bars[-1].date.timestamp())
-
-            time = bars_array['time']
-            assert time[-1] == last_dt.timestamp()
-            assert time[0] == first_dt.timestamp()
-            log.info(
-                f'{len(bars)} bars retreived for {first_dt} -> {last_dt}'
-            )
-
-            return (bars, bars_array, first_dt, last_dt), fails
-
-        except RequestError as err:
-            msg = err.message
-            # why do we always need to rebind this?
-            # _err = err
-
-            if 'No market data permissions for' in msg:
-                # TODO: signalling for no permissions searches
-                raise NoData(
-                    f'Symbol: {fqsn}',
-                )
-
-            elif (
-                err.code == 162
-                and 'HMDS query returned no data' in err.message
-            ):
-                # XXX: this is now done in the storage mgmt layer
-                # and we shouldn't implicitly decrement the frame dt
-                # index since the upper layer may be doing so
-                # concurrently and we don't want to be delivering frames
-                # that weren't asked for.
-                log.warning(
-                    f'NO DATA found ending @ {end_dt}\n'
-                )
-
-                # try to decrement start point and look further back
-                # end_dt = last_dt = last_dt.subtract(seconds=2000)
-
-                raise NoData(
-                    f'Symbol: {fqsn}',
-                    frame_size=2000,
-                )
-
-            elif _pacing in msg:
-
-                log.warning(
-                    'History throttle rate reached!\n'
-                    'Resetting farms with `ctrl-alt-f` hack\n'
-                )
-                # TODO: we might have to put a task lock around this
-                # method..
-                hist_ev = proxy.status_event(
-                    'HMDS data farm connection is OK:ushmds'
-                )
-
-                # XXX: other event messages we might want to try and
-                # wait for but i wasn't able to get any of this
-                # reliable..
-                # reconnect_start = proxy.status_event(
-                #     'Market data farm is connecting:usfuture'
-                # )
-                # live_ev = proxy.status_event(
-                #     'Market data farm connection is OK:usfuture'
-                # )
-
-                # try to wait on the reset event(s) to arrive, a timeout
-                # will trigger a retry up to 6 times (for now).
-                tries: int = 2
-                timeout: float = 10
-
-                # try 3 time with a data reset then fail over to
-                # a connection reset.
-                for i in range(1, tries):
-
-                    log.warning('Sending DATA RESET request')
-                    await data_reset_hack(reset_type='data')
-
-                    with trio.move_on_after(timeout) as cs:
-                        for name, ev in [
-                            # TODO: not sure if waiting on other events
-                            # is all that useful here or not. in theory
-                            # you could wait on one of the ones above
-                            # first to verify the reset request was
-                            # sent?
-                            ('history', hist_ev),
-                        ]:
-                            await ev.wait()
-                            log.info(f"{name} DATA RESET")
-                            break
-
-                    if cs.cancelled_caught:
-                        fails += 1
-                        log.warning(
-                            f'Data reset {name} timeout, retrying {i}.'
-                        )
-
-                        continue
-                else:
-
-                    log.warning('Sending CONNECTION RESET')
-                    await data_reset_hack(reset_type='connection')
-
-                    with trio.move_on_after(timeout) as cs:
-                        for name, ev in [
-                            # TODO: not sure if waiting on other events
-                            # is all that useful here or not. in theory
-                            # you could wait on one of the ones above
-                            # first to verify the reset request was
-                            # sent?
-                            ('history', hist_ev),
-                        ]:
-                            await ev.wait()
-                            log.info(f"{name} DATA RESET")
-
-                    if cs.cancelled_caught:
-                        fails += 1
-                        log.warning('Data CONNECTION RESET timeout!?')
-
-            else:
-                raise
-
-    return None, None
-    # else:  # throttle wasn't fixed so error out immediately
-    #     raise _err
-
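The pacing-violation branch is essentially "fire the reset hack, then wait on a status event with a timeout, retry on expiry". That trio idiom in isolation (the event here is a stand-in for `proxy.status_event(...)`):

    import trio

    async def await_reset(ev: trio.Event, timeout: float = 10) -> bool:
        with trio.move_on_after(timeout) as cs:
            await ev.wait()
        return not cs.cancelled_caught  # False -> timed out; caller retries

    async def main():
        ev = trio.Event()
        async with trio.open_nursery() as n:
            n.start_soon(await_reset, ev)
            ev.set()  # simulate the 'HMDS ... OK' status arriving

    trio.run(main)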
-
-async def backfill_bars(
-
-    fqsn: str,
-    shm: ShmArray,  # type: ignore # noqa
-
-    # TODO: we want to avoid overrunning the underlying shm array buffer
-    # and we should probably calc the number of calls to make depending
-    # on that until we have the `marketstore` daemon in place in which
-    # case the shm size will be driven by user config and available sys
-    # memory.
-    count: int = 16,
-
-    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
-
-) -> None:
-    '''
-    Fill historical bars into shared mem / storage afap.
-
-    TODO: avoid pacing constraints:
-    https://github.com/pikers/piker/issues/128
-
-    '''
-    # last_dt1 = None
-    last_dt = None
-
-    with trio.CancelScope() as cs:
-
-        async with open_data_client() as proxy:
-
-            out, fails = await get_bars(proxy, fqsn)
-
-            if out is None:
-                raise RuntimeError("Could not pull currrent history?!")
-
-            (first_bars, bars_array, first_dt, last_dt) = out
-            vlm = bars_array['volume']
-            vlm[vlm < 0] = 0
-            last_dt = first_dt
-
-            # write historical data to buffer
-            shm.push(bars_array)
-
-            task_status.started(cs)
-
-            i = 0
-            while i < count:
-
-                out, fails = await get_bars(proxy, fqsn, end_dt=first_dt)
-
-                if out is None:
-                    # could be trying to retreive bars over weekend
-                    # TODO: add logic here to handle tradable hours and
-                    # only grab valid bars in the range
-                    log.error(f"Can't grab bars starting at {first_dt}!?!?")
-
-                    # XXX: get_bars() should internally decrement dt by
-                    # 2k seconds and try again.
-                    continue
-
-                (first_bars, bars_array, first_dt, last_dt) = out
-                # last_dt1 = last_dt
-                # last_dt = first_dt
-
-                # volume cleaning since there's -ve entries,
-                # wood luv to know what crookery that is..
-                vlm = bars_array['volume']
-                vlm[vlm < 0] = 0
-
-                # TODO we should probably dig into forums to see what peeps
-                # think this data "means" and then use it as an indicator of
-                # sorts? dinkus has mentioned that $vlms for the day dont'
-                # match other platforms nor the summary stat tws shows in
-                # the monitor - it's probably worth investigating.
-
-                shm.push(bars_array, prepend=True)
-                i += 1
-
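Note the backfill direction: each iteration asks for the frame ending at the previous frame's first timestamp and prepends it, so shared memory grows backwards in time. Schematically (a plain list stands in for `ShmArray`, and the fetcher is hypothetical):

    history = []        # oldest-first, like the shm buffer
    first_dt = None     # end bound for the next (older) frame

    for _ in range(3):
        frame = fetch_frame(end_dt=first_dt)  # hypothetical get_bars() analog
        first_dt = frame[0]['time']           # step the window further back
        history[:0] = frame                   # shm.push(..., prepend=True) analog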
-
-asset_type_map = {
-    'STK': 'stock',
-    'OPT': 'option',
-    'FUT': 'future',
-    'CONTFUT': 'continuous_future',
-    'CASH': 'forex',
-    'IND': 'index',
-    'CFD': 'cfd',
-    'BOND': 'bond',
-    'CMDTY': 'commodity',
-    'FOP': 'futures_option',
-    'FUND': 'mutual_fund',
-    'WAR': 'warrant',
-    'IOPT': 'warran',
-    'BAG': 'bag',
-    # 'NEWS': 'news',
-}
-
-
-_quote_streams: dict[str, trio.abc.ReceiveStream] = {}
-
-
-async def _setup_quote_stream(
-
-    from_trio: asyncio.Queue,
-    to_trio: trio.abc.SendChannel,
-
-    symbol: str,
-    opts: tuple[int] = (
-        '375',  # RT trade volume (excludes utrades)
-        '233',  # RT trade volume (includes utrades)
-        '236',  # Shortable shares
-
-        # these all appear to only be updated every 25s thus
-        # making them mostly useless and explains why the scanner
-        # is always slow XD
-        # '293',  # Trade count for day
-        '294',  # Trade rate / minute
-        '295',  # Vlm rate / minute
-    ),
-    contract: Optional[Contract] = None,
-
-) -> trio.abc.ReceiveChannel:
-    '''
-    Stream a ticker using the std L1 api.
-
-    This task is ``asyncio``-side and must be called from
-    ``tractor.to_asyncio.open_channel_from()``.
-
-    '''
-    global _quote_streams
-
-    to_trio.send_nowait(None)
-
-    async with load_aio_clients() as accts2clients:
-        caccount_name, client = get_preferred_data_client(accts2clients)
-        contract = contract or (await client.find_contract(symbol))
-        ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts))
-
-        # NOTE: it's batch-wise and slow af but I guess could
-        # be good for backchecking? Seems to be every 5s maybe?
-        # ticker: Ticker = client.ib.reqTickByTickData(
-        #     contract, 'Last',
-        # )
-
-        # # define a simple queue push routine that streams quote packets
-        # # to trio over the ``to_trio`` memory channel.
-        # to_trio, from_aio = trio.open_memory_channel(2**8)  # type: ignore
-        def teardown():
-            ticker.updateEvent.disconnect(push)
-            log.error(f"Disconnected stream for `{symbol}`")
-            client.ib.cancelMktData(contract)
-
-            # decouple broadcast mem chan
-            _quote_streams.pop(symbol, None)
-
-        def push(t: Ticker) -> None:
-            """
-            Push quotes to trio task.
-
-            """
-            # log.debug(t)
-            try:
-                to_trio.send_nowait(t)
-
-            except (
-                trio.BrokenResourceError,
-
-                # XXX: HACK, not sure why this gets left stale (probably
-                # due to our terrible ``tractor.to_asyncio``
-                # implementation for streams.. but if the mem chan
-                # gets left here and starts blocking just kill the feed?
-                # trio.WouldBlock,
-            ):
-                # XXX: eventkit's ``Event.emit()`` for whatever redic
-                # reason will catch and ignore regular exceptions
-                # resulting in tracebacks spammed to console..
-                # Manually do the dereg ourselves.
-                teardown()
-            except trio.WouldBlock:
-                log.warning(
-                    f'channel is blocking symbol feed for {symbol}?'
-                    f'\n{to_trio.statistics}'
-                )
-
-            # except trio.WouldBlock:
-            #     # for slow debugging purposes to avoid clobbering prompt
-            #     # with log msgs
-            #     pass
-
-        ticker.updateEvent.connect(push)
-        try:
-            await asyncio.sleep(float('inf'))
-        finally:
-            teardown()
-
-        # return from_aio
-
-
-@acm
-async def open_aio_quote_stream(
-
-    symbol: str,
-    contract: Optional[Contract] = None,
-
-) -> trio.abc.ReceiveStream:
-
-    from tractor.trionics import broadcast_receiver
-    global _quote_streams
-
-    from_aio = _quote_streams.get(symbol)
-    if from_aio:
-
-        # if we already have a cached feed deliver a rx side clone to consumer
-        async with broadcast_receiver(
-            from_aio,
-            2**6,
-        ) as from_aio:
-            yield from_aio
-            return
-
-    async with tractor.to_asyncio.open_channel_from(
-        _setup_quote_stream,
-        symbol=symbol,
-        contract=contract,
-
-    ) as (first, from_aio):
-
-        # cache feed for later consumers
-        _quote_streams[symbol] = from_aio
-
-        yield from_aio
-
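The cache above gives the first subscriber the real `asyncio`-fed channel and hands later subscribers broadcast clones; stripped to its core the pattern is (helper names hypothetical):

    _streams: dict[str, object] = {}

    async def subscribe(symbol: str):
        chan = _streams.get(symbol)
        if chan is not None:
            return clone_rx(chan)           # hypothetical broadcast_receiver analog
        chan = await open_upstream(symbol)  # hypothetical: real feed, opened once
        _streams[symbol] = chan
        return chan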
-
-# TODO: cython/mypyc/numba this!
-def normalize(
-    ticker: Ticker,
-    calc_price: bool = False
-
-) -> dict:
-
-    # should be real volume for this contract by default
-    calc_price = False
-
-    # check for special contract types
-    con = ticker.contract
-    if type(con) in (
-        ibis.Commodity,
-        ibis.Forex,
-    ):
-        # commodities and forex don't have an exchange name and
-        # no real volume so we have to calculate the price
-        suffix = con.secType
-        # no real volume on this tract
-        calc_price = True
-
-    else:
-        suffix = con.primaryExchange
-        if not suffix:
-            suffix = con.exchange
-
-        # append a `.<suffix>` to the returned symbol
-        # key for derivatives that normally is the expiry
-        # date key.
-        expiry = con.lastTradeDateOrContractMonth
-        if expiry:
-            suffix += f'.{expiry}'
-
-    # convert named tuples to dicts so we send usable keys
-    new_ticks = []
-    for tick in ticker.ticks:
-        if tick and not isinstance(tick, dict):
-            td = tick._asdict()
-            td['type'] = tick_types.get(
-                td['tickType'],
-                'n/a',
-            )
-
-            new_ticks.append(td)
-
-    tbt = ticker.tickByTicks
-    if tbt:
-        print(f'tickbyticks:\n {ticker.tickByTicks}')
-
-    ticker.ticks = new_ticks
-
-    # some contracts don't have volume so we may want to calculate
-    # a midpoint price based on data we can acquire (such as bid / ask)
-    if calc_price:
-        ticker.ticks.append(
-            {'type': 'trade', 'price': ticker.marketPrice()}
-        )
-
-    # serialize for transport
-    data = asdict(ticker)
-
-    # generate fqsn with possible specialized suffix
-    # for derivatives, note the lowercase.
-    data['symbol'] = data['fqsn'] = '.'.join(
-        (con.symbol, suffix)
-    ).lower()
-
-    # convert named tuples to dicts for transport
-    tbts = data.get('tickByTicks')
-    if tbts:
-        data['tickByTicks'] = [tbt._asdict() for tbt in tbts]
-
-    # add time stamps for downstream latency measurements
-    data['brokerd_ts'] = time.time()
-
-    # stupid stupid shit...don't even care any more..
-    # leave it until we do a proper latency study
-    # if ticker.rtTime is not None:
-    #     data['broker_ts'] = data['rtTime_s'] = float(
-    #         ticker.rtTime.timestamp) / 1000.
-    data.pop('rtTime')
-
-    return data
-
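`normalize()`'s fqsn key mirrors `pack_position()`'s `symkey` construction in the broker module above: lower-cased symbol, exchange-ish suffix, optional expiry. With illustrative contract fields:

    symbol, suffix, expiry = 'MNQ', 'GLOBEX', '20220916'
    if expiry:
        suffix += f'.{expiry}'
    fqsn = '.'.join((symbol, suffix)).lower()
    print(fqsn)  # 'mnq.globex.20220916'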
-async def stream_quotes(
-
-    send_chan: trio.abc.SendChannel,
-    symbols: list[str],
-    feed_is_live: trio.Event,
-    loglevel: str = None,
-
-    # startup sync
-    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
-
-) -> None:
-    '''
-    Stream symbol quotes.
-
-    This is a ``trio`` callable routine meant to be invoked
-    once the brokerd is up.
-
-    '''
-    # TODO: support multiple subscriptions
-    sym = symbols[0]
-    log.info(f'request for real-time quotes: {sym}')
-
-    async with open_data_client() as proxy:
-
-        con, first_ticker, details = await proxy.get_sym_details(symbol=sym)
-        first_quote = normalize(first_ticker)
-        # print(f'first quote: {first_quote}')
-
-        def mk_init_msgs() -> dict[str, dict]:
-            '''
-            Collect a bunch of meta-data useful for feed startup and
-            pack in a `dict`-msg.
-
-            '''
-            # pass back some symbol info like min_tick, trading_hours, etc.
-            syminfo = asdict(details)
-            syminfo.update(syminfo['contract'])
-
-            # nested dataclass we probably don't need and that won't IPC
-            # serialize
-            syminfo.pop('secIdList')
-
-            # TODO: more consistent field translation
-            atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]
-
-            # for stocks it seems TWS reports too small a tick size
-            # such that you can't submit orders with that granularity?
-            min_tick = 0.01 if atype == 'stock' else 0
-
-            syminfo['price_tick_size'] = max(syminfo['minTick'], min_tick)
-
-            # for "traditional" assets, volume is normally discrete, not
-            # a float
-            syminfo['lot_tick_size'] = 0.0
-
-            ibclient = proxy._aio_ns.ib.client
-            host, port = ibclient.host, ibclient.port
-
-            # TODO: for loop through all symbols passed in
-            init_msgs = {
-                # pass back token, and bool, signalling if we're the writer
-                # and that history has been written
-                sym: {
-                    'symbol_info': syminfo,
-                    'fqsn': first_quote['fqsn'],
-                },
-                'status': {
-                    'data_ep': f'{host}:{port}',
-                },
-
-            }
-            return init_msgs
-
-        init_msgs = mk_init_msgs()
-
-        # TODO: we should instead spawn a task that waits on a feed to start
-        # and let it wait indefinitely..instead of this hard coded stuff.
-        with trio.move_on_after(1):
-            contract, first_ticker, details = await proxy.get_quote(symbol=sym)
-
-        # it might be outside regular trading hours so see if we can at
-        # least grab history.
-        if isnan(first_ticker.last):
-            task_status.started((init_msgs, first_quote))
-
-            # it's not really live but this will unblock
-            # the brokerd feed task to tell the ui to update?
-            feed_is_live.set()
-
-            # block and let data history backfill code run.
-            await trio.sleep_forever()
-            return  # we never expect feed to come up?
-
-        async with open_aio_quote_stream(
-            symbol=sym,
-            contract=con,
-        ) as stream:
-
-            # ugh, clear ticks since we've consumed them
-            # (ahem, ib_insync is stateful trash)
-            first_ticker.ticks = []
-
-            task_status.started((init_msgs, first_quote))
-
-            async with aclosing(stream):
-                if type(first_ticker.contract) not in (
-                    ibis.Commodity,
-                    ibis.Forex
-                ):
-                    # wait for real volume on feed (trading might be closed)
-                    while True:
-                        ticker = await stream.receive()
-
-                        # for a real volume contract we wait for the first
-                        # "real" trade to take place
-                        if (
-                            # not calc_price
-                            # and not ticker.rtTime
-                            not ticker.rtTime
-                        ):
-                            # spin consuming tickers until we get a real
-                            # market datum
-                            log.debug(f"New unsent ticker: {ticker}")
-                            continue
-                        else:
-                            log.debug("Received first real volume tick")
-                            # ugh, clear ticks since we've consumed them
-                            # (ahem, ib_insync is truly stateful trash)
-                            ticker.ticks = []
-
-                            # XXX: this works because we don't use
-                            # ``aclosing()`` above?
-                            break
-
-                quote = normalize(ticker)
-                log.debug(f"First ticker received {quote}")
-
-                # tell caller quotes are now coming in live
-                feed_is_live.set()
-
-                # last = time.time()
-                async for ticker in stream:
-                    quote = normalize(ticker)
-                    await send_chan.send({quote['fqsn']: quote})
-
-                    # ugh, clear ticks since we've consumed them
-                    ticker.ticks = []
-                    # last = time.time()
-
-
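The `task_status.started((init_msgs, first_quote))` handoff above is the standard `trio` nursery-start synchronization; a minimal, self-contained sketch of the pattern (all names here are illustrative, not part of the codebase):

    import trio
    from trio_typing import TaskStatus

    async def feed(
        task_status: TaskStatus[str] = trio.TASK_STATUS_IGNORED,
    ) -> None:
        # startup work happens here, then the starter is unblocked
        task_status.started('init-msg')
        await trio.sleep_forever()  # live streaming would continue here

    async def main() -> None:
        async with trio.open_nursery() as n:
            init = await n.start(feed)  # blocks until .started() fires
            assert init == 'init-msg'
            n.cancel_scope.cancel()

    trio.run(main)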
-async def data_reset_hack(
-    reset_type: str = 'data',
-
-) -> None:
-    '''
-    Run key combos for resetting data feeds and yield back to caller
-    when complete.
-
-    This is a linux-only hack around:
-
-    https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations
-
-    TODOs:
-        - a return type that hopefully determines if the hack was
-          successful.
-        - other OS support?
-        - integration with ``ib-gw`` run in docker + Xorg?
-
-    '''
-
-    async def vnc_click_hack(
-        reset_type: str = 'data'
-    ) -> None:
-        '''
-        Reset the data or network connection for the VNC attached
-        ib gateway using magic combos.
-
-        '''
-        key = {'data': 'f', 'connection': 'r'}[reset_type]
-
-        import asyncvnc
-
-        async with asyncvnc.connect(
-            'localhost',
-            port=3003,
-            # password='ibcansmbz',
-        ) as client:
-
-            # move to middle of screen
-            # 640x1800
-            client.mouse.move(
-                x=500,
-                y=500,
-            )
-            client.mouse.click()
-            client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked
-
-    await tractor.to_asyncio.run_task(vnc_click_hack)
-
-    # we don't really need the ``xdotool`` approach any more B)
-    return True
-
-
-@tractor.context
-async def open_symbol_search(
-    ctx: tractor.Context,
-
-) -> None:
-
-    # TODO: load user defined symbol set locally for fast search?
-    await ctx.started({})
-
-    async with open_data_client() as proxy:
-        async with ctx.open_stream() as stream:
-
-            last = time.time()
-
-            async for pattern in stream:
-                log.debug(f'received {pattern}')
-                now = time.time()
-
-                assert pattern, 'IB can not accept blank search pattern'
-
-                # throttle search requests to no faster than 1Hz
-                diff = now - last
-                if diff < 1.0:
-                    log.debug('throttle sleeping')
-                    await trio.sleep(diff)
-                    try:
-                        pattern = stream.receive_nowait()
-                    except trio.WouldBlock:
-                        pass
-
-                if not pattern or pattern.isspace():
-                    log.warning('empty pattern received, skipping..')
-
-                    # TODO: *BUG* if nothing is returned here the client
-                    # side will cache a null set result and not show
-                    # anything to the user on re-searches when this query
-                    # timed out. We probably need a special "timeout" msg
-                    # or something...
-
-                    # XXX: this unblocks the far end search task which may
-                    # hold up a multi-search nursery block
-                    await stream.send({})
-
-                    continue
-
-                log.debug(f'searching for {pattern}')
-
-                last = time.time()
-
-                # async batch search using api stocks endpoint and module
-                # defined adhoc symbol set.
-                stock_results = []
-
-                async def stash_results(target: Awaitable[list]):
-                    stock_results.extend(await target)
-
-                async with trio.open_nursery() as sn:
-                    sn.start_soon(
-                        stash_results,
-                        proxy.search_symbols(
-                            pattern=pattern,
-                            upto=5,
-                        ),
-                    )
-
-                    # trigger async request
-                    await trio.sleep(0)
-
-                    # match against our ad-hoc set immediately
-                    adhoc_matches = fuzzy.extractBests(
-                        pattern,
-                        list(_adhoc_futes_set),
-                        score_cutoff=90,
-                    )
-                    log.info(f'fuzzy matched adhocs: {adhoc_matches}')
-                    adhoc_match_results = {}
-                    if adhoc_matches:
-                        # TODO: do we need to pull contract details?
-                        adhoc_match_results = {i[0]: {} for i in adhoc_matches}
-
-                log.debug(f'fuzzy matching stocks {stock_results}')
-                stock_matches = fuzzy.extractBests(
-                    pattern,
-                    stock_results,
-                    score_cutoff=50,
-                )
-
-                matches = adhoc_match_results | {
-                    item[0]: {} for item in stock_matches
-                }
-                # TODO: we used to deliver contract details
-                # {item[2]: item[0] for item in stock_matches}
-
-                log.debug(f"sending matches: {matches.keys()}")
-                await stream.send(matches)
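Note the throttle above sleeps for `diff` (the time already elapsed) rather than the remainder of the 1s window; a generic sketch of a strict rate limit, under that reading, might look like this (all names hypothetical):

    import time
    import trio

    async def rate_limited(period: float = 1.0) -> None:
        last = time.monotonic()
        for _ in range(3):
            elapsed = time.monotonic() - last
            if elapsed < period:
                # sleep only the *remaining* slice of the period
                await trio.sleep(period - elapsed)
            last = time.monotonic()
            # issue the (at most 1/period Hz) request here

    trio.run(rate_limited)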
@@ -14,20 +14,18 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-'''
+"""
 Kraken backend.
 
-'''
+"""
 from contextlib import asynccontextmanager as acm
 from dataclasses import asdict, field
-from datetime import datetime
-from pprint import pformat
-from typing import Any, Optional, AsyncIterator, Callable, Union
+from typing import List, Dict, Any, Tuple, Optional, Callable
 import time
 
 from trio_typing import TaskStatus
 import trio
-import pendulum
+import arrow
 import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np

@@ -35,30 +33,12 @@ import tractor
 from pydantic.dataclasses import dataclass
 from pydantic import BaseModel
 import wsproto
-import urllib.parse
-import hashlib
-import hmac
-import base64
-
-from .. import config
 from .._cacheables import open_cached_client
-from ._util import (
-    resproc,
-    SymbolNotFound,
-    BrokerError,
-    DataThrottle,
-    DataUnavailable,
-)
+from ._util import resproc, SymbolNotFound, BrokerError
 from ..log import get_logger, get_console_log
 from ..data import ShmArray
-from ..data._web_bs import open_autorecon_ws, NoBsWs
-from ..clearing._paper_engine import PaperBoi
-from ..clearing._messages import (
-    BrokerdPosition, BrokerdOrder, BrokerdStatus,
-    BrokerdOrderAck, BrokerdError, BrokerdCancel,
-    BrokerdFill,
-)
+from ..data._web_bs import open_autorecon_ws
 
 
 log = get_logger(__name__)
 

@@ -87,7 +67,7 @@ ohlc_dtype = np.dtype(_ohlc_dtype)
 _show_wap_in_history = True
 
 
-_symbol_info_translation: dict[str, str] = {
+_symbol_info_translation: Dict[str, str] = {
     'tick_decimals': 'pair_decimals',
 }
 

@@ -109,16 +89,16 @@ class Pair(BaseModel):
     lot_multiplier: float
 
     # array of leverage amounts available when buying
-    leverage_buy: list[int]
+    leverage_buy: List[int]
     # array of leverage amounts available when selling
-    leverage_sell: list[int]
+    leverage_sell: List[int]
 
     # fee schedule array in [volume, percent fee] tuples
-    fees: list[tuple[int, float]]
+    fees: List[Tuple[int, float]]
 
     # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
-    fees_maker: list[tuple[int, float]]
+    fees_maker: List[Tuple[int, float]]
 
     fee_volume_currency: str  # volume discount currency
     margin_call: str  # margin call level
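`Pair` is a pydantic `BaseModel`, so Kraken's JSON (where numbers often arrive as strings) gets coerced on construction; a trimmed-down, hypothetical stand-in shows the effect:

    from typing import List, Tuple
    from pydantic import BaseModel

    class MiniPair(BaseModel):  # not the real model, just a coercion demo
        leverage_buy: List[int]
        fees: List[Tuple[int, float]]

    p = MiniPair(leverage_buy=['2', '3'], fees=[['0', '0.26']])
    print(p.leverage_buy, p.fees)  # [2, 3] [(0, 0.26)]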

@@ -126,27 +106,13 @@ class Pair(BaseModel):
     ordermin: float  # minimum order volume for pair
 
 
-class Trade(BaseModel):
-    '''
-    Trade class that helps parse and validate ownTrades stream
-
-    '''
-    reqid: str  # kraken order transaction id
-    action: str  # buy or sell
-    price: str  # price of asset
-    size: str  # vol of asset
-    broker_time: str  # e.g GTC, GTD
-
-
 @dataclass
 class OHLC:
-    '''
-    Description of the flattened OHLC quote format.
+    """Description of the flattened OHLC quote format.
 
     For schema details see:
     https://docs.kraken.com/websockets/#message-ohlc
-    '''
+    """
     chan_id: int  # internal kraken id
     chan_name: str  # eg. ohlc-1 (name-interval)
     pair: str  # fx pair

@@ -160,54 +126,12 @@ class OHLC:
     volume: float  # Accumulated volume **within interval**
     count: int  # Number of trades within interval
     # (sampled) generated tick data
-    ticks: list[Any] = field(default_factory=list)
+    ticks: List[Any] = field(default_factory=list)
 
 
-def get_config() -> dict[str, Any]:
-
-    conf, path = config.load()
-    section = conf.get('kraken')
-
-    if section is None:
-        log.warning(f'No config section found for kraken in {path}')
-        return {}
-
-    return section
-
-
-def get_kraken_signature(
-    urlpath: str,
-    data: dict[str, Any],
-    secret: str
-) -> str:
-    postdata = urllib.parse.urlencode(data)
-    encoded = (str(data['nonce']) + postdata).encode()
-    message = urlpath.encode() + hashlib.sha256(encoded).digest()
-
-    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
-    sigdigest = base64.b64encode(mac.digest())
-    return sigdigest.decode()
-
-
-class InvalidKey(ValueError):
-    '''
-    EAPI:Invalid key
-    This error is returned when the API key used for the call is
-    either expired or disabled, please review the API key in your
-    Settings -> API tab of account management or generate a new one
-    and update your application.
-
-    '''
-
-
 class Client:
 
-    def __init__(
-        self,
-        name: str = '',
-        api_key: str = '',
-        secret: str = ''
-    ) -> None:
+    def __init__(self) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
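The removed `get_kraken_signature` follows Kraken's documented REST auth recipe: an HMAC-SHA512 over the URI path plus SHA256(nonce + urlencoded POST data), keyed with the base64-decoded API secret. A self-contained check with throwaway values (the "secret" below is fabricated for the demo):

    import base64
    import hashlib
    import hmac
    import urllib.parse

    def get_kraken_signature(urlpath: str, data: dict, secret: str) -> str:
        postdata = urllib.parse.urlencode(data)
        encoded = (str(data['nonce']) + postdata).encode()
        message = urlpath.encode() + hashlib.sha256(encoded).digest()
        mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
        return base64.b64encode(mac.digest()).decode()

    sig = get_kraken_signature(
        '/0/private/Balance',
        {'nonce': '1616492376594'},
        base64.b64encode(b'not-a-real-secret').decode(),
    )
    print(sig)  # a deterministic base64 digest string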

@@ -215,12 +139,9 @@ class Client:
             'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
         })
         self._pairs: list[str] = []
-        self._name = name
-        self._api_key = api_key
-        self._secret = secret
 
     @property
-    def pairs(self) -> dict[str, Any]:
+    def pairs(self) -> Dict[str, Any]:
         if self._pairs is None:
             raise RuntimeError(
                 "Make sure to run `cache_symbols()` on startup!"

@@ -233,7 +154,7 @@ class Client:
         self,
         method: str,
         data: dict,
-    ) -> dict[str, Any]:
+    ) -> Dict[str, Any]:
         resp = await self._sesh.post(
             path=f'/public/{method}',
             json=data,

@@ -241,112 +162,6 @@ class Client:
         )
         return resproc(resp, log)
 
-    async def _private(
-        self,
-        method: str,
-        data: dict,
-        uri_path: str
-    ) -> dict[str, Any]:
-        headers = {
-            'Content-Type':
-                'application/x-www-form-urlencoded',
-            'API-Key':
-                self._api_key,
-            'API-Sign':
-                get_kraken_signature(uri_path, data, self._secret)
-        }
-        resp = await self._sesh.post(
-            path=f'/private/{method}',
-            data=data,
-            headers=headers,
-            timeout=float('inf')
-        )
-        return resproc(resp, log)
-
-    async def endpoint(
-        self,
-        method: str,
-        data: dict[str, Any]
-    ) -> dict[str, Any]:
-        uri_path = f'/0/private/{method}'
-        data['nonce'] = str(int(1000*time.time()))
-        return await self._private(method, data, uri_path)
-
-    async def get_trades(
-        self,
-        data: dict[str, Any] = {}
-    ) -> dict[str, Any]:
-        data['ofs'] = 0
-        # Grab all trade history
-        # https://docs.kraken.com/rest/#operation/getTradeHistory
-        # Kraken uses 'ofs' to refer to the offset
-        while True:
-            resp = await self.endpoint('TradesHistory', data)
-            # grab the first 50 trades
-            if data['ofs'] == 0:
-                trades = resp['result']['trades']
-            # load the next 50 trades using dict constructor
-            # for speed
-            elif data['ofs'] == 50:
-                trades = dict(trades, **resp['result']['trades'])
-            # catch the end of the trades
-            elif resp['result']['trades'] == {}:
-                count = resp['result']['count']
-                break
-            # update existing dict if num trades exceeds 100
-            else:
-                trades.update(resp['result']['trades'])
-            # increment the offset counter
-            data['ofs'] += 50
-            # To avoid exceeding API rate limit in case of a lot of trades
-            await trio.sleep(1)
-
-        # make sure you grabbed all the trades
-        assert count == len(trades.values())
-
-        return trades
-
-    async def submit_limit(
-        self,
-        symbol: str,
-        price: float,
-        action: str,
-        size: float,
-        reqid: str = None,
-        validate: bool = False  # set True test call without a real submission
-    ) -> dict:
-        '''
-        Place an order and return integer request id provided by client.
-
-        '''
-        # Build common data dict for common keys from both endpoints
-        data = {
-            "pair": symbol,
-            "price": str(price),
-            "validate": validate
-        }
-        if reqid is None:
-            # Build order data for kraken api
-            data |= {
-                "ordertype": "limit", "type": action, "volume": str(size)
-            }
-            return await self.endpoint('AddOrder', data)
-        else:
-            # Edit order data for kraken api
-            data["txid"] = reqid
-            return await self.endpoint('EditOrder', data)
-
-    async def submit_cancel(
-        self,
-        reqid: str,
-    ) -> dict:
-        '''
-        Send cancel request for order id ``reqid``.
-
-        '''
-        # txid is a transaction id given by kraken
-        return await self.endpoint('CancelOrder', {"txid": reqid})
-
     async def symbol_info(
         self,
         pair: Optional[str] = None,
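The removed `get_trades` pages through Kraken's trade history 50 records at a time via the `ofs` offset until an empty page comes back; the same accumulate-until-empty loop in miniature (the pager below fakes the REST endpoint):

    def fetch_page(ofs: int) -> dict:
        # stand-in for the 'TradesHistory' endpoint: 120 fake trades, 50/page
        all_trades = {f'TX{i}': {'vol': i} for i in range(120)}
        keys = list(all_trades)[ofs:ofs + 50]
        return {k: all_trades[k] for k in keys}

    trades: dict = {}
    ofs = 0
    while True:
        page = fetch_page(ofs)
        if not page:  # an empty page marks the end
            break
        trades.update(page)
        ofs += 50

    assert len(trades) == 120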

@@ -382,7 +197,7 @@ class Client:
         self,
         pattern: str,
         limit: int = None,
-    ) -> dict[str, Any]:
+    ) -> Dict[str, Any]:
         if self._pairs is not None:
             data = self._pairs
         else:

@@ -399,26 +214,17 @@ class Client:
     async def bars(
         self,
         symbol: str = 'XBTUSD',
 
         # UTC 2017-07-02 12:53:20
-        since: Optional[Union[int, datetime]] = None,
+        since: int = None,
         count: int = 720,  # <- max allowed per query
         as_np: bool = True,
 
     ) -> dict:
 
         if since is None:
-            since = pendulum.now('UTC').start_of('minute').subtract(
-                minutes=count).timestamp()
+            since = arrow.utcnow().floor('minute').shift(
+                minutes=-count).timestamp()
 
-        elif isinstance(since, int):
-            since = pendulum.from_timestamp(since).timestamp()
-
-        else:  # presumably a pendulum datetime
-            since = since.timestamp()
-
         # UTC 2017-07-02 12:53:20 is oldest seconds value
-        since = str(max(1499000000, int(since)))
+        since = str(max(1499000000, since))
         json = await self._public(
             'OHLC',
             data={

@@ -462,30 +268,12 @@ class Client:
             array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
             return array
         except KeyError:
-            errmsg = json['error'][0]
-
-            if 'not found' in errmsg:
-                raise SymbolNotFound(errmsg + f': {symbol}')
-
-            elif 'Too many requests' in errmsg:
-                raise DataThrottle(f'{symbol}')
-
-            else:
-                raise BrokerError(errmsg)
+            raise SymbolNotFound(json['error'][0] + f': {symbol}')
 
 
 @acm
 async def get_client() -> Client:
-
-    section = get_config()
-    if section:
-        client = Client(
-            name=section['key_descr'],
-            api_key=section['api_key'],
-            secret=section['secret']
-        )
-    else:
-        client = Client()
+    client = Client()
 
     # at startup, load all symbols locally for fast search
     await client.cache_symbols()

@@ -493,382 +281,8 @@ async def get_client() -> Client:
     yield client
 
 
-def pack_positions(
-    acc: str,
-    trades: dict
-) -> list[Any]:
-    positions: dict[str, float] = {}
-    vols: dict[str, float] = {}
-    costs: dict[str, float] = {}
-    position_msgs: list[Any] = []
-
-    for trade in trades.values():
-        sign = -1 if trade['type'] == 'sell' else 1
-        pair = trade['pair']
-        vol = float(trade['vol'])
-        vols[pair] = vols.get(pair, 0) + sign * vol
-        costs[pair] = costs.get(pair, 0) + sign * float(trade['cost'])
-        positions[pair] = costs[pair] / vols[pair] if vols[pair] else 0
-
-    for ticker, pos in positions.items():
-        vol = float(vols[ticker])
-        if not vol:
-            continue
-        norm_sym = normalize_symbol(ticker)
-        msg = BrokerdPosition(
-            broker='kraken',
-            account=acc,
-            symbol=norm_sym,
-            currency=norm_sym[-3:],
-            size=vol,
-            avg_price=float(pos),
-        )
-        position_msgs.append(msg.dict())
-
-    return position_msgs
-
-
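`pack_positions` nets signed volume and cost per pair and derives the average entry price as cost / volume; a toy run of just that arithmetic (trade values invented):

    trades = {
        't1': {'type': 'buy',  'vol': '1.0', 'cost': '30000'},
        't2': {'type': 'buy',  'vol': '1.0', 'cost': '34000'},
        't3': {'type': 'sell', 'vol': '0.5', 'cost': '16500'},
    }
    vols, costs = 0.0, 0.0
    for t in trades.values():
        sign = -1 if t['type'] == 'sell' else 1
        vols += sign * float(t['vol'])
        costs += sign * float(t['cost'])

    print(vols, costs / vols)  # 1.5 31666.66... -> net size and avg price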
-def normalize_symbol(
-    ticker: str
-) -> str:
-    # This is to convert symbol names from what kraken
-    # uses to the traditional 3x3 pair symbol syntax
-    symlen = len(ticker)
-    if symlen == 6:
-        return ticker.lower()
-    else:
-        for sym in ['XXBT', 'XXMR', 'ZEUR']:
-            if sym in ticker:
-                ticker = ticker.replace(sym, sym[1:])
-        return ticker.lower()
-
-
-def make_auth_sub(data: dict[str, Any]) -> dict[str, str]:
-    '''
-    Create a request subscription packet dict.
-
-    ## TODO: point to the auth urls
-    https://docs.kraken.com/websockets/#message-subscribe
-
-    '''
-    # eg. specific logic for this in kraken's sync client:
-    # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
-    return {
-        'event': 'subscribe',
-        'subscription': data,
-    }
-
-
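`normalize_symbol` strips Kraken's legacy X/Z asset-class prefixes and lowercases; e.g.:

    >>> normalize_symbol('XXBTZEUR')  # 8 chars: 'XXBT' -> 'XBT', 'ZEUR' -> 'EUR'
    'xbteur'
    >>> normalize_symbol('ADAEUR')    # already 6 chars: just lowercased
    'adaeur'

Likewise `make_auth_sub({'name': 'ownTrades', 'token': token})` just wraps its input as `{'event': 'subscribe', 'subscription': {...}}` per the linked Kraken docs.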
-async def handle_order_requests(
-
-    client: Client,
-    ems_order_stream: tractor.MsgStream,
-
-) -> None:
-
-    request_msg: dict
-    order: BrokerdOrder
-
-    async for request_msg in ems_order_stream:
-        log.info(
-            'Received order request:\n'
-            f'{pformat(request_msg)}'
-        )
-
-        action = request_msg['action']
-
-        if action in {'buy', 'sell'}:
-
-            account = request_msg['account']
-            if account != 'kraken.spot':
-                log.error(
-                    'This is a kraken account, \
-                    only a `kraken.spot` selection is valid'
-                )
-                await ems_order_stream.send(BrokerdError(
-                    oid=request_msg['oid'],
-                    symbol=request_msg['symbol'],
-
-                    # reason=f'Kraken only, No account found: `{account}` ?',
-                    reason=(
-                        'Kraken only, order mode disabled due to '
-                        'https://github.com/pikers/piker/issues/299'
-                    ),
-
-                ).dict())
-                continue
-
-            # validate
-            order = BrokerdOrder(**request_msg)
-            # call our client api to submit the order
-            resp = await client.submit_limit(
-                symbol=order.symbol,
-                price=order.price,
-                action=order.action,
-                size=order.size,
-                reqid=order.reqid,
-            )
-
-            err = resp['error']
-            if err:
-                oid = order.oid
-                log.error(f'Failed to submit order: {oid}')
-
-                await ems_order_stream.send(
-                    BrokerdError(
-                        oid=order.oid,
-                        reqid=order.reqid,
-                        symbol=order.symbol,
-                        reason="Failed order submission",
-                        broker_details=resp
-                    ).dict()
-                )
-            else:
-                # TODO: handle multiple orders (cancels?)
-                # txid is an array of strings
-                if order.reqid is None:
-                    reqid = resp['result']['txid'][0]
-                else:
-                    # update the internal pairing of oid to krakens
-                    # txid with the new txid that is returned on edit
-                    reqid = resp['result']['txid']
-
-                # deliver ack that order has been submitted to broker routing
-                await ems_order_stream.send(
-                    BrokerdOrderAck(
-
-                        # ems order request id
-                        oid=order.oid,
-
-                        # broker specific request id
-                        reqid=reqid,
-
-                        # account that made the order
-                        account=order.account
-
-                    ).dict()
-                )
-
-        elif action == 'cancel':
-            msg = BrokerdCancel(**request_msg)
-
-            # Send order cancellation to kraken
-            resp = await client.submit_cancel(
-                reqid=msg.reqid
-            )
-
-            # Check to make sure there was no error returned by
-            # the kraken endpoint. Assert one order was cancelled.
-            try:
-                result = resp['result']
-                count = result['count']
-
-            # check for 'error' key if we received no 'result'
-            except KeyError:
-                error = resp.get('error')
-
-                await ems_order_stream.send(
-                    BrokerdError(
-                        oid=msg.oid,
-                        reqid=msg.reqid,
-                        symbol=msg.symbol,
-                        reason="Failed order cancel",
-                        broker_details=resp
-                    ).dict()
-                )
-
-                if not error:
-                    raise BrokerError(f'Unknown order cancel response: {resp}')
-
-            else:
-                if not count:  # no orders were cancelled?
-
-                    # XXX: what exactly is this from and why would we care?
-                    # there doesn't seem to be any docs here?
-                    # https://docs.kraken.com/rest/#operation/cancelOrder
-
-                    # Check to make sure the cancellation is NOT pending,
-                    # then send the confirmation to the ems order stream
-                    pending = result.get('pending')
-                    if pending:
-                        log.error(f'Order {oid} cancel was not yet successful')
-
-                        await ems_order_stream.send(
-                            BrokerdError(
-                                oid=msg.oid,
-                                reqid=msg.reqid,
-                                symbol=msg.symbol,
-                                # TODO: maybe figure out if pending
-                                # cancels will eventually get cancelled
-                                reason="Order cancel is still pending?",
-                                broker_details=resp
-                            ).dict()
-                        )
-
-                else:  # order cancel success case.
-
-                    await ems_order_stream.send(
-                        BrokerdStatus(
-                            reqid=msg.reqid,
-                            account=msg.account,
-                            time_ns=time.time_ns(),
-                            status='cancelled',
-                            reason='Order cancelled',
-                            broker_details={'name': 'kraken'}
-                        ).dict()
-                    )
-        else:
-            log.error(f'Unknown order command: {request_msg}')
-
-
-@tractor.context
-async def trades_dialogue(
-    ctx: tractor.Context,
-    loglevel: str = None,
-) -> AsyncIterator[dict[str, Any]]:
-
-    # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
-
-    @acm
-    async def subscribe(ws: wsproto.WSConnection, token: str):
-        # XXX: setup subs
-        # https://docs.kraken.com/websockets/#message-subscribe
-        # specific logic for this in kraken's shitty sync client:
-        # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
-        trades_sub = make_auth_sub(
-            {'name': 'ownTrades', 'token': token}
-        )
-
-        # TODO: we want to eventually allow unsubs which should
-        # be completely fine to request from a separate task
-        # since internally the ws methods appear to be FIFO
-        # locked.
-        await ws.send_msg(trades_sub)
-
-        yield
-
-        # unsub from all pairs on teardown
-        await ws.send_msg({
-            'event': 'unsubscribe',
-            'subscription': ['ownTrades'],
-        })
-
-        # XXX: do we need to ack the unsub?
-        # await ws.recv_msg()
-
-    # Authenticated block
-    async with get_client() as client:
-        if not client._api_key:
-            log.error('Missing Kraken API key: Trades WS connection failed')
-            await ctx.started(({}, ['paper']))
-
-            async with (
-                ctx.open_stream() as ems_stream,
-                trio.open_nursery() as n,
-            ):
-
-                client = PaperBoi(
-                    'kraken',
-                    ems_stream,
-                    _buys={},
-                    _sells={},
-
-                    _reqids={},
-
-                    # TODO: load paper positions from ``positions.toml``
-                    _positions={},
-                )
-
-                # TODO: maybe add multiple accounts
-                n.start_soon(handle_order_requests, client, ems_stream)
-
-        acc_name = 'kraken.' + client._name
-        trades = await client.get_trades()
-
-        position_msgs = pack_positions(acc_name, trades)
-
-        await ctx.started((position_msgs, (acc_name,)))
-
-        # Get websocket token for authenticated data stream
-        # Assert that a token was actually received.
-        resp = await client.endpoint('GetWebSocketsToken', {})
-
-        # lol wtf is this..
-        assert resp['error'] == []
-
-        token = resp['result']['token']
-
-        async with (
-            ctx.open_stream() as ems_stream,
-            trio.open_nursery() as n,
-        ):
-            # TODO: maybe add multiple accounts
-            n.start_soon(handle_order_requests, client, ems_stream)
-
-            # Process trades msg stream of ws
-            async with open_autorecon_ws(
-                'wss://ws-auth.kraken.com/',
-                fixture=subscribe,
-                token=token,
-            ) as ws:
-                async for msg in process_trade_msgs(ws):
-                    for trade in msg:
-                        # check the type of packaged message
-                        assert type(trade) == Trade
-
-                        # prepare and send a filled status update
-                        filled_msg = BrokerdStatus(
-                            reqid=trade.reqid,
-                            time_ns=time.time_ns(),
-
-                            account='kraken.spot',
-                            status='filled',
-                            filled=float(trade.size),
-                            reason='Order filled by kraken',
-                            broker_details={
-                                'name': 'kraken',
-                                'broker_time': trade.broker_time
-                            },
-
-                            # TODO: figure out if kraken gives a count
-                            # of how many units of underlying were
-                            # filled. Alternatively we can decrement
-                            # this value ourselves by associating and
-                            # calcing from the diff with the original
-                            # client-side request, see:
-                            # https://github.com/pikers/piker/issues/296
-                            remaining=0,
-                        )
-
-                        await ems_stream.send(filled_msg.dict())
-
-                        # send a fill msg for gui update
-                        fill_msg = BrokerdFill(
-                            reqid=trade.reqid,
-                            time_ns=time.time_ns(),
-
-                            action=trade.action,
-                            size=float(trade.size),
-                            price=float(trade.price),
-                            # TODO: maybe capture more msg data i.e fees?
-                            broker_details={'name': 'kraken'},
-                            broker_time=float(trade.broker_time)
-                        )
-
-                        await ems_stream.send(fill_msg.dict())
-
-
-async def stream_messages(
-    ws: NoBsWs,
-):
-    '''
-    Message stream parser and heartbeat handler.
-
-    Deliver ws subscription messages as well as handle heartbeat logic
-    through a single async generator.
-
-    '''
+async def stream_messages(ws):
     too_slow_count = last_hb = 0
 
     while True:

@@ -906,95 +320,39 @@ async def stream_messages(
         if err:
             raise BrokerError(err)
         else:
-            yield msg
-
-
-async def process_data_feed_msgs(
-    ws: NoBsWs,
-):
-    '''
-    Parse and pack data feed messages.
-
-    '''
-    async for msg in stream_messages(ws):
-
-        chan_id, *payload_array, chan_name, pair = msg
-
-        if 'ohlc' in chan_name:
-
-            yield 'ohlc', OHLC(chan_id, chan_name, pair, *payload_array[0])
-
-        elif 'spread' in chan_name:
-
-            bid, ask, ts, bsize, asize = map(float, payload_array[0])
-
-            # TODO: really makes you think IB has a horrible API...
-            quote = {
-                'symbol': pair.replace('/', ''),
-                'ticks': [
-                    {'type': 'bid', 'price': bid, 'size': bsize},
-                    {'type': 'bsize', 'price': bid, 'size': bsize},
-
-                    {'type': 'ask', 'price': ask, 'size': asize},
-                    {'type': 'asize', 'price': ask, 'size': asize},
-                ],
-            }
-            yield 'l1', quote
-
-        # elif 'book' in msg[-2]:
-        #     chan_id, *payload_array, chan_name, pair = msg
-        #     print(msg)
-
-        else:
-            print(f'UNHANDLED MSG: {msg}')
-            yield msg
-
-
-async def process_trade_msgs(
-    ws: NoBsWs,
-):
-    '''
-    Parse and pack data feed messages.
-
-    '''
-    sequence_counter = 0
-    async for msg in stream_messages(ws):
-
-        try:
-            # check that we are on the ownTrades stream and that msgs
-            # are arriving in sequence with kraken. For clarification see
-            # the kraken ws api docs for this stream:
-            # https://docs.kraken.com/websockets/#message-ownTrades
-            assert msg[1] == 'ownTrades'
-            assert msg[2]['sequence'] > sequence_counter
-            sequence_counter += 1
-            raw_msgs = msg[0]
-            trade_msgs = []
-
-            # Check that we are only processing new trades
-            if msg[2]['sequence'] != 1:
-                # check if its a new order or an update msg
-                for trade_msg in raw_msgs:
-                    trade = list(trade_msg.values())[0]
-                    order_msg = Trade(
-                        reqid=trade['ordertxid'],
-                        action=trade['type'],
-                        price=trade['price'],
-                        size=trade['vol'],
-                        broker_time=trade['time']
-                    )
-                    trade_msgs.append(order_msg)
-
-            yield trade_msgs
-
-        except AssertionError:
-            print(f'UNHANDLED MSG: {msg}')
-            yield msg
+            chan_id, *payload_array, chan_name, pair = msg
+
+            if 'ohlc' in chan_name:
+                yield 'ohlc', OHLC(chan_id, chan_name, pair, *payload_array[0])
+
+            elif 'spread' in chan_name:
+                bid, ask, ts, bsize, asize = map(float, payload_array[0])
+
+                # TODO: really makes you think IB has a horrible API...
+                quote = {
+                    'symbol': pair.replace('/', ''),
+                    'ticks': [
+                        {'type': 'bid', 'price': bid, 'size': bsize},
+                        {'type': 'bsize', 'price': bid, 'size': bsize},
+                        {'type': 'ask', 'price': ask, 'size': asize},
+                        {'type': 'asize', 'price': ask, 'size': asize},
+                    ],
+                }
+                yield 'l1', quote
+
+            # elif 'book' in msg[-2]:
+            #     chan_id, *payload_array, chan_name, pair = msg
+            #     print(msg)
+
+            else:
+                print(f'UNHANDLED MSG: {msg}')
 
 
 def normalize(
     ohlc: OHLC,
 
 ) -> dict:
     quote = asdict(ohlc)
     quote['broker_ts'] = quote['time']
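The `'spread'` branch (present on both sides of the diff) fans one Kraken payload into four L1 ticks; with an invented payload:

    payload = ['30000.1', '30000.2', '1616663618.3', '0.5', '1.2']
    bid, ask, ts, bsize, asize = map(float, payload)
    quote = {
        'symbol': 'XBT/USD'.replace('/', ''),  # -> 'XBTUSD'
        'ticks': [
            {'type': 'bid', 'price': bid, 'size': bsize},
            {'type': 'bsize', 'price': bid, 'size': bsize},
            {'type': 'ask', 'price': ask, 'size': asize},
            {'type': 'asize', 'price': ask, 'size': asize},
        ],
    }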

@@ -1012,13 +370,12 @@ def normalize(
     return topic, quote
 
 
-def make_sub(pairs: list[str], data: dict[str, Any]) -> dict[str, str]:
-    '''
-    Create a request subscription packet dict.
+def make_sub(pairs: List[str], data: Dict[str, Any]) -> Dict[str, str]:
+    """Create a request subscription packet dict.
 
     https://docs.kraken.com/websockets/#message-subscribe
 
-    '''
+    """
     # eg. specific logic for this in kraken's sync client:
     # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
     return {

@@ -1036,45 +393,7 @@ async def open_history_client(
 
     # TODO implement history getter for the new storage layer.
     async with open_cached_client('kraken') as client:
-
-        # lol, kraken won't send any more than the "last"
-        # 720 1m bars.. so we have to just ignore further
-        # requests of this type..
-        queries: int = 0
-
-        async def get_ohlc(
-            end_dt: Optional[datetime] = None,
-            start_dt: Optional[datetime] = None,
-
-        ) -> tuple[
-            np.ndarray,
-            datetime,  # start
-            datetime,  # end
-        ]:
-
-            nonlocal queries
-            if queries > 0:
-                raise DataUnavailable
-
-            count = 0
-            while count <= 3:
-                try:
-                    array = await client.bars(
-                        symbol,
-                        since=end_dt,
-                    )
-                    count += 1
-                    queries += 1
-                    break
-                except DataThrottle:
-                    log.warning(f'kraken OHLC throttle for {symbol}')
-                    await trio.sleep(1)
-
-            start_dt = pendulum.from_timestamp(array[0]['time'])
-            end_dt = pendulum.from_timestamp(array[-1]['time'])
-            return array, start_dt, end_dt
-
-        yield get_ohlc, {'erlangs': 1, 'rate': 1}
+        yield client
 
 
 async def backfill_bars(

@@ -1085,9 +404,8 @@ async def backfill_bars(
     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
 
 ) -> None:
-    '''
-    Fill historical bars into shared mem / storage afap.
-    '''
+    """Fill historical bars into shared mem / storage afap.
+    """
     with trio.CancelScope() as cs:
         async with open_cached_client('kraken') as client:
             bars = await client.bars(symbol=sym)

@@ -1098,7 +416,7 @@ async def backfill_bars(
 async def stream_quotes(
 
     send_chan: trio.abc.SendChannel,
-    symbols: list[str],
+    symbols: List[str],
     feed_is_live: trio.Event,
     loglevel: str = None,
 

@@ -1106,15 +424,13 @@ async def stream_quotes(
     sub_type: str = 'ohlc',
 
     # startup sync
-    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
+    task_status: TaskStatus[Tuple[Dict, Dict]] = trio.TASK_STATUS_IGNORED,
 
 ) -> None:
-    '''
-    Subscribe for ohlc stream of quotes for ``pairs``.
+    """Subscribe for ohlc stream of quotes for ``pairs``.
 
     ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.
-    '''
+    """
     # XXX: required to propagate ``tractor`` loglevel to piker logging
     get_console_log(loglevel or tractor.current_actor().loglevel)
 

@@ -1187,16 +503,15 @@ async def stream_quotes(
             # XXX: do we need to ack the unsub?
             # await ws.recv_msg()
 
-    # see the tips on reconnection logic:
+    # see the tips on reonnection logic:
     # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
-    ws: NoBsWs
     async with open_autorecon_ws(
         'wss://ws.kraken.com/',
         fixture=subscribe,
     ) as ws:
 
         # pull a first quote and deliver
-        msg_gen = process_data_feed_msgs(ws)
+        msg_gen = stream_messages(ws)
 
         # TODO: use ``anext()`` when it lands in 3.10!
         typ, ohlc_last = await msg_gen.__anext__()

@@ -19,6 +19,7 @@ Questrade API backend.
 """
 from __future__ import annotations
 import inspect
+import contextlib
 import time
 from datetime import datetime
 from functools import partial

@@ -31,10 +32,11 @@ from typing import (
     Callable,
 )
 
-import pendulum
+import arrow
 import trio
 import tractor
 from async_generator import asynccontextmanager
+import pandas as pd
 import numpy as np
 import wrapt
 import asks

@@ -44,6 +46,7 @@ from .._cacheables import open_cached_client, async_lifo_cache
 from .. import config
 from ._util import resproc, BrokerError, SymbolNotFound
 from ..log import get_logger, colorize_json, get_console_log
+from . import get_brokermod
 
 
 log = get_logger(__name__)

@@ -598,16 +601,12 @@ class Client:
         sid = sids[symbol]
 
         # get last market open end time
-        est_end = now = pendulum.now('UTC').in_timezone(
-            'America/New_York').start_of('minute')
+        est_end = now = arrow.utcnow().to('US/Eastern').floor('minute')
 
         # on non-paid feeds we can't retrieve the first 15 mins
         wd = now.isoweekday()
         if wd > 5:
             quotes = await self.quote([symbol])
-            est_end = pendulum.parse(
-                quotes[0]['lastTradeTime']
-            )
+            est_end = arrow.get(quotes[0]['lastTradeTime'])
             if est_end.hour == 0:
                 # XXX don't bother figuring out extended hours for now
                 est_end = est_end.replace(hour=17)

@@ -668,7 +667,7 @@ def get_OHLCV(
     """
     del bar['end']
     del bar['VWAP']
-    bar['start'] = pendulum.from_timestamp(bar['start']) / 10**9
+    bar['start'] = pd.Timestamp(bar['start']).value/10**9
     return tuple(bar.values())

@@ -201,8 +201,8 @@ async def open_ems(
     # ready for order commands
     book = get_orders()
 
-    from ..data._source import unpack_fqsn
-    broker, symbol, suffix = unpack_fqsn(fqsn)
+    from ..data._source import uncons_fqsn
+    broker, symbol, suffix = uncons_fqsn(fqsn)
 
     async with maybe_open_emsd(broker) as portal:

@@ -20,6 +20,7 @@ In da suit parlances: "Execution management systems"
 """
 from contextlib import asynccontextmanager
 from dataclasses import dataclass, field
+# from math import isnan
 from pprint import pformat
 import time
 from typing import AsyncIterator, Callable

@@ -80,9 +81,7 @@ def mk_check(
 
         return check_lt
 
-    raise ValueError(
-        f'trigger: {trigger_price}, last: {known_last}'
-    )
+    raise ValueError('trigger: {trigger_price}, last: {known_last}')
 
 
 @dataclass

@@ -500,9 +499,7 @@ async def open_brokerd_trades_dialogue(
     finally:
         # parent context must have been closed
         # remove from cache so next client will respawn if needed
-        relay = _router.relays.pop(broker, None)
-        if not relay:
-            log.warning(f'Relay for {broker} was already removed!?')
+        _router.relays.pop(broker)
 
 
 @tractor.context

@@ -563,10 +560,7 @@ async def translate_and_relay_brokerd_events(
 
     name = brokerd_msg['name']
 
-    log.info(
-        f'Received broker trade event:\n'
-        f'{pformat(brokerd_msg)}'
-    )
+    log.info(f'Received broker trade event:\n{pformat(brokerd_msg)}')
 
     if name == 'position':
 

@@ -618,28 +612,19 @@ async def translate_and_relay_brokerd_events(
             # packed at submission since we already know it ahead of
             # time
             paper = brokerd_msg['broker_details'].get('paper_info')
-            ext = brokerd_msg['broker_details'].get('external')
             if paper:
                 # paperboi keeps the ems id up front
                 oid = paper['oid']
 
-            elif ext:
+            else:
                 # may be an order msg specified as "external" to the
                 # piker ems flow (i.e. generated by some other
                 # external broker backend client (like tws for ib)
-                log.error(f"External trade event {ext}")
+                ext = brokerd_msg['broker_details'].get('external')
+                if ext:
+                    log.error(f"External trade event {ext}")
 
                 continue
 
-            else:
-                # something is out of order, we don't have an oid for
-                # this broker-side message.
-                log.error(
-                    'Unknown oid:{oid} for msg:\n'
-                    f'{pformat(brokerd_msg)}'
-                    'Unable to relay message to client side!?'
-                )
-
         else:
             # check for existing live flow entry
             entry = book._ems_entries.get(oid)

@@ -837,9 +822,7 @@ async def process_client_order_cmds(
         if reqid:
 
             # send cancel to brokerd immediately!
-            log.info(
-                f'Submitting cancel for live order {reqid}'
-            )
+            log.info("Submitting cancel for live order {reqid}")
 
             await brokerd_order_stream.send(msg.dict())
 

@@ -1043,8 +1026,8 @@ async def _emsd_main(
     global _router
     assert _router
 
-    from ..data._source import unpack_fqsn
-    broker, symbol, suffix = unpack_fqsn(fqsn)
+    from ..data._source import uncons_fqsn
+    broker, symbol, suffix = uncons_fqsn(fqsn)
     dark_book = _router.get_dark_book(broker)
 
     # TODO: would be nice if in tractor we can require either a ctx arg,

@@ -1068,6 +1051,11 @@ async def _emsd_main(
     book = _router.get_dark_book(broker)
     book.lasts[fqsn] = first_quote['last']
 
+    # XXX: ib is a cucker but we've fixed avoiding receiving any
+    # `Nan`s in the backend during market hours (right?). this was
+    # here previously as a sanity check during market hours.
+    # assert not isnan(last)
+
     # open a stream with the brokerd backend for order
     # flow dialogue
     async with (
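Note the head side's `ValueError('trigger: {trigger_price}, last: {known_last}')` drops the `f` prefix, so the braces are emitted literally rather than interpolated; a two-line demonstration of the pitfall:

    trigger_price, known_last = 101.5, 100.0
    print('trigger: {trigger_price}, last: {known_last}')   # literal braces
    print(f'trigger: {trigger_price}, last: {known_last}')  # trigger: 101.5, last: 100.0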
@ -184,7 +184,7 @@ class BrokerdStatus(BaseModel):
|
||||||
# {
|
# {
|
||||||
# 'submitted',
|
# 'submitted',
|
||||||
# 'cancelled',
|
# 'cancelled',
|
||||||
# 'filled',
|
# 'executed',
|
||||||
# }
|
# }
|
||||||
status: str
|
status: str
|
||||||
|
|
||||||
|
@ -242,7 +242,7 @@ class BrokerdError(BaseModel):
|
||||||
|
|
||||||
# if no brokerd order request was actually submitted (eg. we errored
|
# if no brokerd order request was actually submitted (eg. we errored
|
||||||
# at the ``pikerd`` layer) then there will be ``reqid`` allocated.
|
# at the ``pikerd`` layer) then there will be ``reqid`` allocated.
|
||||||
reqid: Optional[Union[int, str]] = None
|
reqid: Union[int, str] = ''
|
||||||
|
|
||||||
symbol: str
|
symbol: str
|
||||||
reason: str
|
reason: str
|
||||||
|
|
|
@@ -32,7 +32,7 @@ from dataclasses import dataclass
 
 from .. import data
 from ..data._normalize import iterticks
-from ..data._source import unpack_fqsn
+from ..data._source import uncons_fqsn
 from ..log import get_logger
 from ._messages import (
     BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,

@@ -390,7 +390,7 @@ async def handle_order_requests(
         account = request_msg['account']
         if account != 'paper':
             log.error(
-                'This is a paper account, only a `paper` selection is valid'
+                'On a paper account, only a `paper` selection is valid'
             )
             await ems_order_stream.send(BrokerdError(
                 oid=request_msg['oid'],

@@ -463,8 +463,7 @@ async def trades_dialogue(
     ):
         # TODO: load paper positions per broker from .toml config file
         # and pass as symbol to position data mapping: ``dict[str, dict]``
-        # await ctx.started(all_positions)
-        await ctx.started(({}, {'paper',}))
+        await ctx.started(({}, ['paper']))
 
         async with (
             ctx.open_stream() as ems_stream,

@@ -500,7 +499,7 @@ async def open_paperboi(
     its context.
 
     '''
-    broker, symbol, expiry = unpack_fqsn(fqsn)
+    broker, symbol, expiry = uncons_fqsn(fqsn)
     service_name = f'paperboi.{broker}'
 
     async with (

@@ -1,25 +1,7 @@
-# piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-'''
+"""
 CLI commons.
-
-'''
+"""
 import os
-from pprint import pformat

 import click
 import trio
@@ -72,22 +54,17 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
         trio.open_nursery() as n,
     ):
         if tsdb:
-            from piker.data._ahab import start_ahab
-            from piker.data.marketstore import start_marketstore
+            # TODO:
+            # async with maybe_open_marketstored():

+            from piker.data._ahab import start_ahab
             log.info('Spawning `marketstore` supervisor')
-            ctn_ready, config, (cid, pid) = await n.start(
+            ctn_ready = await n.start(
                 start_ahab,
                 'marketstored',
-                start_marketstore,
-
             )
-            log.info(
-                f'`marketstore` up!\n'
-                f'`marketstored` pid: {pid}\n'
-                f'docker container id: {cid}\n'
-                f'config: {pformat(config)}'
-            )
+            await ctn_ready.wait()
+            log.info('`marketstore` container:{uid} up')

         await trio.sleep_forever()
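Aside: the two sides unpack different values from ``await n.start(...)`` above; in trio, whatever the child task passes to ``task_status.started()`` becomes the return value of ``nursery.start()``. A minimal runnable sketch of that hand-off (names hypothetical):

    import trio

    async def daemon(task_status=trio.TASK_STATUS_IGNORED):
        ready = trio.Event()
        # this tuple is exactly what ``await n.start(daemon)`` returns
        task_status.started((ready, {'port': 5993}))
        ready.set()
        await trio.sleep_forever()

    async def main():
        async with trio.open_nursery() as n:
            ready, config = await n.start(daemon)
            await ready.wait()
            n.cancel_scope.cancel()  # shut the daemon back down

    trio.run(main)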
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship for pikers)
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -16,7 +16,6 @@
 """
 Broker configuration mgmt.

 """
 import platform
 import sys
@@ -51,7 +50,7 @@ def get_app_dir(app_name, roaming=True, force_posix=False):
     Unix (POSIX):
       ``~/.foo-bar``
     Win XP (roaming):
-      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo``
+      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo Bar``
     Win XP (not roaming):
       ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
     Win 7 (roaming):
@@ -82,8 +81,7 @@ def get_app_dir(app_name, roaming=True, force_posix=False):
         folder = os.path.expanduser("~")
         return os.path.join(folder, app_name)
     if force_posix:
-        return os.path.join(
-            os.path.expanduser("~/.{}".format(_posixify(app_name))))
+        return os.path.join(os.path.expanduser("~/.{}".format(_posixify(app_name))))
     if sys.platform == "darwin":
         return os.path.join(
             os.path.expanduser("~/Library/Application Support"), app_name
@@ -109,12 +107,7 @@ if _parent_user:
     ]
 )

-_conf_names: set[str] = {
-    'brokers',
-    'trades',
-    'watchlists',
-}
+_file_name = 'brokers.toml'

 _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
 _context_defaults = dict(
     default_map={
@@ -136,43 +129,23 @@ def _override_config_dir(
     _config_dir = path


-def _conf_fn_w_ext(
-    name: str,
-) -> str:
-    # change this if we ever change the config file format.
-    return f'{name}.toml'
-
-
-def get_conf_path(
-    conf_name: str = 'brokers',
-
-) -> str:
+def get_broker_conf_path():
     """Return the default config path normally under
     ``~/.config/piker`` on linux.

     Contains files such as:
     - brokers.toml
     - watchlists.toml
-    - trades.toml
-
-    # maybe coming soon ;)
     - signals.toml
     - strats.toml

     """
-    assert conf_name in _conf_names
-    fn = _conf_fn_w_ext(conf_name)
-    return os.path.join(
-        _config_dir,
-        fn,
-    )
+    return os.path.join(_config_dir, _file_name)


 def repodir():
-    '''
-    Return the abspath to the repo directory.
-
-    '''
+    """Return the abspath to the repo directory.
+    """
     dirpath = os.path.abspath(
         # we're 3 levels down in **this** module file
         dirname(dirname(os.path.realpath(__file__)))
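For reference, a hedged usage sketch of the generalized helper on the left-hand side; the paths assume the linux default named in the docstring:

    # assuming ``~/.config/piker`` as the config dir:
    get_conf_path()              # -> ~/.config/piker/brokers.toml
    get_conf_path('watchlists')  # -> ~/.config/piker/watchlists.toml
    get_conf_path('signals')     # AssertionError: 'signals' not in _conf_names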
@@ -181,27 +154,16 @@ def repodir():


 def load(
-    conf_name: str = 'brokers',
     path: str = None

 ) -> (dict, str):
-    '''
-    Load config file by name.
-
-    '''
-    path = path or get_conf_path(conf_name)
+    """Load broker config.
+    """
+    path = path or get_broker_conf_path()
     if not os.path.isfile(path):
-        fn = _conf_fn_w_ext(conf_name)
-
-        template = os.path.join(
-            repodir(),
-            'config',
-            fn
+        shutil.copyfile(
+            os.path.join(repodir(), 'config', 'brokers.toml'),
+            path,
         )
-        # try to copy in a template config to the user's directory
-        # if one exists.
-        if os.path.isfile(template):
-            shutil.copyfile(template, path)

     config = toml.load(path)
     log.debug(f"Read config file {path}")
@@ -210,17 +172,13 @@ def load(

 def write(
     config: dict,  # toml config as dict
-    name: str = 'brokers',
     path: str = None,

 ) -> None:
-    ''''
-    Write broker config to disk.
+    """Write broker config to disk.

     Create a ``brokers.ini`` file if one does not exist.
-
-    '''
-    path = path or get_conf_path(name)
+    """
+    path = path or get_broker_conf_path()
     dirname = os.path.dirname(path)
     if not os.path.isdir(dirname):
         log.debug(f"Creating config dir {_config_dir}")
@@ -230,10 +188,7 @@ def write(
         raise ValueError(
             "Watch out you're trying to write a blank config!")

-    log.debug(
-        f"Writing config `{name}` file to:\n"
-        f"{path}"
-    )
+    log.debug(f"Writing config file {path}")
     with open(path, 'w') as cf:
         return toml.dump(config, cf)
@@ -263,5 +218,4 @@ def load_accounts(

     # our default paper engine entry
     accounts['paper'] = None

     return accounts
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -19,25 +19,19 @@ Supervisor for docker with included specific-image service helpers.

 '''
 import os
-import time
 from typing import (
     Optional,
-    Callable,
-    Any,
+    # Any,
 )
 from contextlib import asynccontextmanager as acm

 import trio
 from trio_typing import TaskStatus
 import tractor
-from tractor.msg import NamespacePath
 import docker
 import json
-from docker.models.containers import Container as DockerContainer
-from docker.errors import (
-    DockerException,
-    APIError,
-)
+from docker.models.containers import Container
+from docker.errors import DockerException, APIError
 from requests.exceptions import ConnectionError, ReadTimeout

 from ..log import get_logger, get_console_log
@@ -46,14 +40,49 @@ from .. import config

 log = get_logger(__name__)


+_config = '''
+# piker's ``marketstore`` config.
+
+# mount this config using:
+# sudo docker run --mount \
+# type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
+# 5993:5993 alpacamarkets/marketstore:latest
+
+root_directory: data
+listen_port: 5993
+grpc_listen_port: 5995
+log_level: debug
+queryable: true
+stop_grace_period: 0
+wal_rotate_interval: 5
+stale_threshold: 5
+enable_add: true
+enable_remove: false
+
+triggers:
+  - module: ondiskagg.so
+    on: "*/1Sec/OHLCV"
+    config:
+        # filter: "nasdaq"
+        destinations:
+            - 1Min
+            - 5Min
+            - 15Min
+            - 1H
+            - 1D
+
+  - module: stream.so
+    on: '*/*/*'
+    # config:
+    #     filter: "nasdaq"
+
+'''
+
+
 class DockerNotStarted(Exception):
     'Prolly you dint start da daemon bruh'


-class ContainerError(RuntimeError):
-    'Error reported via app-container logging level'
-
-
 @acm
 async def open_docker(
     url: Optional[str] = None,
@@ -99,216 +128,145 @@ async def open_docker(
     finally:
         if client:
             client.close()
-
-
-class Container:
-    '''
-    Wrapper around a ``docker.models.containers.Container`` to include
-    log capture and relay through our native logging system and helper
-    method(s) for cancellation/teardown.
-
-    '''
-    def __init__(
-        self,
-        cntr: DockerContainer,
-    ) -> None:
-
-        self.cntr = cntr
-        # log msg de-duplication
-        self.seen_so_far = set()
-
-    async def process_logs_until(
-        self,
-        patt: str,
-        bp_on_msg: bool = False,
-    ) -> bool:
-        '''
-        Attempt to capture container log messages and relay through our
-        native logging system.
-
-        '''
-        seen_so_far = self.seen_so_far
-
-        while True:
-            logs = self.cntr.logs()
-            entries = logs.decode().split('\n')
-            for entry in entries:
-
-                # ignore null lines
-                if not entry:
-                    continue
-
-                try:
-                    record = json.loads(entry.strip())
-                except json.JSONDecodeError:
-                    if 'Error' in entry:
-                        raise RuntimeError(entry)
-                    raise
-
-                msg = record['msg']
-                level = record['level']
-                if msg and entry not in seen_so_far:
-                    seen_so_far.add(entry)
-                    if bp_on_msg:
-                        await tractor.breakpoint()
-
-                    getattr(log, level, log.error)(f'{msg}')
-
-                    # print(f'level: {level}')
-                    if level in ('error', 'fatal'):
-                        raise ContainerError(msg)
-
-                if patt in msg:
-                    return True
-
-                # do a checkpoint so we don't block if cancelled B)
-                await trio.sleep(0.01)
-
-        return False
-
-    def try_signal(
-        self,
-        signal: str = 'SIGINT',
-
-    ) -> bool:
-        try:
-            # XXX: market store doesn't seem to shutdown nicely all the
-            # time with this (maybe because there are still open grpc
-            # connections?) noticably after client connections have been
-            # made or are in use/teardown. It works just fine if you
-            # just start and stop the container tho?..
-            log.cancel(f'SENDING {signal} to {self.cntr.id}')
-            self.cntr.kill(signal)
-            return True
-
-        except docker.errors.APIError as err:
-            if 'is not running' in err.explanation:
-                return False
-
-    async def cancel(
-        self,
-        stop_msg: str,
-    ) -> None:
-
-        cid = self.cntr.id
-        # first try a graceful cancel
-        log.cancel(
-            f'SIGINT cancelling container: {cid}\n'
-            f'waiting on stop msg: "{stop_msg}"'
-        )
-        self.try_signal('SIGINT')
-
-        start = time.time()
-        for _ in range(30):
-
-            with trio.move_on_after(0.5) as cs:
-                cs.shield = True
-                await self.process_logs_until(stop_msg)
-
-                # if we aren't cancelled on above checkpoint then we
-                # assume we read the expected stop msg and terminated.
-                break
-
-            try:
-                log.info(f'Polling for container shutdown:\n{cid}')
-
-                if self.cntr.status not in {'exited', 'not-running'}:
-                    self.cntr.wait(
-                        timeout=0.1,
-                        condition='not-running',
-                    )
-
-                break
-
-            except (
-                ReadTimeout,
-            ):
-                log.info(f'Still waiting on container:\n{cid}')
-                continue
-
-            except (
-                docker.errors.APIError,
-                ConnectionError,
-            ):
-                log.exception('Docker connection failure')
-                break
-        else:
-            delay = time.time() - start
-            log.error(
-                f'Failed to kill container {cid} after {delay}s\n'
-                'sending SIGKILL..'
-            )
-            # get out the big guns, bc apparently marketstore
-            # doesn't actually know how to terminate gracefully
-            # :eyeroll:...
-            self.try_signal('SIGKILL')
-            self.cntr.wait(
-                timeout=3,
-                condition='not-running',
-            )
-
-        log.cancel(f'Container stopped: {cid}')
+            # client.api._custom_adapter.close()
+            for c in client.containers.list():
+                c.kill()


 @tractor.context
-async def open_ahabd(
+async def open_marketstored(
     ctx: tractor.Context,
-    endpoint: str,  # ns-pointer str-msg-type
-
     **kwargs,

 ) -> None:
-    get_console_log('info', name=__name__)
+    '''
+    Start and supervise a marketstore instance with its config bind-mounted
+    in from the piker config directory on the system.
+
+    The equivalent cli cmd to this code is:
+
+        sudo docker run --mount \
+        type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
+        5993:5993 alpacamarkets/marketstore:latest
+
+    '''
+    log = get_console_log('info', name=__name__)

     async with open_docker() as client:

-        # TODO: eventually offer a config-oriented API to do the mounts,
-        # params, etc. passing to ``Containter.run()``?
-        # call into endpoint for container config/init
-        ep_func = NamespacePath(endpoint).load_ref()
-        (
-            dcntr,
-            cntr_config,
-            start_msg,
-            stop_msg,
-        ) = ep_func(client)
-        cntr = Container(dcntr)
-
-        with trio.move_on_after(1):
-            found = await cntr.process_logs_until(start_msg)
-
-            if not found and cntr not in client.containers.list():
-                raise RuntimeError(
-                    'Failed to start `marketstore` check logs deats'
-                )
-
-        await ctx.started((
-            cntr.cntr.id,
-            os.getpid(),
-            cntr_config,
-        ))
-
-        try:
-
-            # TODO: we might eventually want a proxy-style msg-prot here
-            # to allow remote control of containers without needing
-            # callers to have root perms?
-            await trio.sleep_forever()
-
-        finally:
-            with trio.CancelScope(shield=True):
-                await cntr.cancel(stop_msg)
+        # create a mount from user's local piker config dir into container
+        config_dir_mnt = docker.types.Mount(
+            target='/etc',
+            source=config._config_dir,
+            type='bind',
+        )
+
+        # create a user config subdir where the marketstore
+        # backing filesystem database can be persisted.
+        persistent_data_dir = os.path.join(
+            config._config_dir, 'data',
+        )
+        if not os.path.isdir(persistent_data_dir):
+            os.mkdir(persistent_data_dir)
+
+        data_dir_mnt = docker.types.Mount(
+            target='/data',
+            source=persistent_data_dir,
+            type='bind',
+        )
+
+        cntr: Container = client.containers.run(
+            'alpacamarkets/marketstore:latest',
+            # do we need this for cmds?
+            # '-i',
+
+            # '-p 5993:5993',
+            ports={
+                '5993/tcp': 5993,  # jsonrpc
+                '5995/tcp': 5995,  # grpc
+            },
+            mounts=[config_dir_mnt, data_dir_mnt],
+            detach=True,
+            # stop_signal='SIGINT',
+            init=True,
+            # remove=True,
+        )
+        try:
+            seen_so_far = set()
+
+            async def process_logs_until(
+                match: str,
+                bp_on_msg: bool = False,
+            ):
+                logs = cntr.logs(stream=True)
+                for entry in logs:
+                    entry = entry.decode()
+
+                    try:
+                        record = json.loads(entry.strip())
+                    except json.JSONDecodeError:
+                        if 'Error' in entry:
+                            raise RuntimeError(entry)
+
+                    msg = record['msg']
+                    level = record['level']
+                    if msg and entry not in seen_so_far:
+                        seen_so_far.add(entry)
+                        if bp_on_msg:
+                            await tractor.breakpoint()
+                        getattr(log, level, log.error)(f'{msg}')
+
+                    # if "launching tcp listener for all services..." in msg:
+                    if match in msg:
+                        return True
+
+                    # do a checkpoint so we don't block if cancelled B)
+                    await trio.sleep(0)
+
+                return False
+
+            with trio.move_on_after(0.5):
+                found = await process_logs_until(
+                    "launching tcp listener for all services...",
+                )
+
+                if not found and cntr not in client.containers.list():
+                    raise RuntimeError(
+                        'Failed to start `marketstore` check logs deats'
+                    )
+
+            await ctx.started(cntr.id)
+
+            # block for the expected "teardown log msg"..
+            await process_logs_until('exiting...',)
+
+        except (
+            BaseException,
+            # trio.Cancelled,
+            # KeyboardInterrupt,
+        ):
+            cntr.kill('SIGINT')
+            with trio.move_on_after(0.5) as cs:
+                cs.shield = True
+                await process_logs_until('exiting...',)
+            raise
+
+        finally:
+            try:
+                cntr.wait(
+                    timeout=0.5,
+                    condition='not-running',
+                )
+            except (
+                ReadTimeout,
+                ConnectionError,
+            ):
+                cntr.kill()


 async def start_ahab(
     service_name: str,
-    endpoint: Callable[docker.DockerClient, DockerContainer],
-    task_status: TaskStatus[
-        tuple[
-            trio.Event,
-            dict[str, Any],
-        ],
-    ] = trio.TASK_STATUS_IGNORED,
+    task_status: TaskStatus[trio.Event] = trio.TASK_STATUS_IGNORED,

 ) -> None:
     '''
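The removed ``Container.cancel()`` above is a classic escalating-teardown loop: ask politely with SIGINT, poll for exit within a deadline, then fall back to SIGKILL. A library-agnostic sketch of the same pattern (the ``proc`` interface is hypothetical):

    import time

    def stop_with_escalation(proc, grace_s: float = 15.0) -> None:
        # hypothetical ``proc``: .signal(name) sends a signal, .exited() polls
        proc.signal('SIGINT')  # graceful request first
        deadline = time.time() + grace_s
        while time.time() < deadline:
            if proc.exited():
                return
            time.sleep(0.5)
        # grace period lapsed: escalate, as ``Container.cancel()`` does
        proc.signal('SIGKILL')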
@@ -347,19 +305,14 @@ async def start_ahab(
             )[2]  # named user's uid
         )

+        task_status.started(cn_ready)
+
         async with portal.open_context(
-            open_ahabd,
-            endpoint=str(NamespacePath.from_ref(endpoint)),
+            open_marketstored,
         ) as (ctx, first):

-            cid, pid, cntr_config = first
-            task_status.started((
-                cn_ready,
-                cntr_config,
-                (cid, pid),
-            ))
-
+            assert str(first)
+            # run till cancelled
             await trio.sleep_forever()

     # since we demoted root perms in this parent
@@ -371,6 +324,7 @@ async def start_ahab(
             # TODO: we could also consider adding
             # a ``tractor.ZombieDetected`` or something that we could raise
             # if we find the child didn't terminate.
+            # await tractor.breakpoint()
         except PermissionError:
             log.warning('Failed to cancel root permsed container')
@@ -383,3 +337,12 @@ async def start_ahab(
             return
         else:
             raise
+
+
+async def main():
+    await start_ahab()
+    await trio.sleep_forever()
+
+
+if __name__ == '__main__':
+    trio.run(main)
@@ -22,7 +22,7 @@ financial data flows.
 from __future__ import annotations
 from collections import Counter
 import time
-from typing import TYPE_CHECKING, Optional, Union
+from typing import TYPE_CHECKING

 import tractor
 import trio
@@ -32,7 +32,6 @@ from ..log import get_logger

 if TYPE_CHECKING:
     from ._sharedmem import ShmArray
-    from .feed import _FeedsBus

 log = get_logger(__name__)
@@ -91,7 +90,6 @@ async def increment_ohlc_buffer(

     total_s = 0  # total seconds counted
     lowest = min(sampler.ohlcv_shms.keys())
-    lowest_shm = sampler.ohlcv_shms[lowest][0]
     ad = lowest - 0.001

     with trio.CancelScope() as cs:
@@ -135,57 +133,21 @@ async def increment_ohlc_buffer(
                 # write to the buffer
                 shm.push(last)

-            await broadcast(delay_s, shm=lowest_shm)
-
-
-async def broadcast(
-    delay_s: int,
-    shm: Optional[ShmArray] = None,
-
-) -> None:
-    '''
-    Broadcast the given ``shm: ShmArray``'s buffer index step to any
-    subscribers for a given sample period.
-
-    The sent msg will include the first and last index which slice into
-    the buffer's non-empty data.
-
-    '''
-    subs = sampler.subscribers.get(delay_s, ())
-
-    first = last = -1
-
-    if shm is None:
-        periods = sampler.ohlcv_shms.keys()
-        # if this is an update triggered by a history update there
-        # might not actually be any sampling bus setup since there's
-        # no "live feed" active yet.
-        if periods:
-            lowest = min(periods)
-            shm = sampler.ohlcv_shms[lowest][0]
-            first = shm._first.value
-            last = shm._last.value
-
-    for stream in subs:
-        try:
-            await stream.send({
-                'first': first,
-                'last': last,
-                'index': last,
-            })
-        except (
-            trio.BrokenResourceError,
-            trio.ClosedResourceError
-        ):
-            log.error(
-                f'{stream._ctx.chan.uid} dropped connection'
-            )
-            try:
-                subs.remove(stream)
-            except ValueError:
-                log.warning(
-                    f'{stream._ctx.chan.uid} sub already removed!?'
-                )
+            # broadcast the buffer index step to any subscribers for
+            # a given sample period.
+            subs = sampler.subscribers.get(delay_s, ())
+
+            for stream in subs:
+                try:
+                    await stream.send({'index': shm._last.value})
+                except (
+                    trio.BrokenResourceError,
+                    trio.ClosedResourceError
+                ):
+                    log.error(
+                        f'{stream._ctx.chan.uid} dropped connection'
+                    )
+                    subs.remove(stream)


 @tractor.context
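Note the msg schema difference above: the extracted ``broadcast()`` sends ``{'first', 'last', 'index'}`` while the inline loop sends only ``{'index'}``. A hedged sketch of a consumer handling the richer form (``process`` and the slicing convention are assumptions):

    # assumes ``stream`` yields msgs from the 310_plus ``broadcast()``:
    async for msg in stream:
        first, last = msg.get('first', -1), msg.get('last', -1)
        if first >= 0:
            # only the [first:last] region of the shm buffer holds data
            process(shm._array[first:last])  # ``process`` is hypothetical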
@@ -220,7 +182,7 @@ async def iter_ohlc_periods(

 async def sample_and_broadcast(

-    bus: _FeedsBus,  # noqa
+    bus: '_FeedsBus',  # noqa
     shm: ShmArray,
     quote_stream: trio.abc.ReceiveChannel,
     brokername: str,
@@ -299,13 +261,7 @@ async def sample_and_broadcast(
                 # end up triggering backpressure which which will
                 # eventually block this producer end of the feed and
                 # thus other consumers still attached.
-                subs: list[
-                    tuple[
-                        Union[tractor.MsgStream, trio.MemorySendChannel],
-                        tractor.Context,
-                        Optional[float],  # tick throttle in Hz
-                    ]
-                ] = bus._subscribers[broker_symbol.lower()]
+                subs = bus._subscribers[broker_symbol.lower()]

                 # NOTE: by default the broker backend doesn't append
                 # it's own "name" into the fqsn schema (but maybe it
@@ -314,7 +270,7 @@ async def sample_and_broadcast(
                 bsym = f'{broker_symbol}.{brokername}'
                 lags: int = 0

-                for (stream, ctx, tick_throttle) in subs:
+                for (stream, tick_throttle) in subs:

                     try:
                         with trio.move_on_after(0.2) as cs:
@@ -326,41 +282,25 @@ async def sample_and_broadcast(
                                     (bsym, quote)
                                 )
                             except trio.WouldBlock:
-                                chan = ctx.chan
+                                ctx = getattr(stream, '_ctx', None)
                                 if ctx:
                                     log.warning(
                                         f'Feed overrun {bus.brokername} ->'
-                                        f'{chan.uid} !!!'
+                                        f'{ctx.channel.uid} !!!'
                                     )
                                 else:
                                     key = id(stream)
                                     overruns[key] += 1
                                     log.warning(
-                                        f'Feed overrun {broker_symbol}'
-                                        '@{bus.brokername} -> '
+                                        f'Feed overrun {bus.brokername} -> '
                                         f'feed @ {tick_throttle} Hz'
                                     )
                                     if overruns[key] > 6:
-                                        # TODO: should we check for the
-                                        # context being cancelled? this
-                                        # could happen but the
-                                        # channel-ipc-pipe is still up.
-                                        if not chan.connected():
-                                            log.warning(
-                                                'Dropping broken consumer:\n'
-                                                f'{broker_symbol}:'
-                                                f'{ctx.cid}@{chan.uid}'
-                                            )
-                                            await stream.aclose()
-                                            raise trio.BrokenResourceError
-                                        else:
-                                            log.warning(
-                                                'Feed getting overrun bro!\n'
-                                                f'{broker_symbol}:'
-                                                f'{ctx.cid}@{chan.uid}'
-                                            )
-                                            continue
+                                        log.warning(
+                                            f'Dropping consumer {stream}'
+                                        )
+                                        await stream.aclose()
+                                        raise trio.BrokenResourceError

                         else:
                             await stream.send(
                                 {bsym: quote}
@@ -376,12 +316,11 @@ async def sample_and_broadcast(
                         trio.ClosedResourceError,
                         trio.EndOfChannel,
                     ):
-                        chan = ctx.chan
+                        ctx = getattr(stream, '_ctx', None)
                         if ctx:
                             log.warning(
-                                'Dropped `brokerd`-quotes-feed connection:\n'
-                                f'{broker_symbol}:'
-                                f'{ctx.cid}@{chan.uid}'
+                                f'{ctx.chan.uid} dropped '
+                                '`brokerd`-quotes-feed connection'
                             )
                         if tick_throttle:
                             assert stream._closed
@@ -394,11 +333,7 @@ async def sample_and_broadcast(
                         try:
                             subs.remove((stream, tick_throttle))
                         except ValueError:
-                            log.error(
-                                f'Stream was already removed from subs!?\n'
-                                f'{broker_symbol}:'
-                                f'{ctx.cid}@{chan.uid}'
-                            )
+                            log.error(f'{stream} was already removed from subs!?')


 # TODO: a less naive throttler, here's some snippets:
@@ -432,12 +367,7 @@ async def uniform_rate_send(

         if left_to_sleep > 0:
             with trio.move_on_after(left_to_sleep) as cs:
-                try:
-                    sym, last_quote = await quote_stream.receive()
-                except trio.EndOfChannel:
-                    log.exception(f"feed for {stream} ended?")
-                    break
-
+                sym, last_quote = await quote_stream.receive()
                 diff = time.time() - last_send

                 if not first_quote:
@@ -510,7 +440,6 @@ async def uniform_rate_send(
             # if the feed consumer goes down then drop
             # out of this rate limiter
             log.warning(f'{stream} closed')
-            await stream.aclose()
             return

         # reset send cycle state
@@ -20,9 +20,9 @@ NumPy compatible shared memory buffers for real-time IPC streaming.
 """
 from __future__ import annotations
 from sys import byteorder
-import time
 from typing import Optional
 from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
+from multiprocessing import resource_tracker as mantracker

 if _USE_POSIX:
     from _posixshmem import shm_unlink
|
||||||
import tractor
|
import tractor
|
||||||
import numpy as np
|
import numpy as np
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from numpy.lib import recfunctions as rfn
|
|
||||||
|
|
||||||
from ..log import get_logger
|
from ..log import get_logger
|
||||||
from ._source import base_iohlc_dtype
|
from ._source import base_iohlc_dtype
|
||||||
|
@ -39,41 +38,26 @@ from ._source import base_iohlc_dtype
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
# how much is probably dependent on lifestyle
|
# Tell the "resource tracker" thing to fuck off.
|
||||||
_secs_in_day = int(60 * 60 * 24)
|
class ManTracker(mantracker.ResourceTracker):
|
||||||
# we try for a buncha times, but only on a run-every-other-day kinda week.
|
def register(self, name, rtype):
|
||||||
_days_worth = 16
|
pass
|
||||||
_default_size = _days_worth * _secs_in_day
|
|
||||||
# where to start the new data append index
|
def unregister(self, name, rtype):
|
||||||
_rt_buffer_start = int((_days_worth - 1) * _secs_in_day)
|
pass
|
||||||
|
|
||||||
|
def ensure_running(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
def cuckoff_mantracker():
|
# "know your land and know your prey"
|
||||||
|
# https://www.dailymotion.com/video/x6ozzco
|
||||||
from multiprocessing import resource_tracker as mantracker
|
mantracker._resource_tracker = ManTracker()
|
||||||
|
mantracker.register = mantracker._resource_tracker.register
|
||||||
# Tell the "resource tracker" thing to fuck off.
|
mantracker.ensure_running = mantracker._resource_tracker.ensure_running
|
||||||
class ManTracker(mantracker.ResourceTracker):
|
ensure_running = mantracker._resource_tracker.ensure_running
|
||||||
def register(self, name, rtype):
|
mantracker.unregister = mantracker._resource_tracker.unregister
|
||||||
pass
|
mantracker.getfd = mantracker._resource_tracker.getfd
|
||||||
|
|
||||||
def unregister(self, name, rtype):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def ensure_running(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
# "know your land and know your prey"
|
|
||||||
# https://www.dailymotion.com/video/x6ozzco
|
|
||||||
mantracker._resource_tracker = ManTracker()
|
|
||||||
mantracker.register = mantracker._resource_tracker.register
|
|
||||||
mantracker.ensure_running = mantracker._resource_tracker.ensure_running
|
|
||||||
# ensure_running = mantracker._resource_tracker.ensure_running
|
|
||||||
mantracker.unregister = mantracker._resource_tracker.unregister
|
|
||||||
mantracker.getfd = mantracker._resource_tracker.getfd
|
|
||||||
|
|
||||||
|
|
||||||
cuckoff_mantracker()
|
|
||||||
|
|
||||||
|
|
||||||
class SharedInt:
|
class SharedInt:
|
||||||
|
@ -99,12 +83,7 @@ class SharedInt:
|
||||||
if _USE_POSIX:
|
if _USE_POSIX:
|
||||||
# We manually unlink to bypass all the "resource tracker"
|
# We manually unlink to bypass all the "resource tracker"
|
||||||
# nonsense meant for non-SC systems.
|
# nonsense meant for non-SC systems.
|
||||||
name = self._shm.name
|
shm_unlink(self._shm.name)
|
||||||
try:
|
|
||||||
shm_unlink(name)
|
|
||||||
except FileNotFoundError:
|
|
||||||
# might be a teardown race here?
|
|
||||||
log.warning(f'Shm for {name} already unlinked?')
|
|
||||||
|
|
||||||
|
|
||||||
class _Token(BaseModel):
|
class _Token(BaseModel):
|
||||||
|
@ -204,11 +183,7 @@ class ShmArray:
|
||||||
self._post_init: bool = False
|
self._post_init: bool = False
|
||||||
|
|
||||||
# pushing data does not write the index (aka primary key)
|
# pushing data does not write the index (aka primary key)
|
||||||
dtype = shmarr.dtype
|
self._write_fields = list(shmarr.dtype.fields.keys())[1:]
|
||||||
if dtype.fields:
|
|
||||||
self._write_fields = list(shmarr.dtype.fields.keys())[1:]
|
|
||||||
else:
|
|
||||||
self._write_fields = None
|
|
||||||
|
|
||||||
# TODO: ringbuf api?
|
# TODO: ringbuf api?
|
||||||
|
|
||||||
|
@ -254,58 +229,10 @@ class ShmArray:
|
||||||
|
|
||||||
return a
|
return a
|
||||||
|
|
||||||
def ustruct(
|
|
||||||
self,
|
|
||||||
fields: Optional[list[str]] = None,
|
|
||||||
|
|
||||||
# type that all field values will be cast to
|
|
||||||
# in the returned view.
|
|
||||||
common_dtype: np.dtype = np.float,
|
|
||||||
|
|
||||||
) -> np.ndarray:
|
|
||||||
|
|
||||||
array = self._array
|
|
||||||
|
|
||||||
if fields:
|
|
||||||
selection = array[fields]
|
|
||||||
# fcount = len(fields)
|
|
||||||
else:
|
|
||||||
selection = array
|
|
||||||
# fcount = len(array.dtype.fields)
|
|
||||||
|
|
||||||
# XXX: manual ``.view()`` attempt that also doesn't work.
|
|
||||||
# uview = selection.view(
|
|
||||||
# dtype='<f16',
|
|
||||||
# ).reshape(-1, 4, order='A')
|
|
||||||
|
|
||||||
# assert len(selection) == len(uview)
|
|
||||||
|
|
||||||
u = rfn.structured_to_unstructured(
|
|
||||||
selection,
|
|
||||||
# dtype=float,
|
|
||||||
copy=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# unstruct = np.ndarray(u.shape, dtype=a.dtype, buffer=shm.buf)
|
|
||||||
# array[:] = a[:]
|
|
||||||
return u
|
|
||||||
# return ShmArray(
|
|
||||||
# shmarr=u,
|
|
||||||
# first=self._first,
|
|
||||||
# last=self._last,
|
|
||||||
# shm=self._shm
|
|
||||||
# )
|
|
||||||
|
|
||||||
def last(
|
def last(
|
||||||
self,
|
self,
|
||||||
length: int = 1,
|
length: int = 1,
|
||||||
|
|
||||||
) -> np.ndarray:
|
) -> np.ndarray:
|
||||||
'''
|
|
||||||
Return the last ``length``'s worth of ("row") entries from the
|
|
||||||
array.
|
|
||||||
|
|
||||||
'''
|
|
||||||
return self.array[-length:]
|
return self.array[-length:]
|
||||||
|
|
||||||
def push(
|
def push(
|
||||||
|
@ -314,7 +241,6 @@ class ShmArray:
|
||||||
|
|
||||||
field_map: Optional[dict[str, str]] = None,
|
field_map: Optional[dict[str, str]] = None,
|
||||||
prepend: bool = False,
|
prepend: bool = False,
|
||||||
update_first: bool = True,
|
|
||||||
start: Optional[int] = None,
|
start: Optional[int] = None,
|
||||||
|
|
||||||
) -> int:
|
) -> int:
|
||||||
|
@ -327,9 +253,10 @@ class ShmArray:
|
||||||
|
|
||||||
'''
|
'''
|
||||||
length = len(data)
|
length = len(data)
|
||||||
|
index = start if start is not None else self._last.value
|
||||||
|
|
||||||
if prepend:
|
if prepend:
|
||||||
index = (start or self._first.value) - length
|
index = self._first.value - length
|
||||||
|
|
||||||
if index < 0:
|
if index < 0:
|
||||||
raise ValueError(
|
raise ValueError(
|
||||||
|
@ -337,9 +264,6 @@ class ShmArray:
|
||||||
f'You have passed {abs(index)} too many datums.'
|
f'You have passed {abs(index)} too many datums.'
|
||||||
)
|
)
|
||||||
|
|
||||||
else:
|
|
||||||
index = start if start is not None else self._last.value
|
|
||||||
|
|
||||||
end = index + length
|
end = index + length
|
||||||
|
|
||||||
if field_map:
|
if field_map:
|
||||||
|
@ -357,17 +281,12 @@ class ShmArray:
|
||||||
# tries to access ``.array`` (which due to the index
|
# tries to access ``.array`` (which due to the index
|
||||||
# overlap will be empty). Pretty sure we've fixed it now
|
# overlap will be empty). Pretty sure we've fixed it now
|
||||||
# but leaving this here as a reminder.
|
# but leaving this here as a reminder.
|
||||||
if prepend and update_first and length:
|
if prepend:
|
||||||
assert index < self._first.value
|
assert index < self._first.value
|
||||||
|
|
||||||
if (
|
if index < self._first.value:
|
||||||
index < self._first.value
|
|
||||||
and update_first
|
|
||||||
):
|
|
||||||
assert prepend, 'prepend=True not passed but index decreased?'
|
|
||||||
self._first.value = index
|
self._first.value = index
|
||||||
|
else:
|
||||||
elif not prepend:
|
|
||||||
self._last.value = end
|
self._last.value = end
|
||||||
|
|
||||||
self._post_init = True
|
self._post_init = True
|
||||||
|
@ -403,7 +322,6 @@ class ShmArray:
|
||||||
f"Input array has unknown field(s): {only_in_theirs}"
|
f"Input array has unknown field(s): {only_in_theirs}"
|
||||||
)
|
)
|
||||||
|
|
||||||
# TODO: support "silent" prepends that don't update ._first.value?
|
|
||||||
def prepend(
|
def prepend(
|
||||||
self,
|
self,
|
||||||
data: np.ndarray,
|
data: np.ndarray,
|
||||||
|
@ -430,6 +348,12 @@ class ShmArray:
|
||||||
...
|
...
|
||||||
|
|
||||||
|
|
||||||
|
# how much is probably dependent on lifestyle
|
||||||
|
_secs_in_day = int(60 * 60 * 24)
|
||||||
|
# we try for 3 times but only on a run-every-other-day kinda week.
|
||||||
|
_default_size = 6 * _secs_in_day
|
||||||
|
|
||||||
|
|
||||||
def open_shm_array(
|
def open_shm_array(
|
||||||
|
|
||||||
key: Optional[str] = None,
|
key: Optional[str] = None,
|
||||||
|
@ -454,11 +378,7 @@ def open_shm_array(
|
||||||
create=True,
|
create=True,
|
||||||
size=a.nbytes
|
size=a.nbytes
|
||||||
)
|
)
|
||||||
array = np.ndarray(
|
array = np.ndarray(a.shape, dtype=a.dtype, buffer=shm.buf)
|
||||||
a.shape,
|
|
||||||
dtype=a.dtype,
|
|
||||||
buffer=shm.buf
|
|
||||||
)
|
|
||||||
array[:] = a[:]
|
array[:] = a[:]
|
||||||
array.setflags(write=int(not readonly))
|
array.setflags(write=int(not readonly))
|
||||||
|
|
||||||
|
@ -501,7 +421,7 @@ def open_shm_array(
|
||||||
# this sets the index to 3/4 of the length of the buffer
|
# this sets the index to 3/4 of the length of the buffer
|
||||||
# leaving a "days worth of second samples" for the real-time
|
# leaving a "days worth of second samples" for the real-time
|
||||||
# section.
|
# section.
|
||||||
last.value = first.value = _rt_buffer_start
|
last.value = first.value = int(5*_secs_in_day)
|
||||||
|
|
||||||
shmarr = ShmArray(
|
shmarr = ShmArray(
|
||||||
array,
|
array,
|
||||||
|
@ -542,26 +462,8 @@ def attach_shm_array(
|
||||||
if key in _known_tokens:
|
if key in _known_tokens:
|
||||||
assert _Token.from_msg(_known_tokens[key]) == token, "WTF"
|
assert _Token.from_msg(_known_tokens[key]) == token, "WTF"
|
||||||
|
|
||||||
# XXX: ugh, looks like due to the ``shm_open()`` C api we can't
|
|
||||||
# actually place files in a subdir, see discussion here:
|
|
||||||
# https://stackoverflow.com/a/11103289
|
|
||||||
|
|
||||||
# attach to array buffer and view as per dtype
|
# attach to array buffer and view as per dtype
|
||||||
_err: Optional[Exception] = None
|
shm = SharedMemory(name=key)
|
||||||
for _ in range(3):
|
|
||||||
try:
|
|
||||||
shm = SharedMemory(
|
|
||||||
name=key,
|
|
||||||
create=False,
|
|
||||||
)
|
|
||||||
break
|
|
||||||
except OSError as oserr:
|
|
||||||
_err = oserr
|
|
||||||
time.sleep(0.1)
|
|
||||||
else:
|
|
||||||
if _err:
|
|
||||||
raise _err
|
|
||||||
|
|
||||||
shmarr = np.ndarray(
|
shmarr = np.ndarray(
|
||||||
(size,),
|
(size,),
|
||||||
dtype=token.dtype,
|
dtype=token.dtype,
|
||||||
|
|
|
@@ -21,7 +21,6 @@ from __future__ import annotations
 from typing import Any
 import decimal

-from bidict import bidict
 import numpy as np
 from pydantic import BaseModel
 # from numba import from_dtype
@@ -33,7 +32,7 @@ ohlc_fields = [
     ('high', float),
     ('low', float),
     ('close', float),
-    ('volume', float),
+    ('volume', int),
     ('bar_wap', float),
 ]
@@ -48,16 +47,16 @@ base_ohlc_dtype = np.dtype(ohlc_fields)
 # https://github.com/numba/numba/issues/4511
 # numba_ohlc_dtype = from_dtype(base_ohlc_dtype)

-# map time frame "keys" to seconds values
-tf_in_1s = bidict({
-    1: '1s',
-    60: '1m',
-    60*5: '5m',
-    60*15: '15m',
-    60*30: '30m',
-    60*60: '1h',
-    60*60*24: '1d',
-})
+# map time frame "keys" to minutes values
+tf_in_1m = {
+    '1m': 1,
+    '5m': 5,
+    '15m': 15,
+    '30m': 30,
+    '1h': 60,
+    '4h': 240,
+    '1d': 1440,
+}


 def mk_fqsn(
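The ``bidict`` on the left-hand side buys a two-way mapping for one definition; a quick sketch:

    from bidict import bidict

    tf_in_1s = bidict({1: '1s', 60: '1m', 60*60: '1h'})
    assert tf_in_1s[60] == '1m'            # seconds -> timeframe key
    assert tf_in_1s.inverse['1h'] == 3600  # timeframe key -> seconds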
@@ -92,7 +91,7 @@ def ohlc_zeros(length: int) -> np.ndarray:
     return np.zeros(length, dtype=base_ohlc_dtype)


-def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
+def uncons_fqsn(fqsn: str) -> tuple[str, str, str]:
     '''
     Unpack a fully-qualified-symbol-name to ``tuple``.
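Going by the fqsn shape documented in ``front_fqsn()`` further down (``<instrumentname>.<venue>.<suffixwithmetadata>.<brokerbackendname>``), unpacking plausibly behaves like this hedged sketch (the example values are hypothetical):

    broker, key, suffix = unpack_fqsn('mnq.globex.20220617.ib')
    # broker -> 'ib', key -> 'mnq.globex', suffix -> '20220617'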
@@ -178,7 +177,7 @@ class Symbol(BaseModel):
         # XXX: like wtf..
         # ) -> 'Symbol':
     ) -> None:
-        broker, key, suffix = unpack_fqsn(fqsn)
+        broker, key, suffix = uncons_fqsn(fqsn)
         return cls.from_broker_info(
             broker,
             key,
@@ -222,23 +221,6 @@ class Symbol(BaseModel):
         return (key, broker)

     def front_fqsn(self) -> str:
-        '''
-        fqsn = "fully qualified symbol name"
-
-        Basically the idea here is for all client-ish code (aka programs/actors
-        that ask the provider agnostic layers in the stack for data) should be
-        able to tell which backend / venue / derivative each data feed/flow is
-        from by an explicit string key of the current form:
-
-        <instrumentname>.<venue>.<suffixwithmetadata>.<brokerbackendname>
-
-        TODO: I have thoughts that we should actually change this to be
-        more like an "attr lookup" (like how the web should have done
-        urls, but marketting peeps ruined it etc. etc.):
-
-        <broker>.<venue>.<instrumentname>.<suffixwithmetadata>
-
-        '''
         tokens = self.tokens()
         fqsn = '.'.join(tokens)
         return fqsn
@@ -53,13 +53,11 @@ class NoBsWs:
     def __init__(
         self,
         url: str,
-        token: str,
         stack: AsyncExitStack,
         fixture: Callable,
         serializer: ModuleType = json,
     ):
         self.url = url
-        self.token = token
         self.fixture = fixture
         self._stack = stack
         self._ws: 'WebSocketConnection' = None  # noqa
@@ -83,15 +81,9 @@ class NoBsWs:
                     trio_websocket.open_websocket_url(self.url)
                 )
                 # rerun user code fixture
-                if self.token == '':
-                    ret = await self._stack.enter_async_context(
-                        self.fixture(self)
-                    )
-                else:
-                    ret = await self._stack.enter_async_context(
-                        self.fixture(self, self.token)
-                    )
+                ret = await self._stack.enter_async_context(
+                    self.fixture(self)
+                )

                 assert ret is None

                 log.info(f'Connection success: {self.url}')
@@ -135,14 +127,12 @@ async def open_autorecon_ws(

     # TODO: proper type annot smh
     fixture: Callable,
-    # used for authenticated websockets
-    token: str = '',
 ) -> AsyncGenerator[tuple[...], NoBsWs]:
     """Apparently we can QoS for all sorts of reasons..so catch em.

     """
     async with AsyncExitStack() as stack:
-        ws = NoBsWs(url, token, stack, fixture=fixture)
+        ws = NoBsWs(url, stack, fixture=fixture)
         await ws._connect()

         try:
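With the ``token`` parameter gone on the right-hand side, credentials can still reach an authenticated fixture by closing over them, e.g. with ``functools.partial``. A hedged sketch (the auth handshake shown is hypothetical):

    from contextlib import asynccontextmanager
    from functools import partial

    @asynccontextmanager
    async def auth_fixture(ws, token: str):
        # runs on every (re)connect; the token rides in the closure
        await ws.send_msg({'auth': token})  # assumes a ``send_msg()`` proxy
        yield

    # open_autorecon_ws(url, fixture=partial(auth_fixture, token='s3cr3t'))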
@@ -22,32 +22,26 @@ This module is enabled for ``brokerd`` daemons.
 """
 from __future__ import annotations
 from dataclasses import dataclass, field
-from datetime import datetime
 from contextlib import asynccontextmanager
 from functools import partial
-from pprint import pformat
 from types import ModuleType
 from typing import (
     Any,
     AsyncIterator, Optional,
-    Generator,
     Awaitable,
-    TYPE_CHECKING,
-    Union,
 )

+import pendulum
 import trio
 from trio.abc import ReceiveChannel
 from trio_typing import TaskStatus
-import trimeter
 import tractor
-from tractor.trionics import maybe_open_context
 from pydantic import BaseModel
-import pendulum
 import numpy as np

 from ..brokers import get_brokermod
-from ..calc import humanize
+from .._cacheables import maybe_open_context
 from ..log import get_logger, get_console_log
 from .._daemon import (
     maybe_spawn_brokerd,
@@ -62,24 +56,17 @@ from .ingest import get_ingestormod
 from ._source import (
     base_iohlc_dtype,
     Symbol,
-    unpack_fqsn,
+    uncons_fqsn,
 )
 from ..ui import _search
 from ._sampling import (
     sampler,
-    broadcast,
     increment_ohlc_buffer,
     iter_ohlc_periods,
     sample_and_broadcast,
     uniform_rate_send,
 )
-from ..brokers._util import (
-    NoData,
-    DataUnavailable,
-)
-
-if TYPE_CHECKING:
-    from .marketstore import Storage

 log = get_logger(__name__)
@@ -117,13 +104,7 @@ class _FeedsBus(BaseModel):
     # https://github.com/samuelcolvin/pydantic/issues/2816
     _subscribers: dict[
         str,
-        list[
-            tuple[
-                Union[tractor.MsgStream, trio.MemorySendChannel],
-                tractor.Context,
-                Optional[float],  # tick throttle in Hz
-            ]
-        ]
+        list[tuple[tractor.MsgStream, Optional[float]]]
     ] = {}

     async def start_task(
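The per-subscriber tuple shapes above are why ``sample_and_broadcast()`` earlier unpacks two vs. three items; schematically (entries hypothetical):

    # left (310_plus): (stream-or-mem-chan, its tractor context, throttle Hz)
    entry = (stream, ctx, 4.0)   # throttled to 4 Hz
    # right (mkts_backu): (msg stream, optional throttle Hz)
    entry = (stream, None)       # unthrottled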
@@ -212,449 +193,20 @@ async def _setup_persistent_brokerd(
     await trio.sleep_forever()


-def diff_history(
-    array,
-    start_dt,
-    end_dt,
-    last_tsdb_dt: Optional[datetime] = None
-
-) -> np.ndarray:
-
-    to_push = array
-
-    if last_tsdb_dt:
-        s_diff = (start_dt - last_tsdb_dt).seconds
-
-        # if we detect a partial frame's worth of data
-        # that is new, slice out only that history and
-        # write to shm.
-        if (
-            s_diff < 0
-        ):
-            if abs(s_diff) < len(array):
-                # the + 1 is because ``last_tsdb_dt`` is pulled from
-                # the last row entry for the ``'time'`` field retrieved
-                # from the tsdb.
-                to_push = array[abs(s_diff) + 1:]
-
-            else:
-                # pass back only the portion of the array that is
-                # greater than the last time stamp in the tsdb.
-                time = array['time']
-                to_push = array[time >= last_tsdb_dt.timestamp()]
-
-        log.info(
-            f'Pushing partial frame {to_push.size} to shm'
-        )
-
-    return to_push
-
-
 async def start_backfill(
     mod: ModuleType,
-    bfqsn: str,
+    fqsn: str,
     shm: ShmArray,

-    last_tsdb_dt: Optional[datetime] = None,
-    storage: Optional[Storage] = None,
-    write_tsdb: bool = True,
-    tsdb_is_up: bool = False,
-
     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

 ) -> int:

-    async with mod.open_history_client(bfqsn) as (hist, config):
-
-        # get latest query's worth of history all the way
-        # back to what is recorded in the tsdb
-        array, start_dt, end_dt = await hist(end_dt=None)
-
-        times = array['time']
-
-        # sample period step size in seconds
-        step_size_s = (
-            pendulum.from_timestamp(times[-1])
-            - pendulum.from_timestamp(times[-2])
-        ).seconds
-
-        # "frame"'s worth of sample period steps in seconds
-        frame_size_s = len(array) * step_size_s
-
-        to_push = diff_history(
-            array,
-            start_dt,
-            end_dt,
-            last_tsdb_dt=last_tsdb_dt,
-        )
-
-        log.info(f'Pushing {to_push.size} to shm!')
-        shm.push(to_push)
-
-        for delay_s in sampler.subscribers:
-            await broadcast(delay_s)
-
-        # signal that backfilling to tsdb's end datum is complete
-        bf_done = trio.Event()
-
-        # let caller unblock and deliver latest history frame
-        task_status.started((shm, start_dt, end_dt, bf_done))
-
-        # based on the sample step size, maybe load a certain amount history
-        if last_tsdb_dt is None:
-            if step_size_s not in (1, 60):
-                raise ValueError(
-                    '`piker` only needs to support 1m and 1s sampling '
-                    'but ur api is trying to deliver a longer '
-                    f'timeframe of {step_size_s} ' 'seconds.. so ye, dun '
-                    'do dat brudder.'
-                )
-
-            # when no tsdb "last datum" is provided, we just load
-            # some near-term history.
-            periods = {
-                1: {'days': 1},
-                60: {'days': 14},
-            }
-
-            if tsdb_is_up:
-                # do a decently sized backfill and load it into storage.
-                periods = {
-                    1: {'days': 6},
-                    60: {'years': 2},
-                }
-
-            kwargs = periods[step_size_s]
-            last_tsdb_dt = start_dt.subtract(**kwargs)
-
-        # configure async query throttling
-        erlangs = config.get('erlangs', 1)
-        rate = config.get('rate', 1)
-        frames = {}
-
-        def iter_dts(start: datetime):
-
-            while True:
-
-                hist_period = pendulum.period(
-                    start,
-                    last_tsdb_dt,
-                )
-                dtrange = list(hist_period.range('seconds', frame_size_s))
-                log.debug(f'New datetime index:\n{pformat(dtrange)}')
-
-                for end_dt in dtrange:
-                    log.info(f'Yielding next frame start {end_dt}')
-                    start = yield end_dt
-
-                    # if caller sends a new start date, reset to that
-                    if start is not None:
-                        log.warning(f'Resetting date range: {start}')
-                        break
-                else:
-                    # from while
-                    return
-
-        # pull new history frames until we hit latest
-        # already in the tsdb or a max count.
-        count = 0
-
-        # NOTE: when gaps are detected in the retrieved history (by
-        # comparison of the end - start versus the expected "frame size"
-        # in seconds) we need a way to alert the async request code not
-        # to continue to query for data "within the gap". This var is
-        # set in such cases such that further requests in that period
-        # are discarded and further we reset the "datetime query frame
-        # index" in such cases to avoid needless noop requests.
-        earliest_end_dt: Optional[datetime] = start_dt
-
-        async def get_ohlc_frame(
-            input_end_dt: datetime,
-            iter_dts_gen: Generator[datetime],
-
-        ) -> np.ndarray:
-
-            nonlocal count, frames, earliest_end_dt, frame_size_s
-            count += 1
-
-            if input_end_dt > earliest_end_dt:
-                # if a request comes in for an inter-gap frame we
-                # discard it since likely this request is still
-                # lingering from before the reset of ``iter_dts()`` via
-                # ``.send()`` below.
-                log.info(f'Discarding request history ending @ {input_end_dt}')
-
-                # signals to ``trimeter`` loop to discard and
-                # ``continue`` in its schedule loop.
-                return None
-
-            try:
-                log.info(
-                    f'Requesting {step_size_s}s frame ending in {input_end_dt}'
-                )
-                array, start_dt, end_dt = await hist(end_dt=input_end_dt)
-                assert array['time'][0] == start_dt.timestamp()
-
-            except NoData:
-                log.warning(
-                    f'NO DATA for {frame_size_s}s frame @ {input_end_dt} ?!?'
-                )
-                return None  # discard signal
-
-            except DataUnavailable as duerr:
-                # broker is being a bish and we can't pull
-                # any more..
-                log.warning('backend halted on data deliver !?!?')
-
-                # ugh, what's a better way?
-                # TODO: fwiw, we probably want a way to signal a throttle
-                # condition (eg. with ib) so that we can halt the
-                # request loop until the condition is resolved?
-                return duerr
-
-            diff = end_dt - start_dt
-            frame_time_diff_s = diff.seconds
-            expected_frame_size_s = frame_size_s + step_size_s
-
-            if frame_time_diff_s > expected_frame_size_s:
-
-                # XXX: query result includes a start point prior to our
-                # expected "frame size" and thus is likely some kind of
-                # history gap (eg. market closed period, outage, etc.)
-                # so indicate to the request loop that this gap is
-                # expected by both,
-                # - resetting the ``iter_dts()`` generator to start at
-                #   the new start point delivered in this result
-                # - setting the non-locally scoped ``earliest_end_dt``
-                #   to this new value so that the request loop doesn't
-                #   get tripped up thinking there's an out of order
-                #   request-result condition.
-
-                log.warning(
-                    f'History frame ending @ {end_dt} appears to have a gap:\n'
-                    f'{diff} ~= {frame_time_diff_s} seconds'
-                )
-
-                # reset dtrange gen to new start point
-                try:
-                    next_end = iter_dts_gen.send(start_dt)
-                    log.info(
-                        f'Reset frame index to start at {start_dt}\n'
-                        f'Was at {next_end}'
-                    )
-
-                    # NOTE: manually set "earliest end datetime" index-value
-                    # to avoid the request loop getting confused about
-                    # new frames that are earlier in history - i.e. this
-                    # **is not** the case of out-of-order frames from
-                    # an async batch request.
-                    earliest_end_dt = start_dt
-
-                except StopIteration:
-                    # gen already terminated meaning we probably already
-                    # exhausted it via frame requests.
-                    log.info(
-                        "Datetime index already exhausted, can't reset.."
-                    )
-
-            to_push = diff_history(
-                array,
-                start_dt,
-                end_dt,
-                last_tsdb_dt=last_tsdb_dt,
-            )
-            ln = len(to_push)
-            if ln:
-                log.info(f'{ln} bars for {start_dt} -> {end_dt}')
-                frames[input_end_dt.timestamp()] = (to_push, start_dt, end_dt)
-                return to_push, start_dt, end_dt
-
-            else:
-                log.warning(
-                    f'{ln} BARS TO PUSH after diff?!: {start_dt} -> {end_dt}'
-                )
-                return None
-
-        # initial dt index starts at the start of the first query result
-        idts = iter_dts(start_dt)
-
-        async with trimeter.amap(
-            partial(
-                get_ohlc_frame,
-                # we close in the ``iter_dts()`` gen so we can send
-                # reset signals as needed for gap detection in the
-                # history.
-                iter_dts_gen=idts,
-            ),
-            idts,
-
-            capture_outcome=True,
-            include_value=True,
-
-            # better technical names bruv...
-            max_at_once=erlangs,
-            max_per_second=rate,
-
-        ) as outcomes:
-
-            # Then iterate over the return values, as they become available
-            # (i.e., not necessarily in the original order)
-            async for input_end_dt, outcome in outcomes:
-
-                try:
-                    out = outcome.unwrap()
-
-                    if out is None:
-                        # skip signal
-                        continue
-
-                    elif isinstance(out, DataUnavailable):
-                        # no data available case signal.. so just kill
-                        # further requests and basically just stop
-                        # trying...
-                        break
-
-                except Exception:
-                    log.exception('uhh trimeter bail')
-                    raise
-                else:
-                    to_push, start_dt, end_dt = out
-
-                    if not len(to_push):
-                        # diff returned no new data (i.e. we probably hit
-                        # the ``last_tsdb_dt`` point).
-                        # TODO: raise instead?
-                        log.warning(f'No history for range {start_dt} -> {end_dt}')
-                        continue
-
-                    # pipeline-style pull frames until we need to wait for
-                    # the next in order to arrive.
-                    # i = end_dts.index(input_end_dt)
-                    # print(f'latest end_dt {end_dt} found at index {i}')
-
-                    epochs = list(reversed(sorted(frames)))
-                    for epoch in epochs:
-
-                        start = shm.array['time'][0]
-                        last_shm_prepend_dt = pendulum.from_timestamp(start)
-                        earliest_frame_queue_dt = pendulum.from_timestamp(epoch)
-
-                        diff = start - epoch
-
-                        if diff < 0:
-                            log.warning(
-                                'Discarding out of order frame:\n'
-                                f'{earliest_frame_queue_dt}'
-                            )
-                            frames.pop(epoch)
-                            continue
-                            # await tractor.breakpoint()
-
-                        if diff > step_size_s:
-
-                            if earliest_end_dt < earliest_frame_queue_dt:
-                                # XXX: an expected gap was encountered (see
-                                # logic in ``get_ohlc_frame()``, so allow
-                                # this frame through to the storage layer.
-                                log.warning(
-                                    f'Expected history gap of {diff}s:\n'
-                                    f'{earliest_frame_queue_dt} <- '
-                                    f'{earliest_end_dt}'
-                                )
-
-                            elif (
-                                erlangs > 1
-                            ):
-                                # we don't yet have the next frame to push
-                                # so break back to the async request loop
-                                # while we wait for more async frame-results
-                                # to arrive.
-                                if len(frames) >= erlangs:
-                                    log.warning(
-                                        'Frame count in async-queue is greater '
-                                        'than erlangs?\n'
-                                        'There seems to be a gap between:\n'
-                                        f'{earliest_frame_queue_dt} <- '
-                                        f'{last_shm_prepend_dt}\n'
-                                        'Conducting manual call for frame ending: '
-                                        f'{last_shm_prepend_dt}'
-                                    )
-                                    (
-                                        to_push,
-                                        start_dt,
-                                        end_dt,
-                                    ) = await get_ohlc_frame(
-                                        input_end_dt=last_shm_prepend_dt,
-                                        iter_dts_gen=idts,
-                                    )
-                                    last_epoch = to_push['time'][-1]
-                                    diff = start - last_epoch
-
-                                    if diff > step_size_s:
-                                        await tractor.breakpoint()
-                                        raise DataUnavailable(
-                                            'An awkward frame was found:\n'
-                                            f'{start_dt} -> {end_dt}:\n{to_push}'
-                                        )
-
-                                    else:
-                                        frames[last_epoch] = (
-                                            to_push, start_dt, end_dt)
-                                        break
-
-                                expect_end = pendulum.from_timestamp(start)
-                                expect_start = expect_end.subtract(
-                                    seconds=frame_size_s)
-                                log.warning(
-                                    'waiting on out-of-order history frame:\n'
-                                    f'{expect_end - expect_start}'
-                                )
-                                break
-
-                        to_push, start_dt, end_dt = frames.pop(epoch)
-                        ln = len(to_push)
-
-                        # bail gracefully on shm allocation overrun/full condition
-                        try:
-                            shm.push(to_push, prepend=True)
-                        except ValueError:
-                            log.info(
-                                f'Shm buffer overrun on: {start_dt} -> {end_dt}?'
-                            )
-                            break
-
-                        log.info(
-                            f'Shm pushed {ln} frame:\n'
-                            f'{start_dt} -> {end_dt}'
-                        )
-                        # keep track of most recent "prepended" ``start_dt``
-                        # both for detecting gaps and ensuring async
-                        # frame-result order.
-                        earliest_end_dt = start_dt
-
-                        if (
-                            storage is not None
-                            and write_tsdb
-                        ):
-                            log.info(
-                                f'Writing {ln} frame to storage:\n'
-                                f'{start_dt} -> {end_dt}'
-                            )
-                            await storage.write_ohlcv(
-                                f'{bfqsn}.{mod.name}',  # lul..
-                                to_push,
-                            )
-
-        # TODO: can we only trigger this if the respective
-        # history is "in view"?!?
-        # XXX: extremely important, there can be no checkpoints
-        # in the block above to avoid entering new ``frames``
-        # values while we're pipelining the current ones to
-        # memory...
-        for delay_s in sampler.subscribers:
-            await broadcast(delay_s)
-
-        bf_done.set()
+    return await mod.backfill_bars(
+        fqsn,
+        shm,
+        task_status=task_status,
+    )
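Review note: the removed ``iter_dts()`` above is a generator the request loop can "reset" mid-flight: sending it a new start datetime via ``.send()`` makes it rebuild its frame index from there, which is how gap detection skips dead query ranges. A standalone sketch of that protocol, under the assumption of a plain stdlib datetime walk rather than pendulum periods; all names here are illustrative:

from datetime import datetime, timedelta

def iter_frame_ends(start: datetime, stop: datetime, frame_size_s: int):
    step = timedelta(seconds=frame_size_s)
    while True:
        cursor = start
        while cursor > stop:
            sent = yield cursor
            if sent is not None:
                # caller detected a gap: restart the index there
                start = sent
                break
            cursor -= step
        else:
            return  # exhausted without a reset

# usage: gen = iter_frame_ends(now, tsdb_last, 3600)
# end = next(gen); ...; end = gen.send(new_start)  # reset past a gap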
 async def manage_history(

@@ -698,154 +250,115 @@ async def manage_history(
     # for now only do backfilling if no tsdb can be found
     do_legacy_backfill = not is_up and opened

-    bfqsn = fqsn.replace('.' + mod.name, '')
     open_history_client = getattr(mod, 'open_history_client', None)
-    assert open_history_client

     if is_up and opened and open_history_client:

         log.info('Found existing `marketstored`')
         from . import marketstore

         async with marketstore.open_storage_client(
             fqsn,
         ) as storage:

-            # TODO: this should be used verbatim for the pure
-            # shm backfiller approach below.
-
-            # start history anal and load missing new data via backend.
-            series, _, last_tsdb_dt = await storage.load(fqsn)
-
-            broker, symbol, expiry = unpack_fqsn(fqsn)
-            (
-                shm,
-                latest_start_dt,
-                latest_end_dt,
-                bf_done,
-            ) = await bus.nursery.start(
-                partial(
-                    start_backfill,
-                    mod,
-                    bfqsn,
-                    shm,
-                    last_tsdb_dt=last_tsdb_dt,
-                    tsdb_is_up=True,
-                    storage=storage,
-                )
-            )
-
-            # if len(shm.array) < 2:
-            # TODO: there's an edge case here to solve where if the last
-            # frame before market close (at least on ib) was pushed and
-            # there was only "1 new" row pushed from the first backfill
-            # query-iteration, then the sample step sizing calcs will
-            # break upstream from here since you can't diff on at least
-            # 2 steps... probably should also add logic to compute from
-            # the tsdb series and stash that somewhere as meta data on
-            # the shm buffer?.. no se.
-
-            task_status.started(shm)
-            some_data_ready.set()
-
-            await bf_done.wait()
-            # do diff against last start frame of history and only fill
-            # in from the tsdb an allotment that allows for most recent
-            # to be loaded into mem *before* tsdb data.
-            if last_tsdb_dt:
-                dt_diff_s = (
-                    latest_start_dt - last_tsdb_dt
-                ).seconds
-            else:
-                dt_diff_s = 0
-
-            # await trio.sleep_forever()
-            # TODO: see if there's faster multi-field reads:
-            # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields
-            # re-index with a `time` and index field
-            prepend_start = shm._first.value
-
-            # sanity check on most-recent-data loading
-            assert prepend_start > dt_diff_s
-
-            history = list(series.values())
-            if history:
-                fastest = history[0]
-                to_push = fastest[:prepend_start]
-
-                shm.push(
-                    to_push,
-
-                    # insert the history pre a "days worth" of samples
-                    # to leave some real-time buffer space at the end.
-                    prepend=True,
-                    # update_first=False,
-                    # start=prepend_start,
-                    field_map=marketstore.ohlc_key_map,
-                )
-
-                # load as much from storage into shm as space will
-                # allow according to user's shm size settings.
-                count = 0
-                end = fastest['Epoch'][0]
-
-                while shm._first.value > 0:
-                    count += 1
-                    series = await storage.read_ohlcv(
-                        fqsn,
-                        end=end,
-                    )
-                    history = list(series.values())
-                    fastest = history[0]
-                    end = fastest['Epoch'][0]
-                    prepend_start -= len(to_push)
-                    to_push = fastest[:prepend_start]
-
-                    shm.push(
-                        to_push,
-
-                        # insert the history pre a "days worth" of samples
-                        # to leave some real-time buffer space at the end.
-                        prepend=True,
-                        # update_first=False,
-                        # start=prepend_start,
-                        field_map=marketstore.ohlc_key_map,
-                    )
-
-                    # manually trigger step update to update charts/fsps
-                    # which need an incremental update.
-                    # NOTE: the way this works is super duper
-                    # un-intuitive right now:
-                    # - the broadcaster fires a msg to the fsp subsystem.
-                    # - fsp subsys then checks for a sample step diff and
-                    #   possibly recomputes prepended history.
-                    # - the fsp then sends back to the parent actor
-                    #   (usually a chart showing graphics for said fsp)
-                    #   which tells the chart to conduct a manual full
-                    #   graphics loop cycle.
-                    for delay_s in sampler.subscribers:
-                        await broadcast(delay_s)
-
-                    if count > 6:
-                        break
-
-                log.info(f'Loaded {to_push.shape} datums from storage')
-
-                # TODO: write new data to tsdb to be ready for the next read.
+            tsdb_arrays = await storage.read_ohlcv(fqsn)
+
+            if not tsdb_arrays:
+                do_legacy_backfill = True
+
+            else:
+                log.info(f'Loaded tsdb history {tsdb_arrays}')
+
+                fastest = list(tsdb_arrays.values())[0]
+                times = fastest['Epoch']
+                first, last = times[0], times[-1]
+                first_tsdb_dt, last_tsdb_dt = map(
+                    pendulum.from_timestamp, [first, last]
+                )
+
+                # TODO: this should be used verbatim for the pure
+                # shm backfiller approach below.
+
+                def diff_history(
+                    array,
+                    start_dt,
+                    end_dt,
+
+                ) -> np.ndarray:
+
+                    s_diff = (last_tsdb_dt - start_dt).seconds
+
+                    # if we detect a partial frame's worth of data
+                    # that is new, slice out only that history and
+                    # write to shm.
+                    if s_diff > 0:
+                        assert last_tsdb_dt > start_dt
+                        selected = array['time'] > last_tsdb_dt.timestamp()
+                        to_push = array[selected]
+                        log.info(
+                            f'Pushing partial frame {to_push.size} to shm'
+                        )
+                        return to_push
+
+                    else:
+                        return array
+
+                # start history anal and load missing new data via backend.
+                async with open_history_client(fqsn) as hist:
+
+                    # get latest query's worth of history all the way
+                    # back to what is recorded in the tsdb
+                    array, start_dt, end_dt = await hist(end_dt='')
+                    to_push = diff_history(array, start_dt, end_dt)
+                    shm.push(to_push)
+
+                    # let caller unblock and deliver latest history frame
+                    task_status.started(shm)
+                    some_data_ready.set()
+
+                    # pull new history frames until we hit latest
+                    # already in the tsdb
+                    while start_dt > last_tsdb_dt:
+                        array, start_dt, end_dt = await hist(end_dt=start_dt)
+                        to_push = diff_history(array, start_dt, end_dt)
+                        shm.push(to_push, prepend=True)
+
+                # TODO: see if there's faster multi-field reads:
+                # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields
+                # re-index with a `time` and index field
+                shm.push(
+                    fastest[-shm._first.value:],
+
+                    # insert the history pre a "days worth" of samples
+                    # to leave some real-time buffer space at the end.
+                    prepend=True,
+                    # start=shm._len - _secs_in_day,
+                    field_map={
+                        'Epoch': 'time',
+                        'Open': 'open',
+                        'High': 'high',
+                        'Low': 'low',
+                        'Close': 'close',
+                        'Volume': 'volume',
+                    },
+                )
+
+                # TODO: write new data to tsdb to be ready for the next
+                # read.

     if do_legacy_backfill:
         # do a legacy incremental backfill from the provider.
         log.info('No existing `marketstored` found..')

+        bfqsn = fqsn.replace('.' + mod.name, '')
         # start history backfill task ``backfill_bars()`` is
         # a required backend func this must block until shm is
         # filled with first set of ohlc bars
         await bus.nursery.start(
-            partial(
-                start_backfill,
-                mod,
-                bfqsn,
-                shm,
-            )
+            start_backfill,
+            mod,
+            bfqsn,
+            shm,
         )

     # yield back after client connect with filled shm
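Review note: both versions of ``diff_history()`` reduce to one idea, a boolean mask over the structured array's ``'time'`` field that keeps only rows newer than the last datum already in the tsdb. A minimal self-contained sketch with the field names matching the shm schema; ``newer_than`` and the sample data are illustrative:

import numpy as np

def newer_than(array: np.ndarray, last_ts: float) -> np.ndarray:
    # boolean mask over the 'time' field selects the partial frame
    return array[array['time'] > last_ts]

bars = np.array(
    [(1.0, 10.0), (2.0, 11.0), (3.0, 12.0)],
    dtype=[('time', 'f8'), ('close', 'f8')],
)
assert len(newer_than(bars, 1.5)) == 2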
@@ -864,10 +377,8 @@ async def manage_history(

 async def allocate_persistent_feed(
     bus: _FeedsBus,
-
     brokername: str,
     symbol: str,
-
     loglevel: str,
     start_stream: bool = True,

@@ -887,7 +398,6 @@ async def allocate_persistent_feed(
     - a real-time streaming task which connec

     '''
-    # load backend module
     try:
         mod = get_brokermod(brokername)
     except ImportError:
@@ -944,10 +454,7 @@ async def allocate_persistent_feed(
     # true fqsn
     fqsn = '.'.join((bfqsn, brokername))
     # add a fqsn entry that includes the ``.<broker>`` suffix
-    # and an entry that includes the broker-specific fqsn (including
-    # any new suffixes or elements as injected by the backend).
     init_msg[fqsn] = msg
-    init_msg[bfqsn] = msg

     # TODO: pretty sure we don't need this? why not just leave 1s as
     # the fastest "sample period" since we'll probably always want that
@@ -961,14 +468,13 @@ async def allocate_persistent_feed(
     await some_data_ready.wait()

     # append ``.<broker>`` suffix to each quote symbol
-    acceptable_not_fqsn_with_broker_suffix = symbol + f'.{brokername}'
+    bsym = symbol + f'.{brokername}'

     generic_first_quotes = {
-        acceptable_not_fqsn_with_broker_suffix: first_quote,
+        bsym: first_quote,
         fqsn: first_quote,
     }

-    bus.feeds[symbol] = bus.feeds[bfqsn] = (
+    bus.feeds[symbol] = bus.feeds[fqsn] = (
         init_msg,
         generic_first_quotes,
     )
@@ -1019,9 +525,9 @@ async def open_feed_bus(

     ctx: tractor.Context,
     brokername: str,
-    symbol: str,  # normally expected to the broker-specific fqsn
+    symbol: str,
     loglevel: str,
     tick_throttle: Optional[float] = None,
     start_stream: bool = True,

 ) -> None:
@@ -1041,9 +547,7 @@ async def open_feed_bus(
     # TODO: check for any stale shm entries for this symbol
     # (after we also group them in a nice `/dev/shm/piker/` subdir).
     # ensure we are who we think we are
-    servicename = tractor.current_actor().name
-    assert 'brokerd' in servicename
-    assert brokername in servicename
+    assert 'brokerd' in tractor.current_actor().name

     bus = get_feed_bus(brokername)

@@ -1053,7 +557,7 @@ async def open_feed_bus(
     entry = bus.feeds.get(symbol)
     if entry is None:
         # allocate a new actor-local stream bus which
-        # will persist for this `brokerd`'s service lifetime.
+        # will persist for this `brokerd`.
         async with bus.task_lock:
             await bus.nursery.start(
                 partial(
@@ -1082,7 +586,7 @@ async def open_feed_bus(
     # true fqsn
     fqsn = '.'.join([bfqsn, brokername])
     assert fqsn in first_quotes
-    assert bus.feeds[bfqsn]
+    assert bus.feeds[fqsn]

     # broker-ambiguous symbol (provided on cli - eg. mnq.globex.ib)
     bsym = symbol + f'.{brokername}'
@@ -1125,10 +629,10 @@ async def open_feed_bus(
             recv,
             stream,
         )
-        sub = (send, ctx, tick_throttle)
+        sub = (send, tick_throttle)

     else:
-        sub = (stream, ctx, tick_throttle)
+        sub = (stream, tick_throttle)

     subs = bus._subscribers[bfqsn]
     subs.append(sub)
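Review note: the ``sub = (send, ...)`` branch above implies the throttled-subscriber wiring: raw ticks go into a trio memory channel whose receive side is drained to the IPC stream at a fixed rate. A runnable sketch of that shape, assuming a print as a stand-in for the real ``stream.send()``:

import trio

async def uniform_drain(recv: trio.MemoryReceiveChannel, rate_hz: float):
    period = 1 / rate_hz
    async for quote in recv:
        print('forwarding', quote)  # stand-in for `await stream.send(quote)`
        await trio.sleep(period)

async def main():
    send, recv = trio.open_memory_channel(2**10)
    async with trio.open_nursery() as n:
        n.start_soon(uniform_drain, recv, 4.0)
        for i in range(8):
            await send.send({'tick': i})
        await send.aclose()

trio.run(main)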
@@ -1211,10 +715,10 @@ class Feed:
     shm: ShmArray
     mod: ModuleType
     first_quotes: dict  # symbol names to first quote dicts
-    _portal: tractor.Portal
-    stream: trio.abc.ReceiveChannel[dict[str, Any]]
-    status: dict[str, Any]
+    _portal: tractor.Portal

+    stream: trio.abc.ReceiveChannel[dict[str, Any]]
     throttle_rate: Optional[int] = None

     _trade_stream: Optional[AsyncIterator[dict[str, Any]]] = None
@@ -1282,7 +786,7 @@ async def install_brokerd_search(
         # a backend module?
         pause_period=getattr(
             brokermod, '_search_conf', {}
         ).get('pause_period', 0.0616),
     ):
         yield

@@ -1304,7 +808,7 @@ async def open_feed(
     '''
     fqsn = fqsns[0].lower()

-    brokername, key, suffix = unpack_fqsn(fqsn)
+    brokername, key, suffix = uncons_fqsn(fqsn)
     bfqsn = fqsn.replace('.' + brokername, '')

     try:
@@ -1355,24 +859,9 @@ async def open_feed(
         first_quotes=first_quotes,
         stream=stream,
         _portal=portal,
-        status={},
         throttle_rate=tick_throttle,
     )

-    # fill out "status info" that the UI can show
-    host, port = feed.portal.channel.raddr
-    if host == '127.0.0.1':
-        host = 'localhost'
-
-    feed.status.update({
-        'actor_name': feed.portal.channel.uid[0],
-        'host': host,
-        'port': port,
-        'shm': f'{humanize(feed.shm._shm.size)}',
-        'throttle_rate': feed.throttle_rate,
-    })
-    feed.status.update(init_msg.pop('status', {}))
-
     for sym, data in init_msg.items():
         si = data['symbol_info']
         fqsn = data['fqsn'] + f'.{brokername}'
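Review note: the removed "status info" block builds a plain dict from the feed's IPC endpoint for UI display. A stdlib-only sketch of the same shape; the function name is hypothetical and the byte count stands in for the real code's ``humanize()`` call:

def build_feed_status(actor_name: str, addr: tuple, shm_size: int) -> dict:
    host, port = addr
    if host == '127.0.0.1':
        host = 'localhost'
    return {
        'actor_name': actor_name,
        'host': host,
        'port': port,
        'shm': f'{shm_size} bytes',  # the real code humanizes this
    }

print(build_feed_status('brokerd.ib', ('127.0.0.1', 6116), 1024))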
@@ -23,15 +23,12 @@
 - todo: tick sequence stream-cloning for testing

 '''
-from __future__ import annotations
 from contextlib import asynccontextmanager as acm
-from datetime import datetime
 from pprint import pformat
 from typing import (
     Any,
     Optional,
     Union,
-    TYPE_CHECKING,
 )
 import time
 from math import isnan
@@ -40,6 +37,7 @@ from bidict import bidict
 import msgpack
 import pyqtgraph as pg
 import numpy as np
+import pandas as pd
 import tractor
 from trio_websocket import open_websocket_url
 from anyio_marketstore import (
@@ -47,154 +45,14 @@ from anyio_marketstore import (
     MarketstoreClient,
     Params,
 )
-import pendulum
 import purerpc

-if TYPE_CHECKING:
-    import docker
-    from ._ahab import DockerContainer
-
 from .feed import maybe_open_feed
 from ..log import get_logger, get_console_log


 log = get_logger(__name__)


-# container level config
-_config = {
-    'grpc_listen_port': 5995,
-    'ws_listen_port': 5993,
-    'log_level': 'debug',
-}
-
-_yaml_config = '''
-# piker's ``marketstore`` config.
-
-# mount this config using:
-# sudo docker run --mount \
-# type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
-# 5993:5993 alpacamarkets/marketstore:latest
-
-root_directory: data
-listen_port: {ws_listen_port}
-grpc_listen_port: {grpc_listen_port}
-log_level: {log_level}
-queryable: true
-stop_grace_period: 0
-wal_rotate_interval: 5
-stale_threshold: 5
-enable_add: true
-enable_remove: false
-
-triggers:
-  - module: ondiskagg.so
-    on: "*/1Sec/OHLCV"
-    config:
-        # filter: "nasdaq"
-        destinations:
-            - 1Min
-            - 5Min
-            - 15Min
-            - 1H
-            - 1D
-
-  - module: stream.so
-    on: '*/*/*'
-    # config:
-    #     filter: "nasdaq"
-
-'''.format(**_config)
-
-
-def start_marketstore(
-    client: docker.DockerClient,
-
-    **kwargs,
-
-) -> tuple[DockerContainer, dict[str, Any]]:
-    '''
-    Start and supervise a marketstore instance with its config bind-mounted
-    in from the piker config directory on the system.
-
-    The equivalent cli cmd to this code is:
-
-        sudo docker run --mount \
-        type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
-        5993:5993 alpacamarkets/marketstore:latest
-
-    '''
-    import os
-    import docker
-    from .. import config
-    get_console_log('info', name=__name__)
-
-    mktsdir = os.path.join(config._config_dir, 'marketstore')
-
-    # create when dne
-    if not os.path.isdir(mktsdir):
-        os.mkdir(mktsdir)
-
-    yml_file = os.path.join(mktsdir, 'mkts.yml')
-    if not os.path.isfile(yml_file):
-        log.warning(
-            f'No `marketstore` config exists?: {yml_file}\n'
-            'Generating new file from template:\n'
-            f'{_yaml_config}\n'
-        )
-        with open(yml_file, 'w') as yf:
-            yf.write(_yaml_config)
-
-    # create a mount from user's local piker config dir into container
-    config_dir_mnt = docker.types.Mount(
-        target='/etc',
-        source=mktsdir,
-        type='bind',
-    )
-
-    # create a user config subdir where the marketstore
-    # backing filesystem database can be persisted.
-    persistent_data_dir = os.path.join(
-        mktsdir, 'data',
-    )
-    if not os.path.isdir(persistent_data_dir):
-        os.mkdir(persistent_data_dir)
-
-    data_dir_mnt = docker.types.Mount(
-        target='/data',
-        source=persistent_data_dir,
-        type='bind',
-    )
-
-    dcntr: DockerContainer = client.containers.run(
-        'alpacamarkets/marketstore:latest',
-        # do we need this for cmds?
-        # '-i',
-
-        # '-p 5993:5993',
-        ports={
-            '5993/tcp': 5993,  # jsonrpc / ws?
-            '5995/tcp': 5995,  # grpc
-        },
-        mounts=[
-            config_dir_mnt,
-            data_dir_mnt,
-        ],
-        detach=True,
-        # stop_signal='SIGINT',
-        init=True,
-        # remove=True,
-    )
-    return (
-        dcntr,
-        _config,
-
-        # expected startup and stop msgs
-        "launching tcp listener for all services...",
-        "exiting...",
-    )
-
-
 _tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK')
 _tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids)

@@ -242,22 +100,12 @@ _ohlcv_dt = [
     # ohlcv sampling
     ('Open', 'f4'),
     ('High', 'f4'),
-    ('Low', 'f4'),
-    ('Close', 'f4'),
+    ('Low', 'i8'),
+    ('Close', 'i8'),
     ('Volume', 'f4'),
 ]


-ohlc_key_map = bidict({
-    'Epoch': 'time',
-    'Open': 'open',
-    'High': 'high',
-    'Low': 'low',
-    'Close': 'close',
-    'Volume': 'volume',
-})
-
-
 def mk_tbk(keys: tuple[str, str, str]) -> str:
     '''
     Generate a marketstore table key from a tuple.
@@ -279,7 +127,7 @@ def quote_to_marketstore_structarray(
     '''
     if last_fill:
         # new fill bby
-        now = int(pendulum.parse(last_fill).timestamp)
+        now = timestamp(last_fill)
     else:
         # this should get inserted upstream by the broker-client to
         # subtract from IPC latency
@@ -309,6 +157,15 @@ def quote_to_marketstore_structarray(
     return np.array([tuple(array_input)], dtype=_quote_dt)


+def timestamp(date, **kwargs) -> int:
+    '''
+    Return marketstore compatible 'Epoch' integer in nanoseconds
+    from a date formatted str.
+
+    '''
+    return int(pd.Timestamp(date, **kwargs).value)
+
+
 @acm
 async def get_client(
     host: str = 'localhost',
@@ -342,7 +199,6 @@ class MarketStoreError(Exception):
     # raise MarketStoreError(err)


-# map of seconds ints to "time frame" accepted keys
 tf_in_1s = bidict({
     1: '1Sec',
     60: '1Min',
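Review note: the added ``timestamp()`` helper leans on pandas, where ``pd.Timestamp.value`` is the epoch in nanoseconds, the resolution marketstore's 'Epoch' column expects here. A quick self-check of that assumption:

import pandas as pd

ns = int(pd.Timestamp('2022-01-01T00:00:00Z').value)
assert ns == 1_640_995_200 * 10**9  # seconds-epoch scaled to nanoseconds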
@@ -384,41 +240,13 @@ class Storage:
     async def write_ticks(self, ticks: list) -> None:
         ...

-    async def load(
-        self,
-        fqsn: str,
-
-    ) -> tuple[
-        dict[int, np.ndarray],  # timeframe (in secs) to series
-        Optional[datetime],  # first dt
-        Optional[datetime],  # last dt
-    ]:
-
-        first_tsdb_dt, last_tsdb_dt = None, None
-        tsdb_arrays = await self.read_ohlcv(
-            fqsn,
-            # on first load we don't need to pull the max
-            # history per request size worth.
-            limit=3000,
-        )
-        log.info(f'Loaded tsdb history {tsdb_arrays}')
-
-        if tsdb_arrays:
-            fastest = list(tsdb_arrays.values())[0]
-            times = fastest['Epoch']
-            first, last = times[0], times[-1]
-            first_tsdb_dt, last_tsdb_dt = map(
-                pendulum.from_timestamp, [first, last]
-            )
-
-        return tsdb_arrays, first_tsdb_dt, last_tsdb_dt
+    async def write_ohlcv(self, ohlcv: np.ndarray) -> None:
+        ...

     async def read_ohlcv(
         self,
         fqsn: str,
         timeframe: Optional[Union[int, str]] = None,
-        end: Optional[int] = None,
-        limit: int = int(800e3),

     ) -> tuple[
         MarketstoreClient,
@@ -430,30 +258,16 @@ class Storage:
         if fqsn not in syms:
             return {}

-        tfstr = tf_in_1s[1]
-
-        params = Params(
-            symbols=fqsn,
-            timeframe=tfstr,
-            attrgroup='OHLCV',
-            end=end,
-            # limit_from_start=True,
-
-            # TODO: figure the max limit here given the
-            # ``purerpc`` msg size limit of purerpc: 33554432
-            limit=limit,
-        )
-
         if timeframe is None:
             log.info(f'starting {fqsn} tsdb granularity scan..')
             # loop through and try to find highest granularity
             for tfstr in tf_in_1s.values():
                 try:
                     log.info(f'querying for {tfstr}@{fqsn}')
-                    params.set('timeframe', tfstr)
-                    result = await client.query(params)
+                    result = await client.query(
+                        Params(fqsn, tfstr, 'OHLCV',)
+                    )
                     break

                 except purerpc.grpclib.exceptions.UnknownError:
                     # XXX: this is already logged by the container and
                     # thus shows up through `marketstored` logs relay.
@@ -463,7 +277,8 @@ class Storage:
                     return {}

         else:
-            result = await client.query(params)
+            tfstr = tf_in_1s[timeframe]
+            result = await client.query(Params(fqsn, tfstr, 'OHLCV',))

         # TODO: it turns out column access on recarrays is actually slower:
         # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
@@ -492,90 +307,6 @@ class Storage:

         return await client.destroy(tbk=key)

-    async def write_ohlcv(
-        self,
-        fqsn: str,
-        ohlcv: np.ndarray,
-        append_and_duplicate: bool = True,
-        limit: int = int(800e3),
-
-    ) -> None:
-        # build mkts schema compat array for writing
-        mkts_dt = np.dtype(_ohlcv_dt)
-        mkts_array = np.zeros(
-            len(ohlcv),
-            dtype=mkts_dt,
-        )
-        # copy from shm array (yes it's this easy):
-        # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
-        mkts_array[:] = ohlcv[[
-            'time',
-            'open',
-            'high',
-            'low',
-            'close',
-            'volume',
-        ]]
-
-        m, r = divmod(len(mkts_array), limit)
-
-        for i in range(m, 1):
-            to_push = mkts_array[i-1:i*limit]
-
-            # write to db
-            resp = await self.client.write(
-                to_push,
-                tbk=f'{fqsn}/1Sec/OHLCV',
-
-                # NOTE: this will append duplicates
-                # for the same timestamp-index.
-                # TODO: pre deduplicate?
-                isvariablelength=append_and_duplicate,
-            )
-
-            log.info(
-                f'Wrote {mkts_array.size} datums to tsdb\n'
-            )
-
-            for resp in resp.responses:
-                err = resp.error
-                if err:
-                    raise MarketStoreError(err)
-
-        if r:
-            to_push = mkts_array[m*limit:]
-
-            # write to db
-            resp = await self.client.write(
-                to_push,
-                tbk=f'{fqsn}/1Sec/OHLCV',
-
-                # NOTE: this will append duplicates
-                # for the same timestamp-index.
-                # TODO: pre deduplicate?
-                isvariablelength=append_and_duplicate,
-            )
-
-            log.info(
-                f'Wrote {mkts_array.size} datums to tsdb\n'
-            )
-
-            for resp in resp.responses:
-                err = resp.error
-                if err:
-                    raise MarketStoreError(err)
-
-    # XXX: currently the only way to do this is through the CLI:
-
-    # sudo ./marketstore connect --dir ~/.config/piker/data
-    # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
-    # and this seems to block and use up mem..
-    # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
-
-    # relevant source code for this is here:
-    # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14
-    # def delete_range(self, start_dt, end_dt) -> None:
-    #     ...
-
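Review note: the removed ``write_ohlcv()`` chunks its payload with ``m, r = divmod(len(mkts_array), limit)``, but its loop ``for i in range(m, 1)`` iterates zero times whenever ``m >= 1``, so only the remainder branch ever writes. A corrected chunk-index sketch under the same divmod idea, stdlib only and illustrative:

def iter_chunks(n_rows: int, limit: int):
    m, r = divmod(n_rows, limit)
    for i in range(m):
        yield i * limit, (i + 1) * limit  # full chunks
    if r:
        yield m * limit, n_rows  # trailing remainder

assert list(iter_chunks(7, 3)) == [(0, 3), (3, 6), (6, 7)]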
 @acm
 async def open_storage_client(
@@ -596,7 +327,7 @@ async def open_storage_client(


 async def tsdb_history_update(
-    fqsn: Optional[str] = None,
+    fqsn: str,

 ) -> list[str]:

@@ -642,51 +373,80 @@ async def tsdb_history_update(
     ):
         profiler(f'opened feed for {fqsn}')

-        to_append = feed.shm.array
-        to_prepend = None
-
-        if fqsn:
-            symbol = feed.symbols.get(fqsn)
-            if symbol:
-                fqsn = symbol.front_fqsn()
-
-            # diff db history with shm and only write the missing portions
-            ohlcv = feed.shm.array
-
-            # TODO: use pg profiler
-            tsdb_arrays = await storage.read_ohlcv(fqsn)
-            # hist diffing
-            if tsdb_arrays:
-                for secs in (1, 60):
-                    ts = tsdb_arrays.get(secs)
-                    if ts is not None and len(ts):
-                        # these aren't currently used but can be referenced from
-                        # within the embedded ipython shell below.
-                        to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
-                        to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
-
-            profiler('Finished db arrays diffs')
+        symbol = feed.symbols.get(fqsn)
+        if symbol:
+            fqsn = symbol.front_fqsn()

         syms = await storage.client.list_symbols()
         log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
         profiler(f'listed symbols {syms}')

-        # TODO: ask if user wants to write history for detected
-        # available shm buffers?
+        # diff db history with shm and only write the missing portions
+        ohlcv = feed.shm.array
+
+        # TODO: use pg profiler
+        tsdb_arrays = await storage.read_ohlcv(fqsn)
+
+        to_append = feed.shm.array
+        to_prepend = None
+
+        # hist diffing
+        if tsdb_arrays:
+            onesec = tsdb_arrays[1]
+            to_append = ohlcv[ohlcv['time'] > onesec['Epoch'][-1]]
+            to_prepend = ohlcv[ohlcv['time'] < onesec['Epoch'][0]]
+
+        profiler('Finished db arrays diffs')
+
+        for array in [to_append, to_prepend]:
+            if array is None:
+                continue
+
+            log.info(
+                f'Writing datums {array.size} -> to tsdb from shm\n'
+            )
+
+            # build mkts schema compat array for writing
+            mkts_dt = np.dtype(_ohlcv_dt)
+            mkts_array = np.zeros(
+                len(array),
+                dtype=mkts_dt,
+            )
+            # copy from shm array (yes it's this easy):
+            # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
+            mkts_array[:] = array[[
+                'time',
+                'open',
+                'high',
+                'low',
+                'close',
+                'volume',
+            ]]
+
+            # write to db
+            resp = await storage.client.write(
+                mkts_array,
+                tbk=f'{fqsn}/1Sec/OHLCV',
+
+                # NOTE: this will append duplicates
+                # for the same timestamp-index.
+                # TODO: pre deduplicate?
+                isvariablelength=True,
+            )
+
+            log.info(
+                f'Wrote {to_append.size} datums to tsdb\n'
+            )
+            profiler('Finished db writes')
+
+            for resp in resp.responses:
+                err = resp.error
+                if err:
+                    raise MarketStoreError(err)

         from tractor.trionics import ipython_embed
         await ipython_embed()

-        # for array in [to_append, to_prepend]:
-        #     if array is None:
-        #         continue
-
-        #     log.info(
-        #         f'Writing datums {array.size} -> to tsdb from shm\n'
-        #     )
-        #     await storage.write_ohlcv(fqsn, array)
-
-        # profiler('Finished db writes')


 async def ingest_quote_stream(
     symbols: list[str],
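Review note: the "copy from shm array" step above relies on a numpy rule worth spelling out: assignment between structured arrays maps fields by position, not by name, so a shm row can be re-keyed to marketstore's column names in one vectorized shot. A minimal sketch with illustrative dtypes:

import numpy as np

src = np.array(
    [(1.0, 10.0, 100.0)],
    dtype=[('time', 'f8'), ('close', 'f8'), ('volume', 'f8')],
)
dst = np.zeros(1, dtype=[('Epoch', 'i8'), ('Close', 'f4'), ('Volume', 'f4')])

# positional field mapping: 'time' -> 'Epoch', 'close' -> 'Close', ...
dst[:] = src[['time', 'close', 'volume']]
assert dst['Epoch'][0] == 1 and dst['Close'][0] == 10.0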
@@ -361,7 +361,7 @@ async def cascade(
         ) -> tuple[TaskTracker, int]:
             # TODO: adopt an incremental update engine/approach
             # where possible here eventually!
-            log.debug(f're-syncing fsp {func_name} to source')
+            log.warning(f're-syncing fsp {func_name} to source')
             tracker.cs.cancel()
             await tracker.complete.wait()
             tracker, index = await n.start(fsp_target)
@@ -369,12 +369,7 @@ async def cascade(
             # always trigger UI refresh after history update,
             # see ``piker.ui._fsp.FspAdmin.open_chain()`` and
             # ``piker.ui._display.trigger_update()``.
-            await client_stream.send({
-                'fsp_update': {
-                    'key': dst_shm_token,
-                    'first': dst._first.value,
-                    'last': dst._last.value,
-                }})
+            await client_stream.send('update')
             return tracker, index

         def is_synced(
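Review note: the re-sync message grows from a bare 'update' string into a structured dict carrying the destination shm token and the refreshed index range. A hypothetical receiver-side sketch showing why the richer shape helps (names and handling are illustrative, not the branch's UI code):

def handle_fsp_msg(msg) -> None:
    if msg == 'update':
        print('legacy: full refresh with no range info')
    elif isinstance(msg, dict) and 'fsp_update' in msg:
        info = msg['fsp_update']
        print(f"refresh {info['key']} rows {info['first']}..{info['last']}")

handle_fsp_msg({'fsp_update': {'key': 'dolla_vlm', 'first': 0, 'last': 99}})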
@@ -167,7 +167,6 @@ def _wma(

     assert length == len(weights)

-    # lol, for long sequences this is nutso slow and expensive..
     return np.convolve(signal, weights, 'valid')


@@ -309,7 +309,7 @@ async def flow_rates(

         if period > 1:
             trade_rate_wma = _wma(
-                dvlm_shm.array['trade_count'][-period:],
+                dvlm_shm.array['trade_count'],
                 period,
                 weights=weights,
             )
@@ -332,7 +332,7 @@ async def flow_rates(

         if period > 1:
             dark_trade_rate_wma = _wma(
-                dvlm_shm.array['dark_trade_count'][-period:],
+                dvlm_shm.array['dark_trade_count'],
                 period,
                 weights=weights,
             )
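Review note on the ``[-period:]`` slices added above: ``np.convolve(signal, weights, 'valid')`` costs roughly O(len(signal) * period), so convolving the whole ever-growing shm column on every cycle gets slow, while slicing to the last ``period`` samples bounds the work and leaves the trailing 'valid' output unchanged. A quick check of that equivalence:

import numpy as np

signal = np.arange(100, dtype='f8')
period = 4
weights = np.full(period, 1 / period)

full = np.convolve(signal, weights, 'valid')
tail = np.convolve(signal[-period:], weights, 'valid')
assert np.allclose(full[-1], tail[-1])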
@@ -25,13 +25,10 @@ from pygments import highlight, lexers, formatters

 # Makes it so we only see the full module name when using ``__name__``
 # without the extra "piker." prefix.
-_proj_name: str = 'piker'
+_proj_name = 'piker'


-def get_logger(
-    name: str = None,
-
-) -> logging.Logger:
+def get_logger(name: str = None) -> logging.Logger:
     '''Return the package log or a sub-log for `name` if provided.
     '''
     return tractor.log.get_logger(name=name, _root_name=_proj_name)
@@ -19,10 +19,10 @@ Chart axes graphics and behavior.

 """
 from functools import lru_cache
-from typing import Optional, Callable
+from typing import List, Tuple, Optional, Callable
 from math import floor

-import numpy as np
+import pandas as pd
 import pyqtgraph as pg
 from PyQt5 import QtCore, QtGui, QtWidgets
 from PyQt5.QtCore import QPointF
@@ -103,7 +103,7 @@ class Axis(pg.AxisItem):
     def size_to_values(self) -> None:
         pass

-    def txt_offsets(self) -> tuple[int, int]:
+    def txt_offsets(self) -> Tuple[int, int]:
         return tuple(self.style['tickTextOffset'])


@@ -218,14 +218,13 @@ class DynamicDateAxis(Axis):

     def _indexes_to_timestrs(
         self,
-        indexes: list[int],
+        indexes: List[int],

-    ) -> list[str]:
+    ) -> List[str]:

         chart = self.linkedsplits.chart
-        flow = chart._flows[chart.name]
-        shm = flow.shm
-        bars = shm.array
+        bars = chart._arrays[chart.name]
+        shm = self.linkedsplits.chart._shm
         first = shm._first.value

         bars_len = len(bars)
@@ -242,17 +241,10 @@ class DynamicDateAxis(Axis):
         )]

         # TODO: **don't** have this hard coded shift to EST
-        # delay = times[-1] - times[-2]
-        dts = np.array(epochs, dtype='datetime64[s]')
-
-        # see units listing:
-        # https://numpy.org/devdocs/reference/arrays.datetime.html#datetime-units
-        return list(np.datetime_as_string(dts))
-
-        # TODO: per timeframe formatting?
-        # - we probably need this based on zoom now right?
-        # prec = self.np_dt_precision[delay]
-        # return dts.strftime(self.tick_tpl[delay])
+        dts = pd.to_datetime(epochs, unit='s')  # - 4*pd.offsets.Hour()
+
+        delay = times[-1] - times[-2]
+        return dts.strftime(self.tick_tpl[delay])

     def tickStrings(
         self,
@@ -438,7 +430,7 @@ class XAxisLabel(AxisLabel):
         | QtCore.Qt.AlignCenter
     )

-    def size_hint(self) -> tuple[float, float]:
+    def size_hint(self) -> Tuple[float, float]:
         # size to parent axis height
         return self._parent.height(), None

@@ -452,11 +444,11 @@ class XAxisLabel(AxisLabel):

         timestrs = self._parent._indexes_to_timestrs([int(value)])

-        if not len(timestrs):
+        if not timestrs.any():
             return

         pad = 1*' '
-        self.label_str = pad + str(timestrs[0]) + pad
+        self.label_str = pad + timestrs[0] + pad

         _, y_offset = self._parent.txt_offsets()

@@ -517,7 +509,7 @@ class YAxisLabel(AxisLabel):
         if getattr(self._parent, 'txt_offsets', False):
             self.x_offset, y_offset = self._parent.txt_offsets()

-    def size_hint(self) -> tuple[float, float]:
+    def size_hint(self) -> Tuple[float, float]:
         # size to parent axis width(-ish)
         wsh = self._dpifont.boundingRect(' ').height() / 2
         return (
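Review note: the pandas-to-numpy swap in ``_indexes_to_timestrs()`` above turns epoch seconds into ``datetime64[s]`` values and stringifies them in one vectorized call, dropping the ``pd.to_datetime`` + ``strftime`` path. A quick demonstration of the numpy side:

import numpy as np

epochs = [1_640_995_200, 1_640_995_260]
dts = np.array(epochs, dtype='datetime64[s]')
assert list(np.datetime_as_string(dts)) == [
    '2022-01-01T00:00:00', '2022-01-01T00:01:00',
]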
@ -37,6 +37,7 @@ from PyQt5.QtWidgets import (
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pyqtgraph as pg
|
import pyqtgraph as pg
|
||||||
import trio
|
import trio
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
from ._axes import (
|
from ._axes import (
|
||||||
DynamicDateAxis,
|
DynamicDateAxis,
|
||||||
@@ -47,13 +48,9 @@ from ._cursor import (
     Cursor,
     ContentsLabel,
 )
-from ..data._sharedmem import ShmArray
 from ._l1 import L1Labels
 from ._ohlc import BarItems
-from ._curve import (
-    Curve,
-    StepCurve,
-)
+from ._curve import FastAppendCurve
 from ._style import (
     hcolor,
     CHART_MARGINS,
@@ -62,12 +59,11 @@ from ._style import (
 )
 from ..data.feed import Feed
 from ..data._source import Symbol
+from ..data._sharedmem import ShmArray
 from ..log import get_logger
 from ._interaction import ChartView
 from ._forms import FieldsForm
-from .._profile import pg_profile_enabled, ms_slower_then
 from ._overlay import PlotItemOverlay
-from ._flows import Flow

 if TYPE_CHECKING:
     from ._display import DisplayState
@@ -368,11 +364,11 @@ class LinkedSplits(QWidget):

         self._symbol: Symbol = None

-    def graphics_cycle(self, **kwargs) -> None:
+    def graphics_cycle(self) -> None:
         from . import _display
         ds = self.display_state
         if ds:
-            return _display.graphics_update_cycle(ds, **kwargs)
+            return _display.graphics_update_cycle(ds)

     @property
     def symbol(self) -> Symbol:
@@ -418,7 +414,7 @@ class LinkedSplits(QWidget):
         self,

         symbol: Symbol,
-        shm: ShmArray,
+        array: np.ndarray,
         sidepane: FieldsForm,

         style: str = 'bar',
@@ -443,7 +439,7 @@ class LinkedSplits(QWidget):
         self.chart = self.add_plot(

             name=symbol.key,
-            shm=shm,
+            array=array,
             style=style,
             _is_main=True,

@@ -471,7 +467,7 @@ class LinkedSplits(QWidget):
         self,

         name: str,
-        shm: ShmArray,
+        array: np.ndarray,

         array_key: Optional[str] = None,
         style: str = 'line',
@@ -515,6 +511,7 @@ class LinkedSplits(QWidget):
             name=name,
             data_key=array_key or name,

+            array=array,
             parent=qframe,
             linkedsplits=self,
             axisItems=axes,
@@ -578,7 +575,7 @@ class LinkedSplits(QWidget):

             graphics, data_key = cpw.draw_ohlc(
                 name,
-                shm,
+                array,
                 array_key=array_key
             )
             self.cursor.contents_labels.add_label(
@@ -592,7 +589,7 @@ class LinkedSplits(QWidget):
             add_label = True
             graphics, data_key = cpw.draw_curve(
                 name,
-                shm,
+                array,
                 array_key=array_key,
                 color='default_light',
             )
@@ -601,7 +598,7 @@ class LinkedSplits(QWidget):
             add_label = True
             graphics, data_key = cpw.draw_curve(
                 name,
-                shm,
+                array,
                 array_key=array_key,
                 step_mode=True,
                 color='davies',
@@ -656,6 +653,31 @@ class LinkedSplits(QWidget):
             cpw.sidepane.setMaximumWidth(sp_w)


+# class FlowsTable(pydantic.BaseModel):
+#     '''
+#     Data-AGGRegate: high level API onto multiple (categorized)
+#     ``Flow``s with high level processing routines for
+#     multi-graphics computations and display.
+
+#     '''
+#     flows: dict[str, np.ndarray] = {}
+
+
+class Flow(BaseModel):
+    '''
+    (FinancialSignal-)Flow compound type which wraps a real-time
+    graphics (curve) and its backing data stream together for high level
+    access and control.

+    '''
+    class Config:
+        arbitrary_types_allowed = True
+
+    name: str
+    plot: pg.PlotItem
+    shm: Optional[ShmArray] = None  # may be filled in "later"
+
+
 class ChartPlotWidget(pg.PlotWidget):
     '''
     ``GraphicsView`` subtype containing a single ``PlotItem``.
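
Aside: a minimal, runnable sketch (not from this changeset) of the `Flow` pattern added above — a pydantic model holding "arbitrary" non-pydantic types like a plot item. `FakePlotItem` is a hypothetical stand-in so the snippet runs without Qt/pyqtgraph installed.

    from typing import Optional
    import numpy as np
    from pydantic import BaseModel


    class FakePlotItem:  # stand-in for pg.PlotItem
        pass


    class Flow(BaseModel):
        class Config:
            # required so pydantic accepts types it can't natively validate
            arbitrary_types_allowed = True

        name: str
        plot: FakePlotItem
        shm: Optional[np.ndarray] = None  # may be filled in "later"


    flow = Flow(name='BTCUSDT', plot=FakePlotItem())
    flow.shm = np.arange(10)  # attach backing data after construction
    print(flow.name, flow.shm[-1])
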
@@ -689,6 +711,7 @@ class ChartPlotWidget(pg.PlotWidget):

         # the "data view" we generate graphics from
         name: str,
+        array: np.ndarray,
         data_key: str,
         linkedsplits: LinkedSplits,

@@ -741,6 +764,14 @@ class ChartPlotWidget(pg.PlotWidget):
         self._max_l1_line_len: float = 0

         # self.setViewportMargins(0, 0, 0, 0)
+        # self._ohlc = array  # readonly view of ohlc data
+
+        # TODO: move to Aggr above XD
+        # readonly view of data arrays
+        self._arrays = {
+            self.data_key: array,
+        }
+        self._graphics = {}  # registry of underlying graphics

         # registry of overlay curve names
         self._flows: dict[str, Flow] = {}
@@ -756,6 +787,7 @@ class ChartPlotWidget(pg.PlotWidget):
         # show background grid
         self.showGrid(x=False, y=True, alpha=0.3)

+        self.default_view()
         self.cv.enable_auto_yrange()

         self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
@@ -804,8 +836,14 @@ class ChartPlotWidget(pg.PlotWidget):
         Return a range tuple for the bars present in view.

         '''
-        main_flow = self._flows[self.name]
-        ifirst, l, lbar, rbar, r, ilast = main_flow.datums_range()
+        l, r = self.view_range()
+        array = self._arrays[self.name]
+        start, stop = self._xrange = (
+            array[0]['index'],
+            array[-1]['index'],
+        )
+        lbar = max(l, start)
+        rbar = min(r, stop)
         return l, lbar, rbar, r

     def curve_width_pxs(
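
Aside: a toy illustration (not from the diff) of the `bars_range()` clamping above — the viewbox x-range `(l, r)` may extend past the data, so the in-view bar bounds get clipped to the first/last array indices.

    l, r = -50, 1200            # current view range in "index" coords
    start, stop = 0, 1000       # first/last datum indices in the array

    lbar = max(l, start)        # left-most bar actually present in view
    rbar = min(r, stop)         # right-most bar actually present in view
    print(l, lbar, rbar, r)     # -> -50 0 1000 1200
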
@@ -859,51 +897,40 @@ class ChartPlotWidget(pg.PlotWidget):

     def default_view(
         self,
-        bars_from_y: int = 3000,
+        steps_on_screen: Optional[int] = None

     ) -> None:
         '''
         Set the view box to the "default" startup view of the scene.

         '''
-        flow = self._flows.get(self.name)
-        if not flow:
-            log.warning(f'`Flow` for {self.name} not loaded yet?')
+        try:
+            index = self._arrays[self.name]['index']
+        except IndexError:
+            log.warning(f'array for {self.name} not loaded yet?')
             return

-        index = flow.shm.array['index']
         xfirst, xlast = index[0], index[-1]
         l, lbar, rbar, r = self.bars_range()
-        view = self.view
+        marker_pos, l1_len = self.pre_l1_xs()
+        end = xlast + l1_len + 1

         if (
             rbar < 0
             or l < xfirst
-            or l < 0
             or (rbar - lbar) < 6
         ):
-            # TODO: set fixed bars count on screen that approx includes as
+            # set fixed bars count on screen that approx includes as
             # many bars as possible before a downsample line is shown.
-            begin = xlast - bars_from_y
-            view.setXRange(
-                min=begin,
-                max=xlast,
-                padding=0,
-            )
-            # re-get range
-            l, lbar, rbar, r = self.bars_range()
-
-            # we get the L1 spread label "length" in view coords
-            # terms now that we've scaled either by user control
-            # or to the default set of bars as per the immediate block
-            # above.
-            marker_pos, l1_len = self.pre_l1_xs()
-            end = xlast + l1_len + 1
-            begin = end - (r - l)
+            begin = xlast - round(6116 / 6)
+
+        else:
+            begin = end - (r - l)

         # for debugging
         # print(
-        #     # f'bars range: {brange}\n'
+        #     f'bars range: {brange}\n'
         #     f'xlast: {xlast}\n'
         #     f'marker pos: {marker_pos}\n'
         #     f'l1 len: {l1_len}\n'
@@ -915,13 +942,14 @@ class ChartPlotWidget(pg.PlotWidget):
         if self._static_yrange == 'axis':
             self._static_yrange = None

+        view = self.view
         view.setXRange(
             min=begin,
             max=end,
             padding=0,
         )
-        self.view.maybe_downsample_graphics()
         view._set_yrange()
+        self.view.maybe_downsample_graphics()
         try:
             self.linked.graphics_cycle()
         except IndexError:
@@ -930,7 +958,6 @@ class ChartPlotWidget(pg.PlotWidget):
     def increment_view(
         self,
         steps: int = 1,
-        vb: Optional[ChartView] = None,

     ) -> None:
         """
@@ -939,8 +966,7 @@ class ChartPlotWidget(pg.PlotWidget):

         """
         l, r = self.view_range()
-        view = vb or self.view
-        view.setXRange(
+        self.view.setXRange(
             min=l + steps,
             max=r + steps,

@@ -952,7 +978,7 @@ class ChartPlotWidget(pg.PlotWidget):
     def draw_ohlc(
         self,
         name: str,
-        shm: ShmArray,
+        data: np.ndarray,

         array_key: Optional[str] = None,

@@ -972,16 +998,11 @@ class ChartPlotWidget(pg.PlotWidget):
         # the np array buffer to be drawn on next render cycle
         self.plotItem.addItem(graphics)

+        # draw after to allow self.scene() to work...
+        graphics.draw_from_data(data)
+
         data_key = array_key or name
-        self._flows[data_key] = Flow(
-            name=name,
-            plot=self.plotItem,
-            _shm=shm,
-            is_ohlc=True,
-            graphics=graphics,
-        )
+        self._graphics[data_key] = graphics

         self._add_sticky(name, bg_color='davies')

         return graphics, data_key
@@ -1022,7 +1043,6 @@ class ChartPlotWidget(pg.PlotWidget):
         )
         pi.hideButtons()

-        # cv.enable_auto_yrange(self.view)
         cv.enable_auto_yrange()

         # compose this new plot's graphics with the current chart's
@@ -1047,21 +1067,19 @@ class ChartPlotWidget(pg.PlotWidget):
         self,

         name: str,
-        shm: ShmArray,
+        data: np.ndarray,

         array_key: Optional[str] = None,
         overlay: bool = False,
         color: Optional[str] = None,
         add_label: bool = True,
-        pi: Optional[pg.PlotItem] = None,
-        step_mode: bool = False,

         **pdi_kwargs,

     ) -> (pg.PlotDataItem, str):
         '''
         Draw a "curve" (line plot graphics) for the provided data in
-        the input shm array ``shm``.
+        the input array ``data``.

         '''
         color = color or self.pen_color or 'default_light'
@@ -1071,28 +1089,35 @@ class ChartPlotWidget(pg.PlotWidget):

         data_key = array_key or name

-        curve_type = {
-            None: Curve,
-            'step': StepCurve,
-            # TODO:
-            # 'bars': BarsItems
-        }['step' if step_mode else None]
-
-        curve = curve_type(
+        # yah, we wrote our own B)
+        curve = FastAppendCurve(
+            y=data[data_key],
+            x=data['index'],
+            # antialias=True,
             name=name,

+            # XXX: pretty sure this is just more overhead
+            # on data reads and makes graphics rendering no faster
+            # clipToView=True,
+
             **pdi_kwargs,
         )

-        pi = pi or self.plotItem
-
-        self._flows[data_key] = Flow(
-            name=name,
-            plot=pi,
-            _shm=shm,
-            is_ohlc=False,
-            # register curve graphics with this flow
-            graphics=curve,
-        )
+        # XXX: see explanation for different caching modes:
+        # https://stackoverflow.com/a/39410081
+        # seems to only be useful if we don't re-generate the entire
+        # QPainterPath every time
+        # curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+
+        # don't ever use this - it's a colossal nightmare of artefacts
+        # and is disastrous for performance.
+        # curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)
+
+        # register curve graphics and backing array for name
+        self._graphics[name] = curve
+        self._arrays[data_key] = data
+
+        pi = self.plotItem

         # TODO: this probably needs its own method?
         if overlay:
@@ -1102,6 +1127,10 @@ class ChartPlotWidget(pg.PlotWidget):
                     f'{overlay} must be from `.plotitem_overlay()`'
                 )
             pi = overlay

+            # anchor_at = ('bottom', 'left')
+            self._flows[name] = Flow(name=name, plot=pi)
+
         else:
             # anchor_at = ('top', 'left')

@@ -1151,9 +1180,32 @@ class ChartPlotWidget(pg.PlotWidget):
         )
         return last

-    def update_graphics_from_flow(
+    # def update_ohlc_from_array(
+    #     self,
+
+    #     graphics_name: str,
+    #     array: np.ndarray,
+    #     **kwargs,
+
+    # ) -> pg.GraphicsObject:
+    #     '''
+    #     Update the named internal graphics from ``array``.
+
+    #     '''
+    #     self._index = array['index'][0]
+    #     self._arrays[self.name] = array
+
+    #     graphics = self._graphics[graphics_name]
+    #     graphics.update_from_array(array, **kwargs)
+
+    #     return graphics
+
+    # def update_curve_from_array(
+    def update_graphics_from_array(
         self,
         graphics_name: str,

+        array: Optional[np.ndarray] = None,
         array_key: Optional[str] = None,

         **kwargs,
@@ -1163,11 +1215,64 @@ class ChartPlotWidget(pg.PlotWidget):
         Update the named internal graphics from ``array``.

         '''
-        flow = self._flows[array_key or graphics_name]
-        return flow.update_graphics(
-            array_key=array_key,
-            **kwargs,
-        )
+        if array is not None:
+            assert len(array)
+
+        data_key = array_key or graphics_name
+        if graphics_name not in self._flows:
+            data_key = self.name
+
+        if array is not None:
+            # write array to internal graphics table
+            self._arrays[data_key] = array
+        else:
+            array = self._arrays[data_key]
+
+        # array key and graphics "name" might be different..
+        graphics = self._graphics[graphics_name]
+
+        # compute "in-view" indices
+        l, lbar, rbar, r = self.bars_range()
+        indexes = array['index']
+        ifirst = indexes[0]
+        ilast = indexes[-1]
+
+        lbar_i = max(l, ifirst) - ifirst
+        rbar_i = min(r, ilast) - ifirst
+
+        in_view = array[lbar_i: rbar_i]
+
+        if not in_view.size:
+            return graphics
+
+        # TODO: we could do it this way as well no?
+        # to_draw = array[lbar - ifirst:(rbar - ifirst) + 1]
+
+        # start_index = self._index
+        # lbar = max(l, start_index) - start_index
+        # rbar = min(r, ohlc[-1]['index']) - start_index
+        if isinstance(graphics, BarItems):
+            graphics.update_from_array(
+                array,
+                in_view,
+                view_range=(lbar_i, rbar_i),

+                **kwargs,
+            )
+
+        else:
+            graphics.update_from_array(
+                x=array['index'],
+                y=array[data_key],
+
+                x_iv=in_view['index'],
+                y_iv=in_view[data_key],
+                view_range=(lbar_i, rbar_i),
+
+                **kwargs
+            )
+
+        return graphics

     # def _label_h(self, yhigh: float, ylow: float) -> float:
     #     # compute contents label "height" in view terms
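
Aside: a standalone sketch (not from the diff) of the "in-view" slicing logic added above — offset the clipped view bounds by the array's first `index` value to get positional slice bounds into the structured numpy buffer.

    import numpy as np

    a = np.zeros(100, dtype=[('index', int), ('close', float)])
    a['index'] = np.arange(500, 600)   # absolute chart indices
    a['close'] = np.random.random(100)

    l, r = 540, 5000                   # raw view range (r overshoots the data)
    ifirst, ilast = a['index'][0], a['index'][-1]

    lbar_i = max(l, ifirst) - ifirst   # -> 40
    rbar_i = min(r, ilast) - ifirst    # -> 99

    in_view = a[lbar_i:rbar_i]
    print(in_view['index'][0], in_view['index'][-1])  # 540 598
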
@@ -1214,7 +1319,7 @@ class ChartPlotWidget(pg.PlotWidget):

         # TODO: this should go onto some sort of
         # data-view thinger..right?
-        ohlc = self._flows[self.name].shm.array
+        ohlc = self._shm.array

         # XXX: not sure why the time is so off here
         # looks like we're gonna have to do some fixing..
@@ -1244,9 +1349,7 @@ class ChartPlotWidget(pg.PlotWidget):
     def maxmin(
         self,
         name: Optional[str] = None,
-        bars_range: Optional[tuple[
-            int, int, int, int, int, int
-        ]] = None,
+        bars_range: Optional[tuple[int, int, int, int]] = None,

     ) -> tuple[float, float]:
         '''
@@ -1255,43 +1358,46 @@ class ChartPlotWidget(pg.PlotWidget):
         If ``bars_range`` is provided use that range.

         '''
-        # print(f'Chart[{self.name}].maxmin()')
-        profiler = pg.debug.Profiler(
-            msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
-            disabled=not pg_profile_enabled(),
-            ms_threshold=ms_slower_then,
-            delayed=True,
-        )
+        l, lbar, rbar, r = bars_range or self.bars_range()
+        # TODO: logic to check if end of bars in view
+        # extra = view_len - _min_points_to_show
+        # begin = self._arrays['ohlc'][0]['index'] - extra
+        # # end = len(self._arrays['ohlc']) - 1 + extra
+        # end = self._arrays['ohlc'][-1]['index'] - 1 + extra

+        # bars_len = rbar - lbar
+        # log.debug(
+        #     f"\nl: {l}, lbar: {lbar}, rbar: {rbar}, r: {r}\n"
+        #     f"view_len: {view_len}, bars_len: {bars_len}\n"
+        #     f"begin: {begin}, end: {end}, extra: {extra}"
+        # )

         # TODO: here we should instead look up the ``Flow.shm.array``
         # and read directly from shm to avoid copying to memory first
         # and then reading it again here.
-        flow_key = name or self.name
-        flow = self._flows.get(flow_key)
+        a = self._arrays.get(name or self.name)
+        if a is None:
+            return None
+
+        ifirst = a[0]['index']
+        bars = a[lbar - ifirst:(rbar - ifirst) + 1]
+
+        if not len(bars):
+            # likely no data loaded yet or extreme scrolling?
+            log.error(f"WTF bars_range = {lbar}:{rbar}")
+            return
+
         if (
-            flow is None
+            self.data_key == self.linked.symbol.key
         ):
-            log.error(f"flow {flow_key} doesn't exist in chart {self.name} !?")
-            key = res = 0, 0
+            # ohlc sampled bars hi/lo lookup
+            ylow = np.nanmin(bars['low'])
+            yhigh = np.nanmax(bars['high'])

         else:
-            (
-                first,
-                l,
-                lbar,
-                rbar,
-                r,
-                last,
-            ) = bars_range or flow.datums_range()
-            profiler(f'{self.name} got bars range')
-
-            key = round(lbar), round(rbar)
-            res = flow.maxmin(*key)
-            if res == (None, None):
-                log.error(
-                    f"{flow_key} no mxmn for bars_range => {key} !?"
-                )
-                res = 0, 0
-
-            profiler(f'yrange mxmn: {key} -> {res}')
-            return res
+            view = bars[name or self.data_key]
+            ylow = np.nanmin(view)
+            yhigh = np.nanmax(view)
+
+        # print(f'{(ylow, yhigh)}')
+        return ylow, yhigh
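
Aside: a runnable sketch (not from the diff) of the y-range lookup above — OHLC buffers use the bar high/low columns, plain curves use a single named column, and NaNs are ignored via the nan-aware reductions.

    import numpy as np

    bars = np.zeros(5, dtype=[('high', float), ('low', float), ('close', float)])
    bars['high'] = [11, 12, np.nan, 14, 13]
    bars['low'] = [9, 10, np.nan, 11, 10]
    bars['close'] = [10, 11, np.nan, 12, 12]

    # ohlc sampled bars hi/lo lookup
    print(np.nanmin(bars['low']), np.nanmax(bars['high']))   # 9.0 14.0

    # generic single-field curve lookup
    view = bars['close']
    print(np.nanmin(view), np.nanmax(view))                  # 10.0 12.0
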

@@ -108,7 +108,7 @@ def trace_hl(

 def ohlc_flatten(
     ohlc: np.ndarray,
-    use_mxmn: bool = True,
+    use_mxmn: bool = False,

 ) -> tuple[np.ndarray, np.ndarray]:
     '''
@@ -120,11 +120,7 @@ def ohlc_flatten(
     index = ohlc['index']

     if use_mxmn:
-        # traces a line optimally over highs to lows
-        # using numba. NOTE: pretty sure this is faster
-        # and looks about the same as the below output.
         flat, x = hl2mxmn(ohlc)

     else:
         flat = rfn.structured_to_unstructured(
             ohlc[['open', 'high', 'low', 'close']]
@@ -138,20 +134,50 @@ def ohlc_flatten(
     return x, flat


+def ohlc_to_m4_line(
+    ohlc: np.ndarray,
+    px_width: int,
+
+    downsample: bool = False,
+    uppx: Optional[float] = None,
+    pretrace: bool = False,
+
+) -> tuple[np.ndarray, np.ndarray]:
+    '''
+    Convert an OHLC struct-array to a m4 downsampled 1-d array.
+
+    '''
+    xpts, flat = ohlc_flatten(
+        ohlc,
+        use_mxmn=pretrace,
+    )
+
+    if downsample:
+        bins, x, y = ds_m4(
+            xpts,
+            flat,
+            px_width=px_width,
+            uppx=uppx,
+            log_scale=bool(uppx)
+        )
+        x = np.broadcast_to(x[:, None], y.shape)
+        x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
+        y = y.flatten()
+
+        return x, y
+    else:
+        return xpts, flat


 def ds_m4(
     x: np.ndarray,
     y: np.ndarray,
-    # units-per-pixel-x(dimension)
-    uppx: float,
-
-    # XXX: troll zone / easter egg..
-    # want to mess with ur pal, pass in the actual
-    # pixel width here instead of uppx-proper (i.e. pass
-    # in our ``pg.GraphicsObject`` derivative's ``.px_width()``
-    # gto mega-trip-out ur bud). Hint, it used to be implemented
-    # (wrongly) using "pixel width", so check the git history ;)
+
+    # this is the width of the data in view
+    # in display-device-local pixel units.
+    px_width: int,
+    uppx: Optional[float] = None,
+    log_scale: bool = True,

-    xrange: Optional[float] = None,

 ) -> tuple[int, np.ndarray, np.ndarray]:
     '''
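
Aside: a runnable sketch (not from the diff) of the OHLC "flattening" used by `ohlc_to_m4_line()` above — each bar becomes 4 line points (o, h, l, c) given fractional x-offsets so the segments render inside the bar's unit-width slot.

    import numpy as np
    from numpy.lib import recfunctions as rfn

    ohlc = np.zeros(3, dtype=[('index', int), ('open', float),
                              ('high', float), ('low', float), ('close', float)])
    ohlc['index'] = [0, 1, 2]
    ohlc['open'], ohlc['high'] = [10, 11, 12], [12, 13, 14]
    ohlc['low'], ohlc['close'] = [9, 10, 11], [11, 12, 13]

    y = rfn.structured_to_unstructured(ohlc[['open', 'high', 'low', 'close']])
    x = np.broadcast_to(ohlc['index'][:, None], y.shape)
    x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
    y = y.flatten()
    print(x[:4])  # [-0.43  0.    0.    0.43]
    print(y[:4])  # [10. 12.  9. 11.]
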
@@ -178,49 +204,51 @@ def ds_m4(
     # "i didn't show it in the sample code, but it's accounted for
     # in the start and end indices and number of bins"

-    # should never get called unless actually needed
-    assert uppx > 1
+    # optionally log-scale down the "supposed pxs on screen"
+    # as the units-per-px (uppx) get's large.
+    if log_scale:
+        assert uppx, 'You must provide a `uppx` value to use log scaling!'
+
+        scaler = round(
+            max(
+                # NOTE: found that a 16x px width brought greater
+                # detail, likely due to dpi scaling?
+                # px_width=px_width * 16,
+                2**6 / (1 + math.log(uppx, 2)),
+                1
+            )
+        )
+        px_width *= scaler
+
+    assert px_width > 1  # width of screen in pxs?

     # NOTE: if we didn't pre-slice the data to downsample
     # you could in theory pass these as the slicing params,
     # do we care though since we can always just pre-slice the
     # input?
     x_start = x[0]  # x value start/lowest in domain
-
-    if xrange is None:
-        x_end = x[-1]  # x end value/highest in domain
-        xrange = (x_end - x_start)
+    x_end = x[-1]  # x end value/highest in domain

     # XXX: always round up on the input pixels
-    # lnx = len(x)
-    # uppx *= max(4 / (1 + math.log(uppx, 2)), 1)
-
-    pxw = math.ceil(xrange / uppx)
-
-    # scale up the frame "width" directly with uppx
-    w = uppx
+    px_width = math.ceil(px_width)
+
+    x_range = x_end - x_start
+
+    # ratio of indexed x-value to width of raster in pixels.
+    # this is more or less, uppx: units-per-pixel.
+    w = x_range / float(px_width)

     # ensure we make more then enough
     # frames (windows) for the output pixel
-    frames = pxw
+    frames = px_width

     # if we have more and then exact integer's
     # (uniform quotient output) worth of datum-domain-points
     # per windows-frame, add one more window to ensure
     # we have room for all output down-samples.
-    pts_per_pixel, r = divmod(xrange, frames)
+    pts_per_pixel, r = divmod(len(x), frames)
     if r:
-        # while r:
         frames += 1
-        pts_per_pixel, r = divmod(xrange, frames)
-
-    # print(
-    #     f'uppx: {uppx}\n'
-    #     f'xrange: {xrange}\n'
-    #     f'pxw: {pxw}\n'
-    #     f'frames: {frames}\n'
-    # )
-    assert frames >= (xrange / uppx)

     # call into ``numba``
     nb, i_win, y_out = _m4(
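
Aside: a toy, pure-numpy sketch (not the real `_m4()` numba routine) of the M4 binning idea behind `ds_m4()` above — for each pixel-wide frame keep only the first, min, max and last y-values, which preserves the visual envelope of the curve while shrinking the point count.

    import numpy as np

    def m4_sketch(x, y, frames):
        # uniform datum count per frame (ignores the ragged remainder
        # handling the real routine does via the divmod above)
        n = len(x) // frames
        out = []
        for i in range(frames):
            ys = y[i * n:(i + 1) * n]
            out.append((ys[0], ys.min(), ys.max(), ys[-1]))
        return np.array(out)

    x = np.arange(1000)
    y = np.sin(x / 20)
    print(m4_sketch(x, y, frames=10).shape)  # (10, 4)
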

@@ -43,8 +43,8 @@ log = get_logger(__name__)
 # latency (in terms of perceived lag in cross hair) so really be sure
 # there's an improvement if you want to change it!

-_mouse_rate_limit = 60  # TODO; should we calc current screen refresh rate?
-_debounce_delay = 0
+_mouse_rate_limit = 120  # TODO; should we calc current screen refresh rate?
+_debounce_delay = 1 / 40
 _ch_label_opac = 1


@@ -98,30 +98,25 @@ class LineDot(pg.CurvePoint):
         ev: QtCore.QEvent,

     ) -> bool:
-        if (
-            not isinstance(ev, QtCore.QDynamicPropertyChangeEvent)
-            or self.curve() is None
-        ):
+        if not isinstance(
+            ev, QtCore.QDynamicPropertyChangeEvent
+        ) or self.curve() is None:
             return False

         # TODO: get rid of this ``.getData()`` and
         # make a more pythonic api to retreive backing
         # numpy arrays...
-        # (x, y) = self.curve().getData()
-        # index = self.property('index')
-        # # first = self._plot._arrays['ohlc'][0]['index']
-        # # first = x[0]
-        # # i = index - first
-        # if index:
-        #     i = round(index - x[0])
-        #     if i > 0 and i < len(y):
-        #         newPos = (index, y[i])
-        #         QtWidgets.QGraphicsItem.setPos(
-        #             self,
-        #             *newPos,
-        #         )
-        #         return True
+        (x, y) = self.curve().getData()
+        index = self.property('index')
+        # first = self._plot._arrays['ohlc'][0]['index']
+        # first = x[0]
+        # i = index - first
+        if index:
+            i = round(index - x[0])
+            if i > 0 and i < len(y):
+                newPos = (index, y[i])
+                QtWidgets.QGraphicsItem.setPos(self, *newPos)
+                return True

         return False

@@ -196,9 +191,6 @@ class ContentsLabel(pg.LabelItem):

         self.setText(
             "<b>i</b>:{index}<br/>"
-            # NB: these fields must be indexed in the correct order via
-            # the slice syntax below.
-            "<b>epoch</b>:{}<br/>"
             "<b>O</b>:{}<br/>"
             "<b>H</b>:{}<br/>"
             "<b>L</b>:{}<br/>"
@@ -206,15 +198,7 @@ class ContentsLabel(pg.LabelItem):
             "<b>V</b>:{}<br/>"
             "<b>wap</b>:{}".format(
                 *array[index - first][
-                    [
-                        'time',
-                        'open',
-                        'high',
-                        'low',
-                        'close',
-                        'volume',
-                        'bar_wap',
-                    ]
+                    ['open', 'high', 'low', 'close', 'volume', 'bar_wap']
                 ],
                 name=name,
                 index=index,
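
Aside: a quick numpy note (not from the diff) on the multi-field indexing used above — listing field names selects and *orders* the columns pulled from a structured array (or scalar), so the label's format slots line up with the field order given.

    import numpy as np

    row = np.zeros(1, dtype=[('open', float), ('close', float), ('time', int)])
    row['open'], row['close'], row['time'] = 1.0, 2.0, 1640995200

    # fields come back in the order requested, not dtype order
    print(row[0][['time', 'open', 'close']])  # (1640995200, 1., 2.)
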
@@ -259,13 +243,13 @@ class ContentsLabels:
     def update_labels(
         self,
         index: int,
+        # array_name: str,

     ) -> None:
+        # for name, (label, update) in self._labels.items():
         for chart, name, label, update in self._labels:

-            flow = chart._flows[name]
-            array = flow.shm.array
+            array = chart._arrays[name]

             if not (
                 index >= 0
                 and index < array[-1]['index']
@@ -274,6 +258,8 @@ class ContentsLabels:
                 print('WTF out of range?')
                 continue

+            # array = chart._arrays[name]
+
             # call provided update func with data point
             try:
                 label.show()
@@ -475,12 +461,9 @@ class Cursor(pg.GraphicsObject):
     ) -> LineDot:
         # if this plot contains curves add line dot "cursors" to denote
         # the current sample under the mouse
-        main_flow = plot._flows[plot.name]
-        # read out last index
-        i = main_flow.shm.array[-1]['index']
         cursor = LineDot(
             curve,
-            index=i,
+            index=plot._arrays[plot.name][-1]['index'],
             plot=plot
         )
         plot.addItem(cursor)

@@ -18,37 +18,83 @@
 Fast, smooth, sexy curves.

 """
-from contextlib import contextmanager as cm
-from typing import Optional, Callable
+from typing import Optional

 import numpy as np
 import pyqtgraph as pg
-from PyQt5 import QtWidgets
+from PyQt5 import QtGui, QtWidgets
 from PyQt5.QtWidgets import QGraphicsItem
 from PyQt5.QtCore import (
     Qt,
     QLineF,
     QSizeF,
     QRectF,
-    # QRect,
     QPointF,
 )
-from PyQt5.QtGui import (
-    QPainter,
-    QPainterPath,
-)
 from .._profile import pg_profile_enabled, ms_slower_then
 from ._style import hcolor
-# from ._compression import (
-#     # ohlc_to_m4_line,
-#     ds_m4,
-# )
+from ._compression import (
+    # ohlc_to_m4_line,
+    ds_m4,
+)
 from ..log import get_logger


 log = get_logger(__name__)


+def step_path_arrays_from_1d(
+    x: np.ndarray,
+    y: np.ndarray,
+    include_endpoints: bool = False,
+
+) -> (np.ndarray, np.ndarray):
+    '''
+    Generate a "step mode" curve aligned with OHLC style bars
+    such that each segment spans each bar (aka "centered" style).
+
+    '''
+    y_out = y.copy()
+    x_out = x.copy()
+    x2 = np.empty(
+        # the data + 2 endpoints on either end for
+        # "termination of the path".
+        (len(x) + 1, 2),
+        # we want to align with OHLC or other sampling style
+        # bars likely so we need fractinal values
+        dtype=float,
+    )
+    x2[0] = x[0] - 0.5
+    x2[1] = x[0] + 0.5
+    x2[1:] = x[:, np.newaxis] + 0.5
+
+    # flatten to 1-d
+    x_out = x2.reshape(x2.size)
+
+    # we create a 1d with 2 extra indexes to
+    # hold the start and (current) end value for the steps
+    # on either end
+    y2 = np.empty((len(y), 2), dtype=y.dtype)
+    y2[:] = y[:, np.newaxis]
+
+    y_out = np.empty(
+        2*len(y) + 2,
+        dtype=y.dtype
+    )
+
+    # flatten and set 0 endpoints
+    y_out[1:-1] = y2.reshape(y2.size)
+    y_out[0] = 0
+    y_out[-1] = 0
+
+    if not include_endpoints:
+        return x_out[:-1], y_out[:-1]
+
+    else:
+        return x_out, y_out


 _line_styles: dict[str, int] = {
     'solid': Qt.PenStyle.SolidLine,
     'dash': Qt.PenStyle.DashLine,
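
Aside: a standalone sketch (not from the diff) reproducing what `step_path_arrays_from_1d()` above emits with endpoints included — each (x, y) sample becomes a flat-topped segment spanning the unit-wide bar centered on x, with 0-valued endpoints terminating the path. The `np.repeat` formulation here is an equivalent rewrite, not the function's own code.

    import numpy as np

    x = np.array([0., 1., 2.])
    y = np.array([10., 12., 11.])

    # each step spans [x - 0.5, x + 0.5]
    x_out = np.repeat(np.append(x, x[-1] + 1) - 0.5, 2)[1:-1]
    y_out = np.repeat(y, 2)

    # prepend/append zero endpoints so the path closes to the x-axis
    x_out = np.concatenate([[x[0] - 0.5], x_out, [x[-1] + 0.5]])
    y_out = np.concatenate([[0.], y_out, [0.]])

    print(x_out)  # [-0.5 -0.5  0.5  0.5  1.5  1.5  2.5  2.5]
    print(y_out)  # [ 0.  10.  10.  12.  12.  11.  11.   0. ]
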
@@ -57,43 +103,24 @@ _line_styles: dict[str, int] = {
 }


-class Curve(pg.GraphicsObject):
+class FastAppendCurve(pg.GraphicsObject):
     '''
-    A faster, simpler, append friendly version of
-    ``pyqtgraph.PlotCurveItem`` built for highly customizable real-time
-    updates.
+    A faster, append friendly version of ``pyqtgraph.PlotCurveItem``
+    built for real-time data updates.

-    This type is a much stripped down version of a ``pyqtgraph`` style
-    "graphics object" in the sense that the internal lower level
-    graphics which are drawn in the ``.paint()`` method are actually
-    rendered outside of this class entirely and instead are assigned as
-    state (instance vars) here and then drawn during a Qt graphics
-    cycle.
-
-    The main motivation for this more modular, composed design is that
-    lower level graphics data can be rendered in different threads and
-    then read and drawn in this main thread without having to worry
-    about dealing with Qt's concurrency primitives. See
-    ``piker.ui._flows.Renderer`` for details and logic related to lower
-    level path generation and incremental update. The main differences in
-    the path generation code include:
-
-    - avoiding regeneration of the entire historical path where possible
-      and instead only updating the "new" segment(s) via a ``numpy``
-      array diff calc.
-    - here, the "last" graphics datum-segment is drawn independently
-      such that near-term (high frequency) discrete-time-sampled style
-      updates don't trigger a full path redraw.
+    The main difference is avoiding regeneration of the entire
+    historical path where possible and instead only updating the "new"
+    segment(s) via a ``numpy`` array diff calc. Further the "last"
+    graphic segment is drawn independently such that near-term (high
+    frequency) discrete-time-sampled style updates don't trigger a full
+    path redraw.

     '''
-    # sub-type customization methods
-    sub_br: Optional[Callable] = None
-    sub_paint: Optional[Callable] = None
-    declare_paintables: Optional[Callable] = None
-
     def __init__(
         self,

+        x: np.ndarray,
+        y: np.ndarray,
         *args,

         step_mode: bool = False,
@@ -107,25 +134,29 @@ class FastAppendCurve(pg.GraphicsObject):

     ) -> None:

-        self._name = name
-
         # brutaaalll, see comments within..
-        self.yData = None
-        self.xData = None
+        self._y = self.yData = y
+        self._x = self.xData = x

-        # self._last_cap: int = 0
-        self.path: Optional[QPainterPath] = None
-
-        # additional path used for appends which tries to avoid
-        # triggering an update/redraw of the presumably larger
-        # historical ``.path`` above.
+        self._name = name
+        self.path: Optional[QtGui.QPainterPath] = None
         self.use_fpath = use_fpath
-        self.fast_path: Optional[QPainterPath] = None
+        self.fast_path: Optional[QtGui.QPainterPath] = None
+
+        self._ds_cache: dict = {}

         # TODO: we can probably just dispense with the parent since
         # we're basically only using the pen setting now...
         super().__init__(*args, **kwargs)

+        # self._xrange: tuple[int, int] = self.dataBounds(ax=0)
+        self._xrange: Optional[tuple[int, int]] = None
+
+        # self._last_draw = time.time()
+        self._in_ds: bool = False
+        self._last_uppx: float = 0
+
         # all history of curve is drawn in single px thickness
         pen = pg.mkPen(hcolor(color))
         pen.setStyle(_line_styles[style])
@@ -139,43 +170,29 @@ class FastAppendCurve(pg.GraphicsObject):
         # self.last_step_pen = pg.mkPen(hcolor(color), width=2)
         self.last_step_pen = pg.mkPen(pen, width=2)

-        # self._last_line: Optional[QLineF] = None
-        self._last_line = QLineF()
-        self._last_w: float = 1
+        self._last_line: Optional[QLineF] = None
+        self._last_step_rect: Optional[QRectF] = None

         # flat-top style histogram-like discrete curve
-        # self._step_mode: bool = step_mode
+        self._step_mode: bool = step_mode

         # self._fill = True
         self._brush = pg.functions.mkBrush(hcolor(fill_color or color))

-        # NOTE: this setting seems to mostly prevent redraws on mouse
-        # interaction which is a huge boon for avg interaction latency.
-
         # TODO: one question still remaining is if this makes trasform
         # interactions slower (such as zooming) and if so maybe if/when
         # we implement a "history" mode for the view we disable this in
         # that mode?
-        # don't enable caching by default for the case where the
-        # only thing drawn is the "last" line segment which can
-        # have a weird artifact where it won't be fully drawn to its
-        # endpoint (something we saw on trade rate curves)
-        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
-
-        # XXX: see explanation for different caching modes:
-        # https://stackoverflow.com/a/39410081
-        # seems to only be useful if we don't re-generate the entire
-        # QPainterPath every time
-        # curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
-
-        # don't ever use this - it's a colossal nightmare of artefacts
-        # and is disastrous for performance.
-        # curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)
-
-        # allow sub-type customization
-        declare = self.declare_paintables
-        if declare:
-            declare()
+        if step_mode:
+            # don't enable caching by default for the case where the
+            # only thing drawn is the "last" line segment which can
+            # have a weird artifact where it won't be fully drawn to its
+            # endpoint (something we saw on trade rate curves)
+            self.setCacheMode(
+                QGraphicsItem.DeviceCoordinateCache
+            )
+
+        self.update()

         # TODO: probably stick this in a new parent
         # type which will contain our own version of
@@ -199,18 +216,462 @@ class FastAppendCurve(pg.GraphicsObject):
         vr = self.viewRect()
         l, r = int(vr.left()), int(vr.right())

+        if not self._xrange:
+            return 0
+
         start, stop = self._xrange
         lbar = max(l, start)
         rbar = min(r, stop)

-        return vb.mapViewToDevice(
+        return round(vb.mapViewToDevice(
             QLineF(lbar, 0, rbar, 0)
-        ).length()
+        ).length())
+
+    # def should_ds_or_redraw(
+    #     self,
+
+    # ) -> tuple[bool, bool]:
+
+    #     uppx = self.x_uppx()
+    #     px_width = self.px_width()
+    #     if not px_width:
+    #         return False, False
+
+    #     # uppx_diff = abs(uppx - self._last_uppx)
+    #     uppx_diff = (uppx - self._last_uppx)
+    #     self._last_uppx = uppx
+
+    #     should_redraw: bool = False
+    #     should_ds: bool = self._in_ds
+
+    #     # print(uppx_diff)
+
+    #     if (
+    #         uppx <= 8
+    #     ):
+    #         # trigger redraw or original non-downsampled data
+    #         if self._in_ds:
+    #             print('REVERTING BACK TO SRC DATA')
+    #             # clear downsampled curve(s) and expect
+    #             # refresh of path segments.
+    #             should_redraw = True
+
+    #     elif (
+    #         uppx_diff >= 1
+    #         or uppx_diff <= -1
+    #         or self._step_mode and abs(uppx_diff) >= 1
+    #     ):
+    #         log.info(
+    #             f'{self._name} downsampler change: {self._last_uppx} -> {uppx}'
+    #         )
+    #         should_ds = {'px_width': px_width, 'uppx': uppx}
+    #         should_redraw = True
+
+    #     if should_ds:
+    #         should_ds = {'px_width': px_width, 'uppx': uppx}
+
+    #     return should_ds, should_redraw
+
+    def downsample(
+        self,
+        x,
+        y,
+        px_width,
+        uppx,
+
+    ) -> tuple[np.ndarray, np.ndarray]:
+
+        # downsample whenever more then 1 pixels per datum can be shown.
+        # always refresh data bounds until we get diffing
+        # working properly, see above..
+        bins, x, y = ds_m4(
+            x,
+            y,
+            px_width=px_width,
+            uppx=uppx,
+            log_scale=bool(uppx)
+        )
+        x = np.broadcast_to(x[:, None], y.shape)
+        # x = (x + np.array([-0.43, 0, 0, 0.43])).flatten()
+        x = (x + np.array([-0.5, 0, 0, 0.5])).flatten()
+        y = y.flatten()
+
+        # presumably?
+        # self._in_ds = True
+        return x, y
+
+    def maybe_downsample(
+        self,
+    ) -> None:
+        '''
+        Simple update call but with previously cached arrays data.
+
+        '''
+        # print('DS CALLED FROM INTERACTION?')
+        # presume this is a so called "interaction update", see
+        # ``ChartView.maybe_downsample_graphics()``.
+        self.update_from_array(self._x, self._y)
+
+    def update_from_array(
+        self,
+
+        # full array input history
+        x: np.ndarray,
+        y: np.ndarray,
+
+        # pre-sliced array data that's "in view"
+        x_iv: np.ndarray,
+        y_iv: np.ndarray,
+
+        view_range: Optional[tuple[int, int]] = None,
+
+    ) -> QtGui.QPainterPath:
+        '''
+        Update curve from input 2-d data.
+
+        Compare with a cached "x-range" state and (pre/a)ppend based on
+        a length diff.
+
+        '''
+        profiler = pg.debug.Profiler(
+            msg=f'FastAppendCurve.update_from_array(): `{self._name}`',
+            disabled=not pg_profile_enabled(),
+            gt=ms_slower_then,
+        )
+        flip_cache = False
+        draw_full_path = True
+
+        # XXX: lol brutal, the internals of `CurvePoint` (inherited by
+        # our `LineDot`) required ``.getData()`` to work..
+        self.xData = x
+        self.yData = y
+
+        # update internal array refs
+        self._x, self._y = x, y
+
+        # compute the length diffs between the first/last index entry in
+        # the input data and the last indexes we have on record from the
+        # last time we updated the curve index.
+        if self._xrange:
+            istart, istop = self._xrange
+        else:
+            self._xrange = istart, istop = x[0], x[-1]
+
+        prepend_length = int(istart - x[0])
+        append_length = int(x[-1] - istop)
+
+        # print(f"xrange: {self._xrange}")
+        if view_range:
+            li, ri = view_range
+            # x, y = x[lbar:rbar], y[lbar:rbar]
+            # x, y = x_iv, y_iv
+            profiler(f'view range slice {view_range}')
+
+            # if self._name == 'OHLC':
+            #     print(f'view range slice {view_range}')
+
+        # ds state checking
+        uppx = self.x_uppx()
+        px_width = self.px_width()
+        uppx_diff = (uppx - self._last_uppx)
+
+        # step mode: draw flat top discrete "step"
+        # over the index space for each datum.
+        if self._step_mode:
+
+            # TODO: numba this bish
+            # x_out, y_out = step_path_arrays_from_1d(
+            #     x[:-1], y[:-1]
+            # )
+
+            x_iv_out, y_iv_out = step_path_arrays_from_1d(
+                x_iv[:-1], y_iv[:-1]
+            )
+
+            profiler('generated step arrays')
+
+        else:
+            # by default we only pull data up to the last (current) index
+            # x_out, y_out = x[:-1], y[:-1]
+            x_iv_out, y_iv_out = x_iv[:-1], y_iv[:-1]
+            profiler('sliced array history')
+
+        # by default plan to draw the source ouput that's "in view"
+        x_to_path, y_to_path = x_iv_out, y_iv_out
+
+        ds_key = px_width, uppx
+
+        # always re-ds if we were dsed but the input range changes.
+        if self._in_ds:
+            # slice out the portion of the downsampled data that is
+            # "in view" and **only** draw a path for that.
+
+            entry = self._ds_cache.get(ds_key)
+            if entry:
+                x_ds_out, y_ds_out, first_i, last_i = entry
+
+                # if last_i == x[-1]:
+                log.info(
+                    f'{self._name} has cached ds {ds_key} -> {entry}'
+                )
+                prepend_length = int(first_i - ri)
+                append_length = int(ri - last_i)
+
+                # x_to_path = x_ds_out
+                # y_to_path = y_ds_out
+
+            # else:
+            #     log.warn(f'{self._name} ds updates unhandled!')
+            # DS only the new part?
+
+        # check for downsampling conditions
+        if (
+            # std m4 downsample conditions
+            uppx_diff >= 4
+            or uppx_diff <= -2
+            or self._step_mode and abs(uppx_diff) >= 2
+
+            # or self._in_ds and px_width > 1
+        ):
+            # if not uppx_diff >= 1:
+            log.info(
+                f'{self._name} sampler change: {self._last_uppx} -> {uppx}'
+            )
+            self._last_uppx = uppx
+            # should_ds = {'px_width': px_width, 'uppx': uppx}
+
+            # if self._step_mode:
+            #     # TODO: numba this bish
+            #     x_out, y_out = step_path_arrays_from_1d(
+            #         x_iv[:-1], y_iv[:-1]
+            #     )
+            # else:
+            #     # by default we only pull data up to the last (current) index
+            #     x_out, y_out = x_iv[:-1], y_iv[:-1]
+
+            x_ds_out, y_ds_out = self.downsample(
+                x_iv_out,
+                y_iv_out,
+
+                px_width=px_width,
+                uppx=uppx,
+            )
+            profiler(
+                f'path downsample ds_key={ds_key}\n'
+                f'{x_iv_out.size}, {y_iv_out.size}'
+            )
+
+            # cache downsampled outputs
+            self._ds_cache[ds_key] = (
+                x_ds_out,
+                y_ds_out,
+                x[0],
+                x[-1],
+            )
+
+            x_to_path = x_ds_out
+            y_to_path = y_ds_out
+
+            self._in_ds = True
+
+        elif (
+            uppx <= 8
+            and self._in_ds
+        ):
+            # we should de-downsample back to our original
+            # source data so we clear our path data in prep
+            # to generate a new one from original source data.
+            if self.path:
+                self.path.clear()
+
+            if self.fast_path:
+                self.fast_path.clear()
+
+            log.info(f'DEDOWN -> {self._name}')
+            profiler('path reversion to non-ds data')
+
+            self._in_ds = False
+
+        # render path graphics
+        # log.info(
+        #     # f'{self._name}: last sizes {x_to_path.size}, {y_to_path.size}',
+        #     f'{self._name}: sizes {x_to_path.size}, {y_to_path.size}',
+        # )
+
+        self._last_topaths = x_to_path, y_to_path
+
+        no_path_yet = self.path is None
+
+        if draw_full_path:
+            self.path = pg.functions.arrayToQPath(
+                x_to_path,
+                y_to_path,
+                connect='all',
+                finiteCheck=False,
+                path=self.path,
+            )
+            profiler('generated FULL PATH -> {self._name}')
+
+            # reserve mem allocs see:
+            # - https://doc.qt.io/qt-5/qpainterpath.html#reserve
+            # - https://doc.qt.io/qt-5/qpainterpath.html#capacity
+            # - https://doc.qt.io/qt-5/qpainterpath.html#clear
+            # XXX: right now this is based on had hoc checks on a
+            # hidpi 3840x2160 4k monitor but we should optimize for
+            # the target display(s) on the sys.
+            if no_path_yet:
+                self.path.reserve(int(500e3))
+
+        self._last_vr = view_range
+
+        # TODO: get this piecewise prepend working - right now it's
+        # giving heck on vwap...
+        # if prepend_length:
+        #     breakpoint()
+
+        #     prepend_path = pg.functions.arrayToQPath(
+        #         x[0:prepend_length],
+        #         y[0:prepend_length],
+        #         connect='all'
+        #     )
+
+        #     # swap prepend path in "front"
+        #     old_path = self.path
+        #     self.path = prepend_path
+        #     # self.path.moveTo(new_x[0], new_y[0])
+        #     self.path.connectPath(old_path)
+
+        # elif (
+        #     append_length > 0
+        # ):
+        #     if self._step_mode:
+        #         new_x, new_y = step_path_arrays_from_1d(
+        #             x[-append_length - 2:-1],
+        #             y[-append_length - 2:-1],
+        #         )
+        #         # [1:] since we don't need the vertical line normally at
+        #         # the beginning of the step curve taking the first (x,
+        #         # y) poing down to the x-axis **because** this is an
+        #         # appended path graphic.
+        #         new_x = new_x[1:]
+        #         new_y = new_y[1:]
+
+        #     else:
+        #         # print(f"append_length: {append_length}")
+        #         new_x = x[-append_length - 2:-1]
+        #         new_y = y[-append_length - 2:-1]
+        #         # print((new_x, new_y))
+
+        #     profiler('diffed append arrays')
+
+        #     if should_ds:
+        #         new_x, new_y = self.downsample(
+        #             new_x,
+        #             new_y,
+        #             **should_ds,
+        #         )
+        #         profiler(f'fast path downsample redraw={should_ds}')
+
+        #     append_path = pg.functions.arrayToQPath(
+        #         new_x,
+        #         new_y,
+        #         connect='all',
+        #         finiteCheck=False,
+        #         path=self.fast_path,
+        #     )
+
+        #     if self.use_fpath:
+        #         # an attempt at trying to make append-updates faster..
+        #         if self.fast_path is None:
+        #             self.fast_path = append_path
+        #             self.fast_path.reserve(int(6e3))
+        #         else:
+        #             self.fast_path.connectPath(append_path)
+        #             size = self.fast_path.capacity()
+        #             profiler(f'connected fast path w size: {size}')
+
+        #             # print(f"append_path br: {append_path.boundingRect()}")
+        #             # self.path.moveTo(new_x[0], new_y[0])
+        #             # path.connectPath(append_path)
+
+        #             # XXX: lol this causes a hang..
+        #             # self.path = self.path.simplified()
+        #     else:
+        #         size = self.path.capacity()
+        #         profiler(f'connected history path w size: {size}')
+        #         self.path.connectPath(append_path)
+
+        # other merging ideas:
+        # https://stackoverflow.com/questions/8936225/how-to-merge-qpainterpaths
+        # path.addPath(append_path)
+        # path.closeSubpath()
+
+        # TODO: try out new work from `pyqtgraph` main which
+        # should repair horrid perf:
+        # https://github.com/pyqtgraph/pyqtgraph/pull/2032
+        # ok, nope still horrible XD
+        # if self._fill:
+        #     # XXX: super slow set "union" op
+        #     self.path = self.path.united(append_path).simplified()
+
+        #     self.disable_cache()
+        #     flip_cache = True
+
+        # XXX: do we need this any more?
+        # if (
+        #     self._step_mode
+        # ):
+        #     self.disable_cache()
+        #     flip_cache = True
+
+        x_last = x[-1]
+        y_last = y[-1]
+
+        # draw the "current" step graphic segment so it lines up with
+        # the "middle" of the current (OHLC) sample.
+        if self._step_mode:
+            self._last_line = QLineF(
+                x_last - 0.5, 0,
+                x_last + 0.5, 0,
+            )
+            self._last_step_rect = QRectF(
+                x_last - 0.5, 0,
+                x_last + 0.5, y_last
+            )
+            # print(
+            #     f"path br: {self.path.boundingRect()}",
+            #     f"fast path br: {self.fast_path.boundingRect()}",
+            #     f"last rect br: {self._last_step_rect}",
+            # )
+        else:
+            # print((x[-1], y_last))
+            self._last_line = QLineF(
+                x[-2], y[-2],
+                x[-1], y_last
+            )
+
+        profiler('draw last segment')
+
+        # trigger redraw of path
+        # do update before reverting to cache mode
+        # self.prepareGeometryChange()
+        self.update()
+        profiler('.update()')
+
+        if flip_cache:
+            # XXX: seems to be needed to avoid artifacts (see above).
|
||||||
|
self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
|
||||||
|
|
||||||
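The hunk above rebuilds the full QPainterPath via pyqtgraph's `arrayToQPath` and pre-reserves element capacity (a Qt 5.13+ API) to avoid repeated reallocations on later appends. A minimal standalone sketch of that pattern; the array contents and the 500e3 reserve figure are illustrative, not taken from the repo:

    import numpy as np
    import pyqtgraph as pg

    x = np.arange(10_000, dtype=float)
    y = np.random.default_rng().normal(size=x.size).cumsum()

    # build a connected polyline path from flat arrays; `connect='all'`
    # joins every consecutive sample with a line segment.
    path = pg.functions.arrayToQPath(x, y, connect='all')

    # pre-allocate element storage so follow-up appends don't realloc
    # (see QPainterPath.reserve()/capacity() in the Qt docs linked above).
    path.reserve(int(500e3))
    print(path.capacity(), path.elementCount())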
     # XXX: lol brutal, the internals of `CurvePoint` (inherited by
     # our `LineDot`) required ``.getData()`` to work..
     def getData(self):
-        return self.xData, self.yData
+        return self._x, self._y
 
+    # TODO: drop the above after ``Cursor`` re-work
+    def get_arrays(self) -> tuple[np.ndarray, np.ndarray]:
+        return self._x, self._y
 
     def clear(self):
         '''
 
@@ -233,18 +694,25 @@ class Curve(pg.GraphicsObject):
         # self.fast_path.clear()
         self.fast_path = None
 
-    @cm
-    def reset_cache(self) -> None:
+    # self.disable_cache()
+    # self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
+
+    def disable_cache(self) -> None:
+        '''
+        Disable the use of the pixel coordinate cache and trigger a geo event.
+
+        '''
+        # XXX: pretty annoying but, without this there's little
+        # artefacts on the append updates to the curve...
         self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
-        yield
-        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
+        self.prepareGeometryChange()
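The replaced `reset_cache()` context manager toggles the Qt device-coordinate pixel cache around a geometry-changing update. A sketch of the same toggle written as a free function over any QGraphicsItem:

    from contextlib import contextmanager
    from PyQt5.QtWidgets import QGraphicsItem

    @contextmanager
    def reset_cache(item: QGraphicsItem):
        # drop the device-coordinate pixel cache for the duration of
        # a geometry-changing update, then re-enable it on exit.
        item.setCacheMode(QGraphicsItem.NoCache)
        try:
            yield item
        finally:
            item.setCacheMode(QGraphicsItem.DeviceCoordinateCache)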
     def boundingRect(self):
         '''
         Compute and then cache our rect.
         '''
         if self.path is None:
-            return QPainterPath().boundingRect()
+            return QtGui.QPainterPath().boundingRect()
         else:
             # dynamically override this method after initial
             # path is created to avoid requiring the above None check
 
@@ -256,7 +724,6 @@ class Curve(pg.GraphicsObject):
         Post init ``.boundingRect()``.
 
         '''
-        # hb = self.path.boundingRect()
         hb = self.path.controlPointRect()
         hb_size = hb.size()
 
@@ -264,60 +731,17 @@
         if fp:
             fhb = fp.controlPointRect()
             hb_size = fhb.size() + hb_size
 
         # print(f'hb_size: {hb_size}')
 
-        # if self._last_step_rect:
-        #     hb_size += self._last_step_rect.size()
-
-        # if self._line:
-        #     br = self._last_step_rect.bottomRight()
-
-        # tl = QPointF(
-        #     # self._vr[0],
-        #     # hb.topLeft().y(),
-        #     # 0,
-        #     # hb_size.height() + 1
-        # )
-
-        # br = self._last_step_rect.bottomRight()
-
-        w = hb_size.width()
-        h = hb_size.height()
-
-        sbr = self.sub_br
-        if sbr:
-            w, h = self.sub_br(w, h)
-        else:
-            # assume plain line graphic and use
-            # default unit step in each direction.
-
-            # only on a plain line do we include
-            # an extra index step's worth of width
-            # since in the step case the end of the curve
-            # actually terminates earlier so we don't need
-            # this for the last step.
-            w += self._last_w
-            # ll = self._last_line
-            h += 1  # ll.y2() - ll.y1()
-
-        # br = QPointF(
-        #     self._vr[-1],
-        #     # tl.x() + w,
-        #     tl.y() + h,
-        # )
+        w = hb_size.width() + 1
+        h = hb_size.height() + 1
 
         br = QRectF(
 
             # top left
-            # hb.topLeft()
-            # tl,
             QPointF(hb.topLeft()),
 
-            # br,
             # total size
-            # QSizeF(hb_size)
-            # hb_size,
             QSizeF(w, h)
         )
         # print(f'bounding rect: {br}')
 
@@ -325,36 +749,41 @@
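Both sides of the `boundingRect` hunk swap in a cheaper method once a path exists, so the `None` check runs only on the very first call. A sketch of that instance-level method override (a plain class is used here for brevity; the real item subclasses pg.GraphicsObject):

    from PyQt5.QtGui import QPainterPath

    class PathItem:
        def __init__(self) -> None:
            self.path = None

        def boundingRect(self):
            if self.path is None:
                return QPainterPath().boundingRect()
            # a path exists now: override on the *instance* so all
            # future lookups bypass this None check entirely.
            self.boundingRect = self._br
            return self._br()

        def _br(self):
            # controlPointRect() is cheaper than boundingRect() for
            # curves since it skips exact bezier bounds computation.
            return self.path.controlPointRect()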
     def paint(
         self,
-        p: QPainter,
+        p: QtGui.QPainter,
         opt: QtWidgets.QStyleOptionGraphicsItem,
         w: QtWidgets.QWidget
 
     ) -> None:
 
         profiler = pg.debug.Profiler(
-            msg=f'Curve.paint(): `{self._name}`',
+            msg=f'FastAppendCurve.paint(): `{self._name}`',
             disabled=not pg_profile_enabled(),
-            ms_threshold=ms_slower_then,
+            # disabled=True,
+            gt=ms_slower_then,
         )
 
-        sub_paint = self.sub_paint
-        if sub_paint:
-            sub_paint(p, profiler)
-
-        p.setPen(self.last_step_pen)
-        p.drawLine(self._last_line)
-        profiler('.drawLine()')
-        p.setPen(self._pen)
+        if (
+            self._step_mode
+            and self._last_step_rect
+        ):
+            brush = self._brush
+
+            # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
+            # p.drawRect(self._last_step_rect)
+            p.fillRect(self._last_step_rect, brush)
+            profiler('.fillRect()')
+
+        if self._last_line:
+            p.setPen(self.last_step_pen)
+            p.drawLine(self._last_line)
+            profiler('.drawLine()')
+            p.setPen(self._pen)
 
         path = self.path
-        # cap = path.capacity()
-        # if cap != self._last_cap:
-        #     print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
-        #     self._last_cap = cap
 
         if path:
             p.drawPath(path)
-            profiler(f'.drawPath(path): {path.capacity()}')
+            profiler('.drawPath(path)')
 
         fp = self.fast_path
         if fp:
 
@@ -368,117 +797,3 @@ class Curve(pg.GraphicsObject):
         # if self._fill:
         #     brush = self.opts['brush']
         #     p.fillPath(self.path, brush)
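The paint hunk draws the live "last datum" segment with a dedicated pen before stroking the cached history path. The QPainter call sequence, reduced to its essentials (the pen and path objects are assumed supplied by the caller):

    from PyQt5.QtCore import QLineF
    from PyQt5.QtGui import QPainter, QPainterPath, QPen

    def paint_curve(
        p: QPainter,
        path: QPainterPath,
        last_line: QLineF,
        pen: QPen,
        last_step_pen: QPen,
    ) -> None:
        # the live segment gets its own (usually brighter) pen..
        p.setPen(last_step_pen)
        p.drawLine(last_line)
        # ..then restore the history pen and stroke the whole path.
        p.setPen(pen)
        if path:
            p.drawPath(path)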
-    def draw_last_datum(
-        self,
-        path: QPainterPath,
-        src_data: np.ndarray,
-        render_data: np.ndarray,
-        reset: bool,
-        array_key: str,
-
-    ) -> None:
-        # default line draw last call
-        # with self.reset_cache():
-        x = render_data['index']
-        y = render_data[array_key]
-
-        # draw the "current" step graphic segment so it
-        # lines up with the "middle" of the current
-        # (OHLC) sample.
-        self._last_line = QLineF(
-            x[-2], y[-2],
-            x[-1], y[-1],
-        )
-
-        return x, y
-
-
-# TODO: this should probably be a "downsampled" curve type
-# that draws a bar-style (but for the px column) last graphics
-# element such that the current datum in view can be shown
-# (via its max / min) even when highly zoomed out.
-class FlattenedOHLC(Curve):
-
-    def draw_last_datum(
-        self,
-        path: QPainterPath,
-        src_data: np.ndarray,
-        render_data: np.ndarray,
-        reset: bool,
-        array_key: str,
-
-    ) -> None:
-        lasts = src_data[-2:]
-        x = lasts['index']
-        y = lasts['close']
-
-        # draw the "current" step graphic segment so it
-        # lines up with the "middle" of the current
-        # (OHLC) sample.
-        self._last_line = QLineF(
-            x[-2], y[-2],
-            x[-1], y[-1]
-        )
-        return x, y
-
-
-class StepCurve(Curve):
-
-    def declare_paintables(
-        self,
-    ) -> None:
-        self._last_step_rect = QRectF()
-
-    def draw_last_datum(
-        self,
-        path: QPainterPath,
-        src_data: np.ndarray,
-        render_data: np.ndarray,
-        reset: bool,
-        array_key: str,
-
-        w: float = 0.5,
-
-    ) -> None:
-
-        # TODO: remove this and instead place all step curve
-        # updating into pre-path data render callbacks.
-        # full input data
-        x = src_data['index']
-        y = src_data[array_key]
-
-        x_last = x[-1]
-        y_last = y[-1]
-
-        # lol, commenting this makes step curves
-        # all "black" for me :eyeroll:..
-        self._last_line = QLineF(
-            x_last - w, 0,
-            x_last + w, 0,
-        )
-        self._last_step_rect = QRectF(
-            x_last - w, 0,
-            x_last + w, y_last,
-        )
-        return x, y
-
-    def sub_paint(
-        self,
-        p: QPainter,
-        profiler: pg.debug.Profiler,
-
-    ) -> None:
-        # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
-        # p.drawRect(self._last_step_rect)
-        p.fillRect(self._last_step_rect, self._brush)
-        profiler('.fillRect()')
-
-    def sub_br(
-        self,
-        path_w: float,
-        path_h: float,
-
-    ) -> (float, float):
-        # passthrough
-        return path_w, path_h
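The removed `StepCurve.draw_last_datum()` centers a half-unit-wide rect on the last index so the live step lines up with the middle of the current OHLC sample. A sketch of that geometry; note this version passes the geometric width `2 * w` where the diff passes `x_last + w` as the rect's width argument:

    from PyQt5.QtCore import QLineF, QRectF

    def last_step_graphics(
        x_last: float,
        y_last: float,
        w: float = 0.5,
    ) -> tuple[QLineF, QRectF]:
        # baseline segment at y=0 spanning the sample's x-extent..
        line = QLineF(x_last - w, 0, x_last + w, 0)
        # ..and the filled rect reaching up to the last value.
        rect = QRectF(x_last - w, 0, 2 * w, y_last)
        return line, rect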
@@ -29,10 +29,10 @@ from typing import Optional, Any, Callable
 import numpy as np
 import tractor
 import trio
-import pendulum
 import pyqtgraph as pg
+from PyQt5.QtCore import QLineF
 
-# from .. import brokers
+from .. import brokers
 from ..data.feed import open_feed
 from ._axes import YAxisLabel
 from ._chart import (
 
@@ -48,22 +48,21 @@ from ._fsp import (
     open_vlm_displays,
 )
 from ..data._sharedmem import ShmArray
-from ..data._source import tf_in_1s
 from ._forms import (
     FieldsForm,
     mk_order_pane_layout,
 )
 from .order_mode import open_order_mode
-from .._profile import (
-    pg_profile_enabled,
-    ms_slower_then,
-)
+# from .._profile import (
+#     pg_profile_enabled,
+#     ms_slower_then,
+# )
 from ..log import get_logger
 
 log = get_logger(__name__)
 
 # TODO: load this from a config.toml!
-_quote_throttle_rate: int = 22  # Hz
+_quote_throttle_rate: int = 12  # Hz
 
 
 # a working tick-type-classes template
 
@@ -74,20 +73,12 @@ _tick_groups = {
 }
 
-# TODO: delegate this to each `Flow.maxmin()` which includes
-# caching and further we should implement the following stream based
-# approach, likely with ``numba``:
-# https://arxiv.org/abs/cs/0610046
-# https://github.com/lemire/pythonmaxmin
 def chart_maxmin(
     chart: ChartPlotWidget,
-    ohlcv_shm: ShmArray,
     vlm_chart: Optional[ChartPlotWidget] = None,
 
 ) -> tuple[
 
     tuple[int, int, int, int],
 
     float,
     float,
     float,
 
@@ -96,26 +87,34 @@ def chart_maxmin(
     Compute max and min datums "in view" for range limits.
 
     '''
-    last_bars_range = chart.bars_range()
-    out = chart.maxmin()
+    # TODO: implement this
+    # https://arxiv.org/abs/cs/0610046
+    # https://github.com/lemire/pythonmaxmin
 
-    if out is None:
+    array = chart._arrays[chart.name]
+    ifirst = array[0]['index']
+
+    last_bars_range = chart.bars_range()
+    l, lbar, rbar, r = last_bars_range
+    in_view = array[lbar - ifirst:rbar - ifirst + 1]
+
+    if not in_view.size:
+        log.warning('Resetting chart to data')
+        chart.default_view()
         return (last_bars_range, 0, 0, 0)
 
-    mn, mx = out
+    mx, mn = np.nanmax(in_view['high']), np.nanmin(in_view['low'])
+
+    # TODO: when we start using line charts, probably want to make
+    # this an overloaded call on our `DataView`
+    # sym = chart.name
+    # mx, mn = np.nanmax(in_view[sym]), np.nanmin(in_view[sym])
 
     mx_vlm_in_view = 0
     if vlm_chart:
-        out = vlm_chart.maxmin()
-        if out:
-            _, mx_vlm_in_view = out
+        mx_vlm_in_view = np.max(in_view['volume'])
 
-    return (
-        last_bars_range,
-        mx,
-        max(mn, 0),  # presuming price can't be negative?
-        mx_vlm_in_view,
-    )
+    return last_bars_range, mx, max(mn, 0), mx_vlm_in_view
 
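`chart_maxmin()` above maps absolute bar indices into buffer-relative positions before taking nan-safe extrema over the visible slice. Equivalent standalone logic, assuming a structured array with 'index', 'high' and 'low' fields:

    import numpy as np

    def maxmin_in_view(
        array: np.ndarray,  # struct array with 'index', 'high', 'low'
        lbar: int,
        rbar: int,
    ) -> tuple[float, float]:
        ifirst = array[0]['index']
        # map absolute bar indices to relative buffer positions
        in_view = array[lbar - ifirst:rbar - ifirst + 1]
        if not in_view.size:
            return 0, 0
        return (
            np.nanmax(in_view['high']),
            np.nanmin(in_view['low']),
        )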
 @dataclass
 
@@ -184,12 +183,7 @@ async def graphics_update_loop(
     if vlm_chart:
         vlm_sticky = vlm_chart._ysticks['volume']
 
-    maxmin = partial(
-        chart_maxmin,
-        chart,
-        ohlcv,
-        vlm_chart,
-    )
+    maxmin = partial(chart_maxmin, chart, vlm_chart)
     last_bars_range: tuple[float, float]
     (
         last_bars_range,
 
@@ -263,7 +257,6 @@ async def graphics_update_loop(
         'vars': {
             'tick_margin': tick_margin,
             'i_last': i_last,
-            'i_last_append': i_last,
             'last_mx_vlm': last_mx_vlm,
             'last_mx': last_mx,
             'last_mn': last_mn,
 
@@ -272,9 +265,8 @@ async def graphics_update_loop(
 
     chart.default_view()
 
-    # main real-time quotes update loop
+    # main loop
     async for quotes in stream:
 
         ds.quotes = quotes
         quote_period = time.time() - last_quote
         quote_rate = round(
@@ -309,39 +301,88 @@ async def graphics_update_loop(
 def graphics_update_cycle(
     ds: DisplayState,
     wap_in_history: bool = False,
-    trigger_all: bool = False,  # flag used by prepend history updates
-    prepend_update_index: Optional[int] = None,
 
 ) -> None:
 
     # TODO: eventually optimize this whole graphics stack with ``numba``
     # hopefully XD
 
-    chart = ds.chart
-
     profiler = pg.debug.Profiler(
-        msg=f'Graphics loop cycle for: `{chart.name}`',
-        delayed=True,
-        disabled=not pg_profile_enabled(),
-        # disabled=True,
-        ms_threshold=ms_slower_then,
-
-        # ms_threshold=1/12 * 1e3,
+        disabled=True,  # not pg_profile_enabled(),
+        gt=1/12 * 1e3,
     )
 
     # unpack multi-referenced components
+    chart = ds.chart
     vlm_chart = ds.vlm_chart
     l1 = ds.l1
 
     ohlcv = ds.ohlcv
     array = ohlcv.array
     vars = ds.vars
     tick_margin = vars['tick_margin']
 
-    update_uppx = 16
+    update_uppx = 5
 
     for sym, quote in ds.quotes.items():
+        brange, mx_in_view, mn_in_view, mx_vlm_in_view = ds.maxmin()
+        l, lbar, rbar, r = brange
+        mx = mx_in_view + tick_margin
+        mn = mn_in_view - tick_margin
+        profiler('maxmin call')
 
         # compute the first available graphic's x-units-per-pixel
-        uppx = vlm_chart.view.x_uppx()
+        xpx = vlm_chart.view.xs_in_px()
 
+        in_view = chart.in_view(ohlcv.array)
+
+        if lbar != rbar:
+            # view box width in pxs
+            w = chart.view.boundingRect().width()
+
+            # TODO: a better way to get this?
+            # i would guess the easiest way is to just
+            # get the ``.boundingRect()`` of the curve
+            # in view but maybe there's something smarter?
+            # Currently we're just mapping the rbar, lbar to
+            # pixels via:
+            cw = chart.view.mapViewToDevice(QLineF(lbar, 0, rbar, 0)).length()
+            # is this faster?
+            # cw = chart.mapFromView(QLineF(lbar, 0 , rbar, 0)).length()
+
+            profiler(
+                f'view width pxs: {w}\n'
+                f'curve width pxs: {cw}\n'
+                f'sliced in view: {in_view.size}'
+            )
+
+        # compress bars to m4 line(s) if uppx is high enough
+        # if in_view.size > cw:
+        #     from ._compression import ds_m4, hl2mxmn
+
+        #     mxmn, x = hl2mxmn(in_view)
+        #     profiler('hl tracer')
+
+        #     nb, x, y = ds_m4(
+        #         x=x,
+        #         y=mxmn,
+        #         # TODO: this needs to actually be the width
+        #         # in pixels of the visible curve since we don't
+        #         # want to downsample any 'zeros' around the curve,
+        #         # just the values that make up the curve graphic,
+        #         # i think?
+        #         px_width=cw,
+        #     )
+        #     profiler(
+        #         'm4 downsampled\n'
+        #         f'  ds bins: {nb}\n'
+        #         f'  x.shape: {x.shape}\n'
+        #         f'  y.shape: {y.shape}\n'
+        #         f'  x: {x}\n'
+        #         f'  y: {y}\n'
+        #     )
+
+        #     assert y.size == mxmn.size
 
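The commented `ds_m4` call refers to M4 downsampling: one bin per horizontal pixel column, keeping (first, min, max, last) per bin so extrema survive the reduction. A rough numpy sketch of the binning idea only, not the actual `._compression.ds_m4` implementation:

    import numpy as np

    def m4_bins(y: np.ndarray, px_width: int) -> np.ndarray:
        # split the series into one bin per horizontal pixel and keep
        # (first, min, max, last) per bin; drawing those 4 points per
        # column is visually lossless at that zoom level.
        bins = np.array_split(y, px_width)
        return np.array([
            (b[0], b.min(), b.max(), b[-1])
            for b in bins if b.size
        ])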
         # NOTE: vlm may be written by the ``brokerd`` backend
         # even though a tick sample is not emitted.
 
@@ -349,7 +390,7 @@ def graphics_update_cycle(
         # https://github.com/pikers/piker/issues/116
 
         # NOTE: this used to be implemented in a dedicated
-        # "increment task": ``check_for_new_bars()`` but it doesn't
+        # "increment tas": ``check_for_new_bars()`` but it doesn't
         # make sense to do a whole task switch when we can just do
         # this simple index-diff and all the fsp sub-curve graphics
         # are diffed on each draw cycle anyway; so updates to the
 
@@ -360,71 +401,53 @@ def graphics_update_cycle(
         i_diff = i_step - vars['i_last']
         vars['i_last'] = i_step
 
-        append_diff = i_step - vars['i_last_append']
-
-        # update the "last datum" (aka extending the flow graphic with
-        # new data) only if the number of unit steps is >= the number of
-        # such unit steps per pixel (aka uppx). Iow, if the zoom level
-        # is such that a datum(s) update to graphics wouldn't span
-        # to a new pixel, we don't update yet.
-        do_append = (append_diff >= uppx)
-        if do_append:
-            vars['i_last_append'] = i_step
-
-        do_rt_update = uppx < update_uppx
-        # print(
-        #     f'append_diff:{append_diff}\n'
-        #     f'uppx:{uppx}\n'
-        #     f'do_append: {do_append}'
-        # )
-
-        # TODO: we should only run mxmn when we know
-        # an update is due via ``do_append`` above.
-        (
-            brange,
-            mx_in_view,
-            mn_in_view,
-            mx_vlm_in_view,
-        ) = ds.maxmin()
-        l, lbar, rbar, r = brange
-        mx = mx_in_view + tick_margin
-        mn = mn_in_view - tick_margin
-
-        profiler('`ds.maxmin()` call')
-
-        liv = r >= i_step  # the last datum is in view
-
-        if (
-            prepend_update_index is not None
-            and lbar > prepend_update_index
-        ):
-            # on a history update (usually from the FSP subsys)
-            # if the segment of history that is being prepended
-            # isn't in view there is no reason to do a graphics
-            # update.
-            log.debug('Skipping prepend graphics cycle: frame not in view')
-            return
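The removed block gates "append" updates on x-units-per-pixel: new samples only trigger a path extension once they span at least one pixel column at the current zoom. Condensed into a standalone predicate:

    def should_append(
        i_step: int,
        i_last_append: int,
        uppx: float,  # x-units (index steps) per screen pixel
    ) -> bool:
        # if the new samples don't yet cover a full pixel column at
        # the current zoom, redrawing the path changes nothing on
        # screen, so skip the update entirely.
        return (i_step - i_last_append) >= uppx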
         # don't real-time "shift" the curve to the
-        # left unless we get one of the following:
+        # left under the following conditions:
         if (
-            (
-                # i_diff > 0  # no new sample step
-                do_append
-                # and uppx < 4  # chart is zoomed out very far
-                and liv
-            )
-            or trigger_all
+            i_diff > 0  # no new sample step
+            and xpx < update_uppx  # chart is zoomed out very far
+            and r >= i_step  # the last datum isn't in view
         ):
             # TODO: we should track and compute whether the last
             # pixel in a curve should show new data based on uppx
             # and then iff update curves and shift?
             chart.increment_view(steps=i_diff)
 
         if vlm_chart:
-            vlm_chart.increment_view(steps=i_diff)
-
-        profiler('view incremented')
+            # always update y-label
+            ds.vlm_sticky.update_from_data(*array[-1][['index', 'volume']])
+
+            if (
+                # if zoomed out a lot don't update the last "bar"
+                (xpx < update_uppx or i_diff > 0)
+                and r >= i_step
+            ):
+                # TODO: make it so this doesn't have to be called
+                # once the $vlm is up?
+                vlm_chart.update_graphics_from_array('volume', array)
+
+            if (
+                mx_vlm_in_view != vars['last_mx_vlm']
+                or mx_vlm_in_view > vars['last_mx_vlm']
+            ):
+                # print(f'mx vlm: {last_mx_vlm} -> {mx_vlm_in_view}')
+                vlm_chart.view._set_yrange(
+                    yrange=(0, mx_vlm_in_view * 1.375)
+                )
+                vars['last_mx_vlm'] = mx_vlm_in_view
+
+            for curve_name, flow in vlm_chart._flows.items():
+                update_fsp_chart(
+                    vlm_chart,
+                    flow.shm,
+                    curve_name,
+                    array_key=curve_name,
+                )
+                # is this even doing anything?
+                flow.plot.vb._set_yrange(
+                    autoscale_linked_plots=False,
+                    name=curve_name,
+                )
 
         ticks_frame = quote.get('ticks', ())
 
@@ -468,24 +491,19 @@ def graphics_update_cycle(
         # current) tick first order as an optimization where we only
         # update from the last tick from each type class.
         # last_clear_updated: bool = False
+        # for typ, tick in reversed(lasts.items()):
 
         # update ohlc sampled price bars
         if (
-            do_rt_update
-            or do_append
-            or trigger_all
+            xpx < update_uppx
+            or i_diff > 0
         ):
-            chart.update_graphics_from_flow(
+            chart.update_graphics_from_array(
                 chart.name,
-                # do_append=uppx < update_uppx,
-                do_append=do_append,
+                array,
             )
 
-        # NOTE: we always update the "last" datum
-        # since the current range should at least be updated
-        # to its max/min on the last pixel.
-
-        # iterate in FIFO order per tick-frame
+        # iterate in FIFO order per frame
         for typ, tick in lasts.items():
 
             price = tick.get('price')
 
@@ -495,9 +513,8 @@ def graphics_update_cycle(
             # tick frames to determine the y-range for chart
             # auto-scaling.
             # TODO: we need a streaming minmax algo here, see def above.
-            if liv:
-                mx = max(price + tick_margin, mx)
-                mn = min(price - tick_margin, mn)
+            mx = max(price + tick_margin, mx)
+            mn = min(price - tick_margin, mn)
 
             if typ in clear_types:
 
@@ -520,8 +537,9 @@ def graphics_update_cycle(
 
             if wap_in_history:
                 # update vwap overlay line
-                chart.update_graphics_from_flow(
+                chart.update_graphics_from_array(
                     'bar_wap',
+                    array,
                 )
 
             # L1 book label-line updates
 
@@ -535,10 +553,7 @@ def graphics_update_cycle(
                     l1.bid_label.fields['level']: l1.bid_label,
                 }.get(price)
 
-                if (
-                    label is not None
-                    and liv
-                ):
+                if label is not None:
                     label.update_fields(
                         {'level': price, 'size': size}
                     )
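The loop body consumes `lasts`, the most recent tick per type class in frame order. A sketch of building that mapping, relying on dict insertion order for the FIFO guarantee:

    def lasts_by_type(ticks: list[dict]) -> dict[str, dict]:
        # keep only the most recent tick per type; dict insertion
        # order preserves the frame's FIFO ordering of first-seen
        # tick types.
        lasts: dict[str, dict] = {}
        for tick in ticks:
            lasts[tick['type']] = tick
        return lasts

    frame = [
        {'type': 'trade', 'price': 100.1, 'size': 2},
        {'type': 'bid', 'price': 100.0, 'size': 5},
        {'type': 'trade', 'price': 100.2, 'size': 1},
    ]
    assert lasts_by_type(frame)['trade']['price'] == 100.2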
@@ -547,157 +562,54 @@ def graphics_update_cycle(
                 # the relevant L1 queue?
                 # label.size -= size
 
-            elif (
-                typ in _tick_groups['asks']
-                # TODO: instead we could check if the price is in the
-                # y-view-range?
-                and liv
-            ):
+            # elif ticktype in ('ask', 'asize'):
+            elif typ in _tick_groups['asks']:
                 l1.ask_label.update_fields({'level': price, 'size': size})
 
-            elif (
-                typ in _tick_groups['bids']
-                # TODO: instead we could check if the price is in the
-                # y-view-range?
-                and liv
-            ):
+            # elif ticktype in ('bid', 'bsize'):
+            elif typ in _tick_groups['bids']:
                 l1.bid_label.update_fields({'level': price, 'size': size})
 
         # check for y-range re-size
         if (
             (mx > vars['last_mx']) or (mn < vars['last_mn'])
             and not chart._static_yrange == 'axis'
-            and liv
         ):
-            main_vb = chart.view
-            if (
-                main_vb._ic is None
-                or not main_vb._ic.is_set()
-            ):
-                # print(f'updating range due to mxmn')
-                main_vb._set_yrange(
-                    # TODO: we should probably scale
-                    # the view margin based on the size
-                    # of the true range? This way you can
-                    # slap in orders outside the current
-                    # L1 (only) book range.
-                    # range_margin=0.1,
-                    yrange=(mn, mx),
-                )
-
-        # XXX: update this every draw cycle to make L1-always-in-view work.
-        vars['last_mx'], vars['last_mn'] = mx, mn
-
-        # run synchronous update on all linked flows
-        # TODO: should the "main" (aka source) flow be special?
-        for curve_name, flow in chart._flows.items():
-            # update any overlayed fsp flows
-            if curve_name != chart.data_key:
-                update_fsp_chart(
-                    chart,
-                    flow,
-                    curve_name,
-                    array_key=curve_name,
-                )
-
-            # even if we're downsampled bigly
-            # draw the last datum in the final
-            # px column to give the user the mx/mn
-            # range of that set.
-            if (
-                not do_append
-                # and not do_rt_update
-                and liv
-            ):
-                flow.draw_last(
-                    array_key=curve_name,
-                    only_last_uppx=True,
-                )
-
-        # volume chart logic..
-        # TODO: can we unify this with the above loop?
-        if vlm_chart:
-            # always update y-label
-            ds.vlm_sticky.update_from_data(
-                *array[-1][['index', 'volume']]
+            # print(f'new y range: {(mn, mx)}')
+            chart.view._set_yrange(
+                yrange=(mn, mx),
+                # TODO: we should probably scale
+                # the view margin based on the size
+                # of the true range? This way you can
+                # slap in orders outside the current
+                # L1 (only) book range.
+                # range_margin=0.1,
             )
 
-            if (
-                (
-                    do_rt_update
-                    or do_append
-                    and liv
-                )
-                or trigger_all
-            ):
-                # TODO: make it so this doesn't have to be called
-                # once the $vlm is up?
-                vlm_chart.update_graphics_from_flow(
-                    'volume',
-                    # UGGGh, see ``maxmin()`` impl in `._fsp` for
-                    # the overlayed plotitems... we need a better
-                    # way to invoke a maxmin per overlay..
-                    render=False,
-                    # XXX: ^^^^ THIS IS SUPER IMPORTANT! ^^^^
-                    # without this, since we disable the
-                    # 'volume' (units) chart after the $vlm starts
-                    # up we need to be sure to enable this
-                    # auto-ranging otherwise there will be no handler
-                    # connected to update accompanying overlay
-                    # graphics..
-                )
-                profiler('`vlm_chart.update_graphics_from_flow()`')
-
-            if (
-                mx_vlm_in_view != vars['last_mx_vlm']
-            ):
-                yrange = (0, mx_vlm_in_view * 1.375)
-                vlm_chart.view._set_yrange(
-                    yrange=yrange,
-                )
-                profiler('`vlm_chart.view._set_yrange()`')
-                # print(f'mx vlm: {last_mx_vlm} -> {mx_vlm_in_view}')
-                vars['last_mx_vlm'] = mx_vlm_in_view
-
-            for curve_name, flow in vlm_chart._flows.items():
-                if (
-                    curve_name != 'volume' and
-                    flow.render and (
-                        liv and
-                        do_rt_update or do_append
-                    )
-                ):
-                    update_fsp_chart(
-                        vlm_chart,
-                        flow,
-                        curve_name,
-                        array_key=curve_name,
-                        # do_append=uppx < update_uppx,
-                        do_append=do_append,
-                    )
-                    # is this even doing anything?
-                    # (pretty sure it's the real-time
-                    # resizing from last quote?)
-                    fvb = flow.plot.vb
-                    fvb._set_yrange(
-                        name=curve_name,
-                    )
-
-                elif (
-                    curve_name != 'volume'
-                    and not do_append
-                    and liv
-                    and uppx >= 1
-                    # even if we're downsampled bigly
-                    # draw the last datum in the final
-                    # px column to give the user the mx/mn
-                    # range of that set.
-                ):
-                    # always update the last datum-element
-                    # graphic for all flows
-                    # print(f'drawing last {flow.name}')
-                    flow.draw_last(array_key=curve_name)
+        vars['last_mx'], vars['last_mn'] = mx, mn
+
+        # run synchronous update on all derived fsp subplots
+        for name, subchart in ds.linked.subplots.items():
+            update_fsp_chart(
+                subchart,
+                subchart._shm,
+                # XXX: do we really need separate names here?
+                name,
+                array_key=name,
+            )
+            subchart.cv._set_yrange()
+
+        # TODO: all overlays on all subplots..
+
+        # run synchronous update on all derived overlays
+        for curve_name, flow in chart._flows.items():
+            update_fsp_chart(
+                chart,
+                flow.shm,
+                curve_name,
+                array_key=curve_name,
+            )
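The removed y-range branch defers autoscaling to the view's `_ic` interaction event. A trio-flavoured sketch of that gate; the class and method names here are illustrative, only the predicate mirrors the diff:

    from typing import Optional
    import trio

    class InteractionGate:
        # illustrative stand-in for the viewbox's `_ic` handling
        def __init__(self) -> None:
            self._ic: Optional[trio.Event] = None

        def start_ic(self) -> None:
            # a mouse click-drag just began
            self._ic = trio.Event()

        def signal_ic(self) -> None:
            # the interaction completed; wake any waiters
            if self._ic:
                self._ic.set()
                self._ic = None

        def can_autoscale(self) -> bool:
            # same check as the removed hunk above
            return self._ic is None or not self._ic.is_set()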
 async def display_symbol_data(
 
@@ -723,17 +635,15 @@ async def display_symbol_data(
     )
 
     # historical data fetch
-    # brokermod = brokers.get_brokermod(provider)
+    brokermod = brokers.get_brokermod(provider)
 
     # ohlc_status_done = sbar.open_status(
     #     'retrieving OHLC history.. ',
     #     clear_on_next=True,
     #     group_key=loading_sym_key,
     # )
-    fqsn = '.'.join((sym, provider))
 
     async with open_feed(
-        [fqsn],
+        ['.'.join((sym, provider))],
         loglevel=loglevel,
 
         # limit to at least display's FPS
 
@@ -746,17 +656,11 @@ async def display_symbol_data(
         symbol = feed.symbols[sym]
         fqsn = symbol.front_fqsn()
 
-        times = bars['time']
-        end = pendulum.from_timestamp(times[-1])
-        start = pendulum.from_timestamp(times[times != times[-1]][-1])
-        step_size_s = (end - start).seconds
-        tf_key = tf_in_1s[step_size_s]
-
         # load in symbol's ohlc data
         godwidget.window.setWindowTitle(
             f'{fqsn} '
             f'tick:{symbol.tick_size} '
-            f'step:{tf_key} '
+            f'step:1s '
         )
 
         linked = godwidget.linkedsplits
 
@@ -772,31 +676,31 @@ async def display_symbol_data(
        # create main OHLC chart
         chart = linked.plot_ohlc_main(
             symbol,
-            ohlcv,
+            bars,
             sidepane=pp_pane,
         )
-        chart.default_view()
         chart._feeds[symbol.key] = feed
         chart.setFocus()
 
         # plot historical vwap if available
         wap_in_history = False
 
-        # XXX: FOR SOME REASON THIS IS CAUSING HANGZ!?!
-        # if brokermod._show_wap_in_history:
+        if brokermod._show_wap_in_history:
 
-        #     if 'bar_wap' in bars.dtype.fields:
-        #         wap_in_history = True
-        #         chart.draw_curve(
-        #             name='bar_wap',
-        #             shm=ohlcv,
-        #             color='default_light',
-        #             add_label=False,
-        #         )
+            if 'bar_wap' in bars.dtype.fields:
+                wap_in_history = True
+                chart.draw_curve(
+                    name='bar_wap',
+                    data=bars,
+                    add_label=False,
+                )
 
         # size view to data once at outset
         chart.cv._set_yrange()
 
+        # TODO: a data view api that makes this less shit
+        chart._shm = ohlcv
+
         # NOTE: we must immediately tell Qt to show the OHLC chart
         # to avoid a race where the subplots get added/shown to
         # the linked set *before* the main price chart!
 
@@ -852,12 +756,13 @@ async def display_symbol_data(
         # that it isn't double rendered in the display loop
         # above since we do a maxmin calc on the volume data to
         # determine if auto-range adjustments should be made.
-        # linked.subplots.pop('volume', None)
+        linked.subplots.pop('volume', None)
 
         # TODO: make this not so shit XD
         # close group status
         sbar._status_groups[loading_sym_key][1]()
 
         # let the app run.. bby
+        chart.default_view()
         # linked.graphics_cycle()
         await trio.sleep_forever()
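The removed timeframe-detection lines diff the last two distinct bar timestamps and map the period through `tf_in_1s`. A sketch with an abridged, assumed mapping (the real table lives in `piker.data._source`):

    import numpy as np
    import pendulum

    tf_in_1s = {1: '1s', 60: '1m', 3600: '1h'}  # abridged, assumed

    def infer_tf_key(times: np.ndarray) -> str:
        end = pendulum.from_timestamp(times[-1])
        # last timestamp that differs from the final one
        start = pendulum.from_timestamp(times[times != times[-1]][-1])
        step_size_s = (end - start).seconds
        return tf_in_1s[step_size_s]

    print(infer_tf_key(np.array([0, 60, 120, 180])))  # -> '1m'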
@@ -343,7 +343,7 @@ class SelectRect(QtGui.QGraphicsRectItem):
         nbars = ixmx - ixmn + 1
 
         chart = self._chart
-        data = chart._flows[chart.name].shm.array[ixmn:ixmx]
+        data = chart._arrays[chart.name][ixmn:ixmx]
 
         if len(data):
             std = data['close'].std()
 
@@ -49,6 +49,10 @@ from . import _style
 log = get_logger(__name__)
 
 # pyqtgraph global config
+# might as well enable this for now?
+pg.useOpenGL = True
+pg.enableExperimental = True
 
 # engage core tweaks that give us better response
 # latency than the average pg user
 _do_overrides()
 
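The added lines flip pyqtgraph module attributes directly; the documented knob for the same settings is `setConfigOptions`, shown here:

    import pyqtgraph as pg

    # route line drawing through OpenGL and enable the experimental
    # (faster, less battle-tested) paint codepaths.
    pg.setConfigOptions(
        useOpenGL=True,
        enableExperimental=True,
    )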
@@ -1,83 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-"""
-Feed status and controls widget(s) for embedding in a UI-pane.
-
-"""
-
-from __future__ import annotations
-from textwrap import dedent
-from typing import TYPE_CHECKING
-
-# from PyQt5.QtCore import Qt
-
-from ._style import _font, _font_small
-# from ..calc import humanize
-from ._label import FormatLabel
-
-if TYPE_CHECKING:
-    from ._chart import ChartPlotWidget
-    from ..data.feed import Feed
-    from ._forms import FieldsForm
-
-
-def mk_feed_label(
-    form: FieldsForm,
-    feed: Feed,
-    chart: ChartPlotWidget,
-
-) -> FormatLabel:
-    '''
-    Generate a label from feed meta-data to be displayed
-    in a UI sidepane.
-
-    TODO: eventually buttons for changing settings over
-    a feed control protocol.
-
-    '''
-    status = feed.status
-    assert status
-
-    msg = dedent("""
-        actor: **{actor_name}**\n
-        |_ @**{host}:{port}**\n
-    """)
-
-    for key, val in status.items():
-        if key in ('host', 'port', 'actor_name'):
-            continue
-        msg += f'\n|_ {key}: **{{{key}}}**\n'
-
-    feed_label = FormatLabel(
-        fmt_str=msg,
-        # |_ streams: **{symbols}**\n
-        font=_font.font,
-        font_size=_font_small.px_size,
-        font_color='default_lightest',
-    )
-
-    # form.vbox.setAlignment(feed_label, Qt.AlignBottom)
-    # form.vbox.setAlignment(Qt.AlignBottom)
-    _ = chart.height() - (
-        form.height() +
-        form.fill_bar.height()
-        # feed_label.height()
-    )
-
-    feed_label.format(**feed.status)
-
-    return feed_label
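`mk_feed_label()` above grows its template with `f'...**{{{key}}}**...'`: triple braces emit a literal `{key}` placeholder that the later `.format(**feed.status)` fills. A tiny demo of that two-stage formatting:

    status = {'host': 'localhost', 'port': 6116, 'latency': '1.2ms'}

    msg = 'actor: **{actor_name}**\n'
    for key in status:
        if key in ('host', 'port'):
            continue
        # {{{key}}} -> literal '{' + key + '}' in the template
        msg += f'|_ {key}: **{{{key}}}**\n'

    print(msg.format(actor_name='brokerd', **status))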
piker/ui/_flows.py (1247 changed lines): file diff suppressed because it is too large.
@@ -750,12 +750,12 @@ def mk_order_pane_layout(
         parent=parent,
         fields_schema={
             'account': {
-                'label': '**accnt**:',
+                'label': '**account**:',
                 'type': 'select',
                 'default_value': ['paper'],
             },
             'size_unit': {
-                'label': '**alloc**:',
+                'label': '**allocate**:',
                 'type': 'select',
                 'default_value': [
                     '$ size',
piker/ui/_fsp.py (125 changed lines)
@@ -72,17 +72,12 @@ def has_vlm(ohlcv: ShmArray) -> bool:
 
 def update_fsp_chart(
     chart: ChartPlotWidget,
-    flow,
+    shm: ShmArray,
     graphics_name: str,
     array_key: Optional[str],
-    **kwargs,
 
 ) -> None:
 
-    shm = flow.shm
-    if not shm:
-        return
-
     array = shm.array
     last_row = try_read(array)
 
@@ -94,10 +89,10 @@ def update_fsp_chart(
     # update graphics
     # NOTE: this does a length check internally which allows it
     # staying above the last row check below..
-    chart.update_graphics_from_flow(
+    chart.update_graphics_from_array(
         graphics_name,
+        array,
         array_key=array_key or graphics_name,
-        **kwargs,
     )
 
     # XXX: re: ``array_key``: fsp func names must be unique meaning we
 
@@ -107,6 +102,9 @@ def update_fsp_chart(
     # read from last calculated value and update any label
     last_val_sticky = chart._ysticks.get(graphics_name)
     if last_val_sticky:
+        # array = shm.array[array_key]
+        # if len(array):
+        #     value = array[-1]
         last = last_row[array_key]
         last_val_sticky.update_from_data(-1, last)
 
@@ -244,18 +242,20 @@ async def run_fsp_ui(
 
         chart.draw_curve(
             name=name,
-            shm=shm,
+            data=shm.array,
             overlay=True,
             color='default_light',
             array_key=name,
             **conf.get('chart_kwargs', {})
         )
+        # specially store ref to shm for lookup in display loop
+        chart._flows[name].shm = shm
 
     else:
         # create a new sub-chart widget for this fsp
         chart = linkedsplits.add_plot(
             name=name,
-            shm=shm,
+            array=shm.array,
 
             array_key=name,
             sidepane=sidepane,
 
@@ -267,6 +267,11 @@ async def run_fsp_ui(
             **conf.get('chart_kwargs', {})
         )
 
+        # XXX: ONLY for sub-chart fsps, overlays have their
+        # data looked up from the chart's internal array set.
+        # TODO: we must get a data view api going STAT!!
+        chart._shm = shm
+
         # should **not** be the same sub-chart widget
         assert chart.name != linkedsplits.chart.name
 
@@ -277,7 +282,7 @@ async def run_fsp_ui(
     # first UI update, usually from shm pushed history
     update_fsp_chart(
         chart,
-        chart._flows[array_key],
+        shm,
         name,
         array_key=array_key,
     )
 
@@ -435,16 +440,9 @@ class FspAdmin:
             # wait for graceful shutdown signal
             async with stream.subscribe() as stream:
                 async for msg in stream:
-                    info = msg.get('fsp_update')
-                    if info:
-                        # if the chart isn't hidden try to update
-                        # the data on screen.
-                        if not self.linked.isHidden():
-                            log.debug(f'Re-syncing graphics for fsp: {ns_path}')
-                            self.linked.graphics_cycle(
-                                trigger_all=True,
-                                prepend_update_index=info['first'],
-                            )
+                    if msg == 'update':
+                        log.info(f'Re-syncing graphics for fsp: {ns_path}')
+                        self.linked.graphics_cycle()
                     else:
                         log.info(f'recved unexpected fsp engine msg: {msg}')
 
@@ -620,7 +618,7 @@ async def open_vlm_displays(
     shm = ohlcv
     chart = linked.add_plot(
         name='volume',
-        shm=shm,
+        array=shm.array,
 
         array_key='volume',
         sidepane=sidepane,
 
@@ -635,7 +633,7 @@ async def open_vlm_displays(
     )
 
     # force 0 to always be in view
-    def multi_maxmin(
+    def maxmin(
         names: list[str],
 
     ) -> tuple[float, float]:
 
@@ -651,7 +649,7 @@ async def open_vlm_displays(
 
         return 0, mx
 
-    chart.view.maxmin = partial(multi_maxmin, names=['volume'])
+    chart.view.maxmin = partial(maxmin, names=['volume'])
 
     # TODO: fix the x-axis label issue where if you put
     # the axis on the left it's totally not lined up...
 
@@ -659,6 +657,11 @@ async def open_vlm_displays(
     # chart.hideAxis('right')
     # chart.showAxis('left')
 
+    # XXX: ONLY for sub-chart fsps, overlays have their
+    # data looked up from the chart's internal array set.
+    # TODO: we must get a data view api going STAT!!
+    chart._shm = shm
+
     # send back new chart to caller
     task_status.started(chart)
 
@@ -673,9 +676,9 @@ async def open_vlm_displays(
 
     last_val_sticky.update_from_data(-1, value)
 
-    vlm_curve = chart.update_graphics_from_flow(
+    vlm_curve = chart.update_graphics_from_array(
         'volume',
-        # shm.array,
+        shm.array,
     )
 
     # size view to data once at outset
 
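The `-` side of the `FspAdmin` hunk turns the shutdown-wait loop into a message dispatcher where an `fsp_update` payload carries the first prepended index for a targeted resync. Shape of that loop, with the stream and `linked` objects assumed from the diff:

    async def dispatch_fsp_msgs(stream, linked, ns_path: str) -> None:
        # mirror of the removed dispatcher: dict msgs carrying an
        # 'fsp_update' payload trigger a graphics resync, anything
        # else is logged and ignored.
        async for msg in stream:
            info = msg.get('fsp_update') if isinstance(msg, dict) else None
            if info:
                # skip redraws for off-screen chart sets
                if not linked.isHidden():
                    linked.graphics_cycle(
                        trigger_all=True,
                        prepend_update_index=info['first'],
                    )
            else:
                print(f'recved unexpected fsp engine msg: {msg}')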
@ -741,24 +744,29 @@ async def open_vlm_displays(
|
||||||
'dolla_vlm',
|
'dolla_vlm',
|
||||||
'dark_vlm',
|
'dark_vlm',
|
||||||
]
|
]
|
||||||
# dvlm_rate_fields = [
|
dvlm_rate_fields = [
|
||||||
# 'dvlm_rate',
|
'dvlm_rate',
|
||||||
# 'dark_dvlm_rate',
|
'dark_dvlm_rate',
|
||||||
# ]
|
]
|
||||||
trade_rate_fields = [
|
trade_rate_fields = [
|
||||||
'trade_rate',
|
'trade_rate',
|
||||||
'dark_trade_rate',
|
'dark_trade_rate',
|
||||||
]
|
]
|
||||||
|
|
||||||
group_mxmn = partial(
|
# add custom auto range handler
|
||||||
multi_maxmin,
|
dvlm_pi.vb._maxmin = partial(
|
||||||
|
maxmin,
|
||||||
# keep both regular and dark vlm in view
|
# keep both regular and dark vlm in view
|
||||||
names=fields,
|
names=fields + dvlm_rate_fields,
|
||||||
# names=fields + dvlm_rate_fields,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# add custom auto range handler
|
# TODO: is there a way to "sync" the dual axes such that only
|
||||||
dvlm_pi.vb._maxmin = group_mxmn
|
# one curve is needed?
|
||||||
|
# hide the original vlm curve since the $vlm one is now
|
||||||
|
# displayed and the curves are effectively the same minus
|
||||||
|
# liquidity events (well at least on low OHLC periods - 1s).
|
||||||
|
vlm_curve.hide()
|
||||||
|
chart.removeItem(vlm_curve)
|
||||||
|
|
||||||
# use slightly less light (then bracket) gray
|
# use slightly less light (then bracket) gray
|
||||||
# for volume from "main exchange" and a more "bluey"
|
# for volume from "main exchange" and a more "bluey"
|
||||||
|
@ -784,22 +792,21 @@ async def open_vlm_displays(
|
||||||
color = 'bracket'
|
color = 'bracket'
|
||||||
|
|
||||||
curve, _ = chart.draw_curve(
|
curve, _ = chart.draw_curve(
|
||||||
|
# name='dolla_vlm',
|
||||||
name=name,
|
name=name,
|
||||||
shm=shm,
|
data=shm.array,
|
||||||
array_key=name,
|
array_key=name,
|
||||||
overlay=pi,
|
overlay=pi,
|
||||||
color=color,
|
color=color,
|
||||||
step_mode=step_mode,
|
step_mode=step_mode,
|
||||||
style=style,
|
style=style,
|
||||||
pi=pi,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# TODO: we need a better API to do this..
|
# TODO: we need a better API to do this..
|
||||||
# specially store ref to shm for lookup in display loop
|
# specially store ref to shm for lookup in display loop
|
||||||
# since only a placeholder of `None` is entered in
|
# since only a placeholder of `None` is entered in
|
||||||
# ``.draw_curve()``.
|
# ``.draw_curve()``.
|
||||||
flow = chart._flows[name]
|
chart._flows[name].shm = shm
|
||||||
assert flow.plot is pi
|
|
||||||
|
|
||||||
chart_curves(
|
chart_curves(
|
||||||
fields,
|
fields,
|
||||||
|
@ -821,24 +828,11 @@ async def open_vlm_displays(
|
||||||
)
|
)
|
||||||
await started.wait()
|
await started.wait()
|
||||||
|
|
||||||
# chart_curves(
|
chart_curves(
|
||||||
# dvlm_rate_fields,
|
dvlm_rate_fields,
|
||||||
# dvlm_pi,
|
dvlm_pi,
|
||||||
# fr_shm,
|
fr_shm,
|
||||||
# )
|
)
|
||||||
|
|
||||||
# TODO: is there a way to "sync" the dual axes such that only
|
|
||||||
# one curve is needed?
|
|
||||||
# hide the original vlm curve since the $vlm one is now
|
|
||||||
# displayed and the curves are effectively the same minus
|
|
||||||
# liquidity events (well at least on low OHLC periods - 1s).
|
|
||||||
vlm_curve.hide()
|
|
||||||
chart.removeItem(vlm_curve)
|
|
||||||
vflow = chart._flows['volume']
|
|
||||||
vflow.render = False
|
|
||||||
|
|
||||||
# avoid range sorting on volume once disabled
|
|
||||||
chart.view.disable_auto_yrange()
|
|
||||||
|
|
||||||
# Trade rate overlay
|
# Trade rate overlay
|
||||||
# XXX: requires an additional overlay for
|
# XXX: requires an additional overlay for
|
||||||
|
@ -863,7 +857,7 @@ async def open_vlm_displays(
|
||||||
)
|
)
|
||||||
# add custom auto range handler
|
# add custom auto range handler
|
||||||
tr_pi.vb.maxmin = partial(
|
tr_pi.vb.maxmin = partial(
|
||||||
multi_maxmin,
|
maxmin,
|
||||||
# keep both regular and dark vlm in view
|
# keep both regular and dark vlm in view
|
||||||
names=trade_rate_fields,
|
names=trade_rate_fields,
|
||||||
)
|
)
|
||||||
|
@ -879,10 +873,7 @@ async def open_vlm_displays(
|
||||||
style='dash',
|
style='dash',
|
||||||
)
|
)
|
||||||
|
|
||||||
for pi in (
|
for pi in (dvlm_pi, tr_pi):
|
||||||
dvlm_pi,
|
|
||||||
tr_pi,
|
|
||||||
):
|
|
||||||
for name, axis_info in pi.axes.items():
|
for name, axis_info in pi.axes.items():
|
||||||
# lol this sux XD
|
# lol this sux XD
|
||||||
axis = axis_info['item']
|
axis = axis_info['item']
|
||||||
|
@ -891,10 +882,10 @@ async def open_vlm_displays(
|
||||||
|
|
||||||
# built-in vlm fsps
|
# built-in vlm fsps
|
||||||
for target, conf in {
|
for target, conf in {
|
||||||
# tina_vwap: {
|
tina_vwap: {
|
||||||
# 'overlay': 'ohlc', # overlays with OHLCV (main) chart
|
'overlay': 'ohlc', # overlays with OHLCV (main) chart
|
||||||
# 'anchor': 'session',
|
'anchor': 'session',
|
||||||
# },
|
},
|
||||||
}.items():
|
}.items():
|
||||||
started = await admin.open_fsp_chart(
|
started = await admin.open_fsp_chart(
|
||||||
target,
|
target,
|
||||||
|
|
|
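The hunk above wires a `partial(maxmin, names=...)` as the viewbox's custom auto-range handler so every overlaid flow (regular plus dark volume) stays in view. As a rough illustration of what such a multi-name handler computes, here is a minimal sketch; it is not piker's actual implementation, and the `arrays` mapping is a hypothetical stand-in for the per-flow in-view data the real handler reads from shared memory:

import numpy as np

def multi_maxmin_sketch(
    arrays: dict[str, np.ndarray],  # hypothetical: flow name -> in-view 1d data
    names: list[str],
) -> tuple[float, float]:
    # union of each named flow's y-extents so all overlays remain visible
    lo, hi = float('inf'), float('-inf')
    for name in names:
        data = arrays[name]
        if data.size:
            lo = min(lo, float(np.nanmin(data)))
            hi = max(hi, float(np.nanmax(data)))
    return lo, hi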
@@ -20,6 +20,7 @@ Chart view box primitives
 """
 from __future__ import annotations
 from contextlib import asynccontextmanager
+# import itertools
 import time
 from typing import Optional, Callable

@@ -33,10 +34,11 @@ import numpy as np
 import trio

 from ..log import get_logger
-from .._profile import pg_profile_enabled, ms_slower_then
+from ._style import _min_points_to_show
-# from ._style import _min_points_to_show
 from ._editors import SelectRect
 from . import _event
+from .._profile import pg_profile_enabled, ms_slower_then
+# from ._ohlc import BarItems


 log = get_logger(__name__)

@@ -358,7 +360,6 @@ class ChartView(ViewBox):
     ):
         super().__init__(
             parent=parent,
-            name=name,
             # TODO: look into the default view padding
             # support that might replace somem of our
             # ``ChartPlotWidget._set_yrange()`

@@ -391,11 +392,6 @@ class ChartView(ViewBox):
     def start_ic(
         self,
     ) -> None:
-        '''
-        Signal the beginning of a click-drag interaction
-        to any interested task waiters.

-        '''
         if self._ic is None:
             self.chart.pause_all_feeds()
             self._ic = trio.Event()

@@ -403,13 +399,13 @@ class ChartView(ViewBox):
     def signal_ic(
         self,
         *args,
+        # ev = None,
     ) -> None:
-        '''
+        if args:
-        Signal the end of a click-drag interaction
+            print(f'range change dun: {args}')
-        to any waiters.
+        else:
+            print('proxy called')

-        '''
         if self._ic:
             self._ic.set()
             self._ic = None

@@ -484,18 +480,15 @@ class ChartView(ViewBox):

         # don't zoom more then the min points setting
         l, lbar, rbar, r = chart.bars_range()
-        # vl = r - l
+        vl = r - l

-        # if ev.delta() > 0 and vl <= _min_points_to_show:
+        if ev.delta() > 0 and vl <= _min_points_to_show:
-        #     log.debug("Max zoom bruh...")
+            log.debug("Max zoom bruh...")
-        #     return
+            return

-        # if (
+        if ev.delta() < 0 and vl >= len(chart._arrays[chart.name]) + 666:
-        #     ev.delta() < 0
+            log.debug("Min zoom bruh...")
-        #     and vl >= len(chart._flows[chart.name].shm.array) + 666
+            return
-        # ):
-        #     log.debug("Min zoom bruh...")
-        #     return

         # actual scaling factor
         s = 1.015 ** (ev.delta() * -1 / 20)  # self.state['wheelScaleFactor'])
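A quick worked example of the wheel-zoom factor in the context line above: a standard Qt wheel "notch" reports a delta of plus or minus 120, so one notch maps to an exponent of -/+6:

for delta in (120, -120):
    s = 1.015 ** (delta * -1 / 20)
    print(delta, round(s, 4))
# 120  -> ~0.9146 (scroll up: the view range shrinks, i.e. zoom in)
# -120 -> ~1.0934 (scroll down: the view range grows, i.e. zoom out)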
@@ -569,23 +562,11 @@ class ChartView(ViewBox):

         self._resetTarget()
         self.scaleBy(s, focal)

-        # XXX: the order of the next 2 lines i'm pretty sure
-        # matters, we want the resize to trigger before the graphics
-        # update, but i gotta feelin that because this one is signal
-        # based (and thus not necessarily sync invoked right away)
-        # that calling the resize method manually might work better.
         self.sigRangeChangedManually.emit(mask)

-        # XXX: without this is seems as though sometimes
+        # self._ic.set()
-        # when zooming in from far out (and maybe vice versa?)
+        # self._ic = None
-        # the signal isn't being fired enough since if you pan
+        # self.chart.resume_all_feeds()
-        # just after you'll see further downsampling code run
-        # (pretty noticeable on the OHLC ds curve) but with this
-        # that never seems to happen? Only question is how much this
-        # "double work" is causing latency when these missing event
-        # fires don't happen?
-        self.maybe_downsample_graphics()

         ev.accept()

@@ -692,6 +673,7 @@ class ChartView(ViewBox):
             self.sigRangeChangedManually.emit(self.state['mouseEnabled'])

             if ev.isFinish():
+                print('DRAG FINISH')
                 self.signal_ic()
                 # self._ic.set()
                 # self._ic = None

@@ -747,8 +729,9 @@ class ChartView(ViewBox):

         # flag to prevent triggering sibling charts from the same linked
         # set from recursion errors.
-        autoscale_linked_plots: bool = False,
+        autoscale_linked_plots: bool = True,
         name: Optional[str] = None,
+        # autoscale_overlays: bool = False,

     ) -> None:
         '''

@@ -759,14 +742,6 @@ class ChartView(ViewBox):
         data set.

         '''
-        name = self.name
-        # print(f'YRANGE ON {name}')
-        profiler = pg.debug.Profiler(
-            msg=f'`ChartView._set_yrange()`: `{name}`',
-            disabled=not pg_profile_enabled(),
-            ms_threshold=ms_slower_then,
-            delayed=True,
-        )
         set_range = True
         chart = self._chart

@@ -790,23 +765,36 @@ class ChartView(ViewBox):
         elif yrange is not None:
             ylow, yhigh = yrange

+        # calculate max, min y values in viewable x-range from data.
+        # Make sure min bars/datums on screen is adhered.
+        else:
+            br = bars_range or chart.bars_range()

+            # TODO: maybe should be a method on the
+            # chart widget/item?
+            if autoscale_linked_plots:
+                # avoid recursion by sibling plots
+                linked = self.linkedsplits
+                plots = list(linked.subplots.copy().values())
+                main = linked.chart
+                if main:
+                    plots.append(main)

+                for chart in plots:
+                    if chart and not chart._static_yrange:
+                        chart.cv._set_yrange(
+                            bars_range=br,
+                            autoscale_linked_plots=False,
+                        )

         if set_range:

-            # XXX: only compute the mxmn range
+            yrange = self._maxmin()
-            # if none is provided as input!
+            if yrange is None:
-            if not yrange:
+                return
-                # flow = chart._flows[name]
-                yrange = self._maxmin()

-                if yrange is None:
-                    log.warning(f'No yrange provided for {name}!?')
-                    print(f"WTF NO YRANGE {name}")
-                    return

             ylow, yhigh = yrange

-            profiler(f'callback ._maxmin(): {yrange}')

             # view margins: stay within a % of the "true range"
             diff = yhigh - ylow
             ylow = ylow - (diff * range_margin)

@@ -820,13 +808,9 @@ class ChartView(ViewBox):
                 yMax=yhigh,
             )
             self.setYRange(ylow, yhigh)
-            profiler(f'set limits: {(ylow, yhigh)}')

-        profiler.finish()

     def enable_auto_yrange(
-        self,
+        vb: ChartView,
-        src_vb: Optional[ChartView] = None,

     ) -> None:
         '''

@@ -834,11 +818,7 @@ class ChartView(ViewBox):
         based on data contents and ``ViewBox`` state.

         '''
-        if src_vb is None:
+        vb.sigXRangeChanged.connect(vb._set_yrange)
-            src_vb = self

-        # splitter(s) resizing
-        src_vb.sigResized.connect(self._set_yrange)

         # TODO: a smarter way to avoid calling this needlessly?
         # 2 things i can think of:

@@ -846,102 +826,55 @@ class ChartView(ViewBox):
         #   iterate those.
         # - only register this when certain downsampleable graphics are
         #   "added to scene".
-        src_vb.sigRangeChangedManually.connect(
+        vb.sigXRangeChanged.connect(vb.maybe_downsample_graphics)
-            self.maybe_downsample_graphics
-        )

         # mouse wheel doesn't emit XRangeChanged
-        src_vb.sigRangeChangedManually.connect(self._set_yrange)
+        vb.sigRangeChangedManually.connect(vb._set_yrange)

-        # src_vb.sigXRangeChanged.connect(self._set_yrange)
+        # splitter(s) resizing
-        # src_vb.sigXRangeChanged.connect(
+        vb.sigResized.connect(vb._set_yrange)
-        #     self.maybe_downsample_graphics
-        # )

-    def disable_auto_yrange(self) -> None:
+    def disable_auto_yrange(
+        self,
+    ) -> None:

-        self.sigResized.disconnect(
+        self._chart._static_yrange = 'axis'
-            self._set_yrange,
-        )
-        self.sigRangeChangedManually.disconnect(
-            self.maybe_downsample_graphics
-        )
-        self.sigRangeChangedManually.disconnect(
-            self._set_yrange,
-        )

-        # self.sigXRangeChanged.disconnect(self._set_yrange)
+    def xs_in_px(self) -> float:
-        # self.sigXRangeChanged.disconnect(
-        #     self.maybe_downsample_graphics
-        # )

-    def x_uppx(self) -> float:
         '''
         Return the "number of x units" within a single
         pixel currently being displayed for relevant
         graphics items which are our children.

         '''
-        graphics = [f.graphics for f in self._chart._flows.values()]
+        for graphic in self._chart._graphics.values():
-        if not graphics:
-            return 0

-        for graphic in graphics:
             xvec = graphic.pixelVectors()[0]
             if xvec:
-                return xvec.x()
+                xpx = xvec.x()
-            else:
+                if xpx:
-                return 0
+                    return xpx
+                else:
+                    continue
+        return 1.0

-    def maybe_downsample_graphics(
+    def maybe_downsample_graphics(self):
-        self,
-        autoscale_overlays: bool = True,
-    ):

-        profiler = pg.debug.Profiler(
-            msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
-            disabled=not pg_profile_enabled(),

-            # XXX: important to avoid not seeing underlying
-            # ``.update_graphics_from_flow()`` nested profiling likely
-            # due to the way delaying works and garbage collection of
-            # the profiler in the delegated method calls.
-            ms_threshold=6,
-            # ms_threshold=ms_slower_then,
-        )

         # TODO: a faster single-loop-iterator way of doing this XD
         chart = self._chart
-        linked = self.linkedsplits
+        # graphics = list(self._chart._graphics.values())
-        plots = linked.subplots | {chart.name: chart}
-        for chart_name, chart in plots.items():
-            for name, flow in chart._flows.items():

-                if (
+        profiler = pg.debug.Profiler(
-                    not flow.render
+            msg=f'FastAppendCurve.update_from_array(): `{chart.name}`',
+            disabled=not pg_profile_enabled(),
+            gt=ms_slower_then,
+        )
+        for name, graphics in chart._graphics.items():
+            # pass in no array which will read and render from the last
+            # passed array (normally provided by the display loop.)
+            chart.update_graphics_from_array(name)
+            profiler(f'updating {name}')

-                    # XXX: super important to be aware of this.
+        # for graphic in graphics:
-                    # or not flow.graphics.isVisible()
+        #     ds_meth = getattr(graphic, 'maybe_downsample', None)
-                ):
+        #     if ds_meth:
-                    continue
+        #         ds_meth()

-                # pass in no array which will read and render from the last
-                # passed array (normally provided by the display loop.)
-                chart.update_graphics_from_flow(
-                    name,
-                    use_vr=True,
-                )

-                # for each overlay on this chart auto-scale the
-                # y-range to max-min values.
-                if autoscale_overlays:
-                    overlay = chart.pi_overlay
-                    if overlay:
-                        for pi in overlay.overlays:
-                            pi.vb._set_yrange(
-                                # TODO: get the range once up front...
-                                # bars_range=br,
-                            )
-                        profiler('autoscaled linked plots')

-                profiler(f'<{chart_name}>.update_graphics_from_flow({name})')
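Both sides of the hunks above lean on the "x units per pixel" (uppx) idea named in the docstring: how many data indexes are packed into one screen pixel. The real code asks each graphics item via Qt's `pixelVectors()`; as a rough sketch under the assumption that the view's x-range and pixel width are known, the same quantity is just their ratio (argument names here are hypothetical stand-ins):

def x_uppx_sketch(view_xrange: tuple[float, float], px_width: int) -> float:
    # x-units spanned by the view divided by the pixels available to draw them
    l, r = view_xrange
    if px_width <= 0:
        return 0.0
    return (r - l) / px_width

# e.g. 6000 datums drawn across a 1000px-wide viewbox -> 6 x-units per
# pixel, which is exactly the kind of threshold (``x_gt = 6`` below)
# used to flip into downsampled line rendering.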
@@ -25,13 +25,17 @@ from typing (

 import numpy as np
 import pyqtgraph as pg
+from numba import njit, float64, int64  # , optional
 from PyQt5 import QtCore, QtGui, QtWidgets
 from PyQt5.QtCore import QLineF, QPointF
-from PyQt5.QtGui import QPainterPath
+# from numba import types as ntypes
+# from ..data._source import numba_ohlc_dtype

 from .._profile import pg_profile_enabled, ms_slower_then
 from ._style import hcolor
 from ..log import get_logger
+from ._curve import FastAppendCurve
+from ._compression import ohlc_flatten

 if TYPE_CHECKING:
     from ._chart import LinkedSplits

@@ -42,8 +46,7 @@ log = get_logger(__name__)

 def bar_from_ohlc_row(
     row: np.ndarray,
-    # 0.5 is no overlap between arms, 1.0 is full overlap
+    w: float
-    w: float = 0.43

 ) -> tuple[QLineF]:
     '''

@@ -81,11 +84,128 @@ def bar_from_ohlc_row(
     return [hl, o, c]


+@njit(
+    # TODO: for now need to construct this manually for readonly arrays, see
+    # https://github.com/numba/numba/issues/4511
+    # ntypes.tuple((float64[:], float64[:], float64[:]))(
+    #     numba_ohlc_dtype[::1],  # contiguous
+    #     int64,
+    #     optional(float64),
+    # ),
+    nogil=True
+)
+def path_arrays_from_ohlc(
+    data: np.ndarray,
+    start: int64,
+    bar_gap: float64 = 0.43,

+) -> np.ndarray:
+    '''
+    Generate an array of lines objects from input ohlc data.

+    '''
+    size = int(data.shape[0] * 6)

+    x = np.zeros(
+        # data,
+        shape=size,
+        dtype=float64,
+    )
+    y, c = x.copy(), x.copy()

+    # TODO: report bug for assert @
+    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
+    for i, q in enumerate(data[start:], start):

+        # TODO: ask numba why this doesn't work..
+        # open, high, low, close, index = q[
+        #     ['open', 'high', 'low', 'close', 'index']]

+        open = q['open']
+        high = q['high']
+        low = q['low']
+        close = q['close']
+        index = float64(q['index'])

+        istart = i * 6
+        istop = istart + 6

+        # x,y detail the 6 points which connect all vertexes of a ohlc bar
+        x[istart:istop] = (
+            index - bar_gap,
+            index,
+            index,
+            index,
+            index,
+            index + bar_gap,
+        )
+        y[istart:istop] = (
+            open,
+            open,
+            low,
+            high,
+            close,
+            close,
+        )

+        # specifies that the first edge is never connected to the
+        # prior bars last edge thus providing a small "gap"/"space"
+        # between bars determined by ``bar_gap``.
+        c[istart:istop] = (1, 1, 1, 1, 1, 0)

+    return x, y, c


+def gen_qpath(
+    data: np.ndarray,
+    start: int,  # XXX: do we need this?
+    w: float,
+    path: Optional[QtGui.QPainterPath] = None,

+) -> QtGui.QPainterPath:

+    path_was_none = path is None

+    profiler = pg.debug.Profiler(
+        msg='gen_qpath ohlc',
+        disabled=not pg_profile_enabled(),
+        gt=ms_slower_then,
+    )

+    x, y, c = path_arrays_from_ohlc(
+        data,
+        start,
+        bar_gap=w,
+    )
+    profiler("generate stream with numba")

+    # TODO: numba the internals of this!
+    path = pg.functions.arrayToQPath(
+        x,
+        y,
+        connect=c,
+        path=path,
+    )

+    # avoid mem allocs if possible
+    if path_was_none:
+        path.reserve(path.capacity())

+    profiler("generate path with arrayToQPath")

+    return path


 class BarItems(pg.GraphicsObject):
     '''
     "Price range" bars graphics rendered from a OHLC sampled sequence.

     '''
+    sigPlotChanged = QtCore.pyqtSignal(object)

+    # 0.5 is no overlap between arms, 1.0 is full overlap
+    w: float = 0.43

     def __init__(
         self,
         linked: LinkedSplits,
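To make the 6-point encoding added in `path_arrays_from_ohlc()` above concrete, here is a tiny worked example for a single bar (the numbers are illustrative, not from any real feed). Each bar becomes six vertices (left arm at the open, the body from low to high, right arm at the close) plus a connect mask whose trailing 0 breaks the stroke before the next bar:

import numpy as np

# one bar: index=10, open=1.0, high=3.0, low=0.5, close=2.0, bar_gap=0.43
x = np.array([10 - 0.43, 10, 10, 10, 10, 10 + 0.43])
y = np.array([1.0, 1.0, 0.5, 3.0, 2.0, 2.0])  # open, open, low, high, close, close
c = np.array([1, 1, 1, 1, 1, 0])              # final 0 disconnects from the next bar

Feeding the concatenated x, y, c arrays for a whole series into `pg.functions.arrayToQPath(x, y, connect=c)`, as `gen_qpath()` does, yields a single QPainterPath for every bar at once.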
@@ -105,13 +225,495 @@ class BarItems(pg.GraphicsObject):
         self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
         self._name = name

+        self._ds_line_xy: Optional[
+            tuple[np.ndarray, np.ndarray]
+        ] = None

+        # NOTE: this prevents redraws on mouse interaction which is
+        # a huge boon for avg interaction latency.

+        # TODO: one question still remaining is if this makes trasform
+        # interactions slower (such as zooming) and if so maybe if/when
+        # we implement a "history" mode for the view we disable this in
+        # that mode?
         self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
-        self.path = QPainterPath()
+        self._pi = plotitem
+        self.path = QtGui.QPainterPath()
+        self.fast_path = QtGui.QPainterPath()

+        self._xrange: tuple[int, int]
+        self._yrange: tuple[float, float]
+        self._vrange = None

+        # TODO: don't render the full backing array each time
+        # self._path_data = None
         self._last_bar_lines: Optional[tuple[QLineF, ...]] = None

-    def x_uppx(self) -> int:
+        # track the current length of drawable lines within the larger array
-        # we expect the downsample curve report this.
+        self.start_index: int = 0
-        return 0
+        self.stop_index: int = 0

+        # downsampler-line state
+        self._in_ds: bool = False
+        self._ds_line: Optional[FastAppendCurve] = None
+        self._dsi: tuple[int, int] = 0, 0
+        self._xs_in_px: float = 0

+    def draw_from_data(
+        self,
+        ohlc: np.ndarray,
+        start: int = 0,

+    ) -> QtGui.QPainterPath:
+        '''
+        Draw OHLC datum graphics from a ``np.ndarray``.

+        This routine is usually only called to draw the initial history.

+        '''
+        hist, last = ohlc[:-1], ohlc[-1]
+        self.path = gen_qpath(hist, start, self.w)

+        # save graphics for later reference and keep track
+        # of current internal "last index"
+        # self.start_index = len(ohlc)
+        index = ohlc['index']
+        self._xrange = (index[0], index[-1])
+        self._yrange = (
+            np.nanmax(ohlc['high']),
+            np.nanmin(ohlc['low']),
+        )

+        # up to last to avoid double draw of last bar
+        self._last_bar_lines = bar_from_ohlc_row(last, self.w)

+        x, y = self._ds_line_xy = ohlc_flatten(ohlc)

+        # self.update_ds_line(
+        #     x,
+        #     y,
+        # )

+        # TODO: figuring out the most optimial size for the ideal
+        # curve-path by,
+        # - calcing the display's max px width `.screen()`
+        # - drawing a curve and figuring out it's capacity:
+        #   https://doc.qt.io/qt-5/qpainterpath.html#capacity
+        # - reserving that cap for each curve-mapped-to-shm with

+        # - leveraging clearing when needed to redraw the entire
+        #   curve that does not release mem allocs:
+        #   https://doc.qt.io/qt-5/qpainterpath.html#clear
+        curve = FastAppendCurve(
+            y=y,
+            x=x,
+            name='OHLC',
+            color=self._color,
+        )
+        curve.hide()
+        self._pi.addItem(curve)
+        self._ds_line = curve

+        self._ds_xrange = (index[0], index[-1])

+        # trigger render
+        # https://doc.qt.io/qt-5/qgraphicsitem.html#update
+        self.update()

+        return self.path

+    # def update_ds_line(
+    #     self,
+    #     x,
+    #     y,

+    # ) -> FastAppendCurve:

+    #     # determine current potential downsampling value (based on pixel
+    #     # scaling) and return any existing curve for it.
+    #     curve = self._ds_line

+    #     if not curve:
+    #         # TODO: figuring out the most optimial size for the ideal
+    #         # curve-path by,
+    #         # - calcing the display's max px width `.screen()`
+    #         # - drawing a curve and figuring out it's capacity:
+    #         #   https://doc.qt.io/qt-5/qpainterpath.html#capacity
+    #         # - reserving that cap for each curve-mapped-to-shm with

+    #         # - leveraging clearing when needed to redraw the entire
+    #         #   curve that does not release mem allocs:
+    #         #   https://doc.qt.io/qt-5/qpainterpath.html#clear
+    #         curve = FastAppendCurve(
+    #             y=y,
+    #             x=x,
+    #             name='OHLC',
+    #             color=self._color,
+    #         )
+    #         curve.hide()
+    #         self._pi.addItem(curve)
+    #         self._ds_line = curve

+    #         return curve

+    #     # TODO: we should be diffing the amount of new data which
+    #     # needs to be downsampled. Ideally we actually are just
+    #     # doing all the ds-ing in sibling actors so that the data
+    #     # can just be read and rendered to graphics on events of our
+    #     # choice.
+    #     # diff = do_diff(ohlc, new_bit)

+    #     curve.update_from_array(
+    #         y=y,
+    #         x=x,
+    #         x_iv=x,
+    #         y_iv=y,
+    #         view_range=True,  # hack
+    #     )
+    #     return curve

+    def update_from_array(
+        self,

+        # full array input history
+        ohlc: np.ndarray,

+        # pre-sliced array data that's "in view"
+        ohlc_iv: np.ndarray,

+        view_range: Optional[tuple[int, int]] = None,

+    ) -> None:
+        '''
+        Update the last datum's bar graphic from input data array.

+        This routine should be interface compatible with
+        ``pg.PlotCurveItem.setData()``. Normally this method in
+        ``pyqtgraph`` seems to update all the data passed to the
+        graphics object, and then update/rerender, but here we're
+        assuming the prior graphics havent changed (OHLC history rarely
+        does) so this "should" be simpler and faster.

+        This routine should be made (transitively) as fast as possible.

+        '''
+        profiler = pg.debug.Profiler(
+            disabled=not pg_profile_enabled(),
+            gt=ms_slower_then,
+        )

+        # vr = self.viewRect()
+        # l, r = int(vr.left()), int(vr.right())
+        # # l, r = self.view_range()
+        # # array = self._arrays[self.name]
+        # indexes = ohlc['index']
+        # start_index = indexes[0]
+        # end_index = indexes[-1]

+        # lbar = max(l, start_index) - start_index
+        # rbar = min(r, end_index) - start_index
+        # in_view = ohlc[lbar:rbar]
+        # self._vrange = lbar, rbar

+        # index = self.start_index
+        istart, istop = self._xrange
+        ds_istart, ds_istop = self._ds_xrange

+        index = ohlc['index']
+        first_index, last_index = index[0], index[-1]

+        # length = len(ohlc)
+        prepend_length = istart - first_index
+        append_length = last_index - istop

+        # ds_prepend_length = ds_istart - first_index
+        # ds_append_length = last_index - ds_istop

+        flip_cache = False

+        x_gt = 6
+        if self._ds_line:
+            uppx = self._ds_line.x_uppx()
+        else:
+            uppx = 0

+        should_line = self._in_ds
+        if (
+            self._in_ds
+            and uppx < x_gt
+        ):
+            should_line = False

+        elif (
+            not self._in_ds
+            and uppx >= x_gt
+        ):
+            should_line = True

+        # should_ds, should_redraw = self.should_ds_or_redraw()
+        # print(
+        #     f'OHLC in line: {self._in_ds}'
+        #     f'OHLC should line: {should_line}\n'
+        #     # f'OHLC should_redraw: {should_redraw}\n'
+        # )

+        if (
+            should_line
+        ):
+            # update the line graphic
+            # x, y = self._ds_line_xy = ohlc_flatten(ohlc_iv)
+            # x, y = self._ds_line_xy = ohlc_flatten(ohlc)
+            x_iv, y_iv = self._ds_line_xy = ohlc_flatten(ohlc_iv)
+            profiler('flattening bars to line')

+            curve = self._ds_line
+            # curve = self.update_ds_line(x, y)

+            # TODO: we should be diffing the amount of new data which
+            # needs to be downsampled. Ideally we actually are just
+            # doing all the ds-ing in sibling actors so that the data
+            # can just be read and rendered to graphics on events of our
+            # choice.
+            # diff = do_diff(ohlc, new_bit)
+            curve.update_from_array(
+                y=x_iv,
+                x=y_iv,
+                x_iv=x_iv,
+                y_iv=y_iv,
+                view_range=view_range,  # hack
+            )

+            # we already are showing a line and should be
+            # self._in_ds

+            # check if the ds line should be resampled/drawn
+            # should_ds_line, should_redraw_line = self._ds_line.should_ds_or_redraw()
+            # print(f'OHLC DS should ds: {should_ds_line}, should_redraw: {should_redraw_line}')

+            # if (
+            #     # line should be redrawn/sampled
+            #     # should_ds_line or

+            #     # we are flipping to line from bars mode
+            #     not self._in_ds
+            # ):
+            # uppx = self._ds_line.x_uppx()
+            # self._xs_in_px = uppx


+            if not self._in_ds:
+                # hide bars and show line
+                self.hide()
+                # XXX: is this actually any faster?
+                # self._pi.removeItem(self)

+                # TODO: a `.ui()` log level?
+                log.info(
+                    f'downsampling to line graphic {self._name}'
+                )

+                # self._pi.addItem(curve)
+                curve.show()
+                curve.update()
+                self._in_ds = True

+            # stop here since we don't need to update bars path any more
+            # as we delegate to the downsample line with updates.
+            return

+        elif (
+            not should_line
+            and self._in_ds
+        ):
+            # flip back to bars graphics and hide the downsample line.
+            log.info(f'showing bars graphic {self._name}')

+            curve = self._ds_line
+            curve.hide()
+            # self._pi.removeItem(curve)

+            # XXX: is this actually any faster?
+            # self._pi.addItem(self)
+            self.show()
+            self._in_ds = False

+        # if not self._in_ds and should_ds
+        #     self.hide()
+        #     # XXX: is this actually any faster?
+        #     # self._pi.removeItem(self)

+        #     # this should have been done in the block above
+        #     # x, y = self._ds_line_xy = ohlc_flatten(ohlc_iv)
+        #     # curve = self.update_ds_line(x, y)

+        #     # TODO: a `.ui()` log level?
+        #     log.info(
+        #         f'downsampling to line graphic {self._name}'
+        #     )

+        #     # self._pi.addItem(curve)
+        #     curve.show()
+        #     curve.update()
+        #     self._in_ds = True
+        #     return

+        # self._in_ds = False

+        # print('YO NOT DS OHLC')

+        # generate in_view path
+        self.path = gen_qpath(
+            ohlc_iv,
+            0,
+            self.w,
+            # path=self.path,
+        )

+        # TODO: to make the downsampling faster
+        # - allow mapping only a range of lines thus only drawing as
+        #   many bars as exactly specified.
+        # - move ohlc "flattening" to a shmarr
+        # - maybe move all this embedded logic to a higher
+        #   level type?

+        # ohlc = in_view

+        # if prepend_length:
+        #     # new history was added and we need to render a new path
+        #     prepend_bars = ohlc[:prepend_length]

+        # if ds_prepend_length:
+        #     ds_prepend_bars = ohlc[:ds_prepend_length]
+        #     pre_x, pre_y = ohlc_flatten(ds_prepend_bars)
+        #     fx = np.concatenate((pre_x, fx))
+        #     fy = np.concatenate((pre_y, fy))
+        #     profiler('ds line prepend diff complete')

+        # if append_length:
+        #     # generate new graphics to match provided array
+        #     # path appending logic:
+        #     # we need to get the previous "current bar(s)" for the time step
+        #     # and convert it to a sub-path to append to the historical set
+        #     # new_bars = ohlc[istop - 1:istop + append_length - 1]
+        #     append_bars = ohlc[-append_length - 1:-1]
+        #     # print(f'ohlc bars to append size: {append_bars.size}\n')

+        # if ds_append_length:
+        #     ds_append_bars = ohlc[-ds_append_length - 1:-1]
+        #     post_x, post_y = ohlc_flatten(ds_append_bars)
+        #     print(
+        #         f'ds curve to append sizes: {(post_x.size, post_y.size)}'
+        #     )
+        #     fx = np.concatenate((fx, post_x))
+        #     fy = np.concatenate((fy, post_y))

+        #     profiler('ds line append diff complete')

+        profiler('array diffs complete')

+        # does this work?
+        last = ohlc[-1]
+        # fy[-1] = last['close']

+        # # incremental update and cache line datums
+        # self._ds_line_xy = fx, fy

+        # maybe downsample to line
+        # ds = self.maybe_downsample()
+        # if ds:
+        #     # if we downsample to a line don't bother with
+        #     # any more path generation / updates
+        #     self._ds_xrange = first_index, last_index
+        #     profiler('downsampled to line')
+        #     return

+        # print(in_view.size)

+        # if self.path:
+        #     self.path = path
+        #     self.path.reserve(path.capacity())
+        #     self.path.swap(path)

+        # path updates
+        # if prepend_length:
+        #     # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path
+        #     # y value not matching the first value from
+        #     # ohlc[prepend_length + 1] ???
+        #     prepend_path = gen_qpath(prepend_bars, 0, self.w)
+        #     old_path = self.path
+        #     self.path = prepend_path
+        #     self.path.addPath(old_path)
+        #     profiler('path PREPEND')

+        # if append_length:
+        #     append_path = gen_qpath(append_bars, 0, self.w)

+        #     self.path.moveTo(
+        #         float(istop - self.w),
+        #         float(append_bars[0]['open'])
+        #     )
+        #     self.path.addPath(append_path)

+        #     profiler('path APPEND')
+        #     fp = self.fast_path
+        #     if fp is None:
+        #         self.fast_path = append_path

+        #     else:
+        #         fp.moveTo(
+        #             float(istop - self.w), float(new_bars[0]['open'])
+        #         )
+        #         fp.addPath(append_path)

+        #     self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
+        #     flip_cache = True

+        self._xrange = first_index, last_index

+        # trigger redraw despite caching
+        self.prepareGeometryChange()

+        # generate new lines objects for updatable "current bar"
+        self._last_bar_lines = bar_from_ohlc_row(last, self.w)

+        # last bar update
+        i, o, h, l, last, v = last[
+            ['index', 'open', 'high', 'low', 'close', 'volume']
+        ]
+        # assert i == self.start_index - 1
+        # assert i == last_index
+        body, larm, rarm = self._last_bar_lines

+        # XXX: is there a faster way to modify this?
+        rarm.setLine(rarm.x1(), last, rarm.x2(), last)

+        # writer is responsible for changing open on "first" volume of bar
+        larm.setLine(larm.x1(), o, larm.x2(), o)

+        if l != h:  # noqa

+            if body is None:
+                body = self._last_bar_lines[0] = QLineF(i, l, i, h)
+            else:
+                # update body
+                body.setLine(i, l, i, h)

+            # XXX: pretty sure this is causing an issue where the bar has
+            # a large upward move right before the next sample and the body
+            # is getting set to None since the next bar is flat but the shm
+            # array index update wasn't read by the time this code runs. Iow
+            # we're doing this removal of the body for a bar index that is
+            # now out of date / from some previous sample. It's weird
+            # though because i've seen it do this to bars i - 3 back?

+        profiler('last bar set')

+        self.update()
+        profiler('.update()')

+        if flip_cache:
+            self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

     def boundingRect(self):
         # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
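The mode-flip logic added in `update_from_array()` above is a small hysteresis: the graphic switches to the flattened `FastAppendCurve` once more than `x_gt` x-units pack into each pixel, and flips back to individual bars when zooming in. Distilled into a standalone sketch (same thresholds as the diff, function name invented for illustration):

def should_render_line(in_ds: bool, uppx: float, x_gt: float = 6) -> bool:
    # currently in downsampled-line mode but zoomed back in: show bars again
    if in_ds and uppx < x_gt:
        return False
    # currently showing bars but zoomed out: flip to the downsampled line
    if not in_ds and uppx >= x_gt:
        return True
    # otherwise keep the current mode
    return in_ds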
@@ -135,6 +737,16 @@ class BarItems(pg.GraphicsObject):
             hb.bottomRight(),
         )

+        # fp = self.fast_path
+        # if fp:
+        #     fhb = fp.controlPointRect()
+        #     print((hb_tl, hb_br))
+        #     print(fhb)
+        #     hb_tl, hb_br = (
+        #         fhb.topLeft() + hb.topLeft(),
+        #         fhb.bottomRight() + hb.bottomRight(),
+        #     )

         # need to include last bar height or BR will be off
         mx_y = hb_br.y()
         mn_y = hb_tl.y()

@@ -162,6 +774,70 @@ class BarItems(pg.GraphicsObject):

         )

+    # def should_ds_or_redraw(
+    #     self,
+    #     x_gt: float = 2,

+    # ) -> tuple[bool, bool]:

+    #     curve = self._ds_line
+    #     if not curve:
+    #         return False, False

+    #     # this is the ``float`` value of the "number of x units" (in
+    #     # view coords) that a pixel spans.
+    #     uppx = self._ds_line.x_uppx()
+    #     print(f'uppx: {uppx}')

+    #     # linked = self.linked
+    #     should_redraw: bool = False
+    #     should_ds: bool = False

+    #     if (
+    #         not self._in_ds
+    #         and uppx >= x_gt
+    #     ):

+    #         should_ds = True
+    #         should_redraw = True

+    #     elif (
+    #         self._in_ds
+    #         and uppx < x_gt
+    #     ):
+    #         should_ds = False
+    #         should_redraw = True

+    #     if self._in_ds:
+    #         should_ds = True

+    #     # no curve change
+    #     return should_ds, should_redraw

+    # def maybe_downsample(
+    #     self,
+    #     x_gt: float = 2,

+    # ) -> bool:
+    #     '''
+    #     Call this when you want to stop drawing individual
+    #     bars and instead use a ``FastAppendCurve`` intepolation
+    #     line (normally when the width of a bar (aka 1.0 in the x)
+    #     is less then a pixel width on the device).

+    #     '''
+    #     ds_xy = self._ds_line_xy
+    #     if ds_xy:
+    #         ds_xy.maybe_downsample()

+    #     if (
+    #         self._ds_line_xy is not None
+    #         and self._in_ds
+    #     ):
+    #         curve = self.update_ds_line(
+    #             *self._ds_line_xy,
+    #         )

     def paint(
         self,
         p: QtGui.QPainter,

@@ -170,9 +846,12 @@ class BarItems(pg.GraphicsObject):

     ) -> None:

+        if self._in_ds:
+            return

         profiler = pg.debug.Profiler(
             disabled=not pg_profile_enabled(),
-            ms_threshold=ms_slower_then,
+            gt=ms_slower_then,
         )

         # p.setCompositionMode(0)

@@ -184,67 +863,13 @@ class BarItems(pg.GraphicsObject):
         # lead to any perf gains other then when zoomed in to less bars
         # in view.
         p.setPen(self.last_bar_pen)
-        if self._last_bar_lines:
+        p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
-            p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
+        profiler('draw last bar')
-            profiler('draw last bar')

         p.setPen(self.bars_pen)
         p.drawPath(self.path)
         profiler(f'draw history path: {self.path.capacity()}')

-    def draw_last_datum(
+        # if self.fast_path:
-        self,
+        #     p.drawPath(self.fast_path)
-        path: QPainterPath,
+        #     profiler('draw fast path')
-        src_data: np.ndarray,
-        render_data: np.ndarray,
-        reset: bool,
-        array_key: str,

-        fields: list[str] = [
-            'index',
-            'open',
-            'high',
-            'low',
-            'close',
-        ],

-    ) -> None:

-        # relevant fields
-        ohlc = src_data[fields]
-        last_row = ohlc[-1:]

-        # individual values
-        last_row = i, o, h, l, last = ohlc[-1]

-        # generate new lines objects for updatable "current bar"
-        self._last_bar_lines = bar_from_ohlc_row(last_row)

-        # assert i == graphics.start_index - 1
-        # assert i == last_index
-        body, larm, rarm = self._last_bar_lines

-        # XXX: is there a faster way to modify this?
-        rarm.setLine(rarm.x1(), last, rarm.x2(), last)

-        # writer is responsible for changing open on "first" volume of bar
-        larm.setLine(larm.x1(), o, larm.x2(), o)

-        if l != h:  # noqa

-            if body is None:
-                body = self._last_bar_lines[0] = QLineF(i, l, i, h)
-            else:
-                # update body
-                body.setLine(i, l, i, h)

-            # XXX: pretty sure this is causing an issue where the
-            # bar has a large upward move right before the next
-            # sample and the body is getting set to None since the
-            # next bar is flat but the shm array index update wasn't
-            # read by the time this code runs. Iow we're doing this
-            # removal of the body for a bar index that is now out of
-            # date / from some previous sample. It's weird though
-            # because i've seen it do this to bars i - 3 back?

-        return ohlc['index'], ohlc['close']
@@ -1,236 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-"""
-Super fast ``QPainterPath`` generation related operator routines.
-
-"""
-from __future__ import annotations
-from typing import (
-    # Optional,
-    TYPE_CHECKING,
-)
-
-import numpy as np
-from numpy.lib import recfunctions as rfn
-from numba import njit, float64, int64  # , optional
-# import pyqtgraph as pg
-from PyQt5 import QtGui
-# from PyQt5.QtCore import QLineF, QPointF
-
-from ..data._sharedmem import (
-    ShmArray,
-)
-# from .._profile import pg_profile_enabled, ms_slower_then
-from ._compression import (
-    ds_m4,
-)
-
-if TYPE_CHECKING:
-    from ._flows import Renderer
-
-
-def xy_downsample(
-    x,
-    y,
-    uppx,
-
-    x_spacer: float = 0.5,
-
-) -> tuple[np.ndarray, np.ndarray]:
-
-    # downsample whenever more then 1 pixels per datum can be shown.
-    # always refresh data bounds until we get diffing
-    # working properly, see above..
-    bins, x, y = ds_m4(
-        x,
-        y,
-        uppx,
-    )
-
-    # flatten output to 1d arrays suitable for path-graphics generation.
-    x = np.broadcast_to(x[:, None], y.shape)
-    x = (x + np.array(
-        [-x_spacer, 0, 0, x_spacer]
-    )).flatten()
-    y = y.flatten()
-
-    return x, y
-
-
-@njit(
-    # TODO: for now need to construct this manually for readonly arrays, see
-    # https://github.com/numba/numba/issues/4511
-    # ntypes.tuple((float64[:], float64[:], float64[:]))(
-    #     numba_ohlc_dtype[::1],  # contiguous
-    #     int64,
-    #     optional(float64),
-    # ),
-    nogil=True
-)
-def path_arrays_from_ohlc(
-    data: np.ndarray,
-    start: int64,
-    bar_gap: float64 = 0.43,
-
-) -> np.ndarray:
-    '''
-    Generate an array of lines objects from input ohlc data.
-
-    '''
-    size = int(data.shape[0] * 6)
-
-    x = np.zeros(
-        # data,
-        shape=size,
-        dtype=float64,
-    )
-    y, c = x.copy(), x.copy()
-
-    # TODO: report bug for assert @
-    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
-    for i, q in enumerate(data[start:], start):
-
-        # TODO: ask numba why this doesn't work..
-        # open, high, low, close, index = q[
-        #     ['open', 'high', 'low', 'close', 'index']]
-
-        open = q['open']
-        high = q['high']
-        low = q['low']
-        close = q['close']
-        index = float64(q['index'])
-
-        istart = i * 6
-        istop = istart + 6
-
-        # x,y detail the 6 points which connect all vertexes of a ohlc bar
-        x[istart:istop] = (
-            index - bar_gap,
-            index,
-            index,
-            index,
-            index,
-            index + bar_gap,
-        )
-        y[istart:istop] = (
-            open,
-            open,
-            low,
-            high,
-            close,
-            close,
-        )
-
-        # specifies that the first edge is never connected to the
-        # prior bars last edge thus providing a small "gap"/"space"
-        # between bars determined by ``bar_gap``.
-        c[istart:istop] = (1, 1, 1, 1, 1, 0)
-
-    return x, y, c
-
-
-def gen_ohlc_qpath(
-    r: Renderer,
-    data: np.ndarray,
-    array_key: str,  # we ignore this
-    vr: tuple[int, int],
-
-    start: int = 0,  # XXX: do we need this?
-    # 0.5 is no overlap between arms, 1.0 is full overlap
-    w: float = 0.43,
-
-) -> QtGui.QPainterPath:
-    '''
-    More or less direct proxy to ``path_arrays_from_ohlc()``
-    but with closed in kwargs for line spacing.
-
-    '''
-    x, y, c = path_arrays_from_ohlc(
-        data,
-        start,
-        bar_gap=w,
-    )
-    return x, y, c
-
-
-def ohlc_to_line(
-    ohlc_shm: ShmArray,
-    data_field: str,
-    fields: list[str] = ['open', 'high', 'low', 'close']
-
-) -> tuple[
-    np.ndarray,
-    np.ndarray,
-]:
-    '''
-    Convert an input struct-array holding OHLC samples into a pair of
-    flattened x, y arrays with the same size (datums wise) as the source
-    data.
-
-    '''
-    y_out = ohlc_shm.ustruct(fields)
-    first = ohlc_shm._first.value
-    last = ohlc_shm._last.value
-
-    # write pushed data to flattened copy
-    y_out[first:last] = rfn.structured_to_unstructured(
-        ohlc_shm.array[fields]
-    )
-
-    # generate an flat-interpolated x-domain
-    x_out = (
-        np.broadcast_to(
-            ohlc_shm._array['index'][:, None],
-            (
-                ohlc_shm._array.size,
-                # 4,  # only ohlc
-                y_out.shape[1],
-            ),
-        ) + np.array([-0.5, 0, 0, 0.5])
-    )
-    assert y_out.any()
-
-    return (
-        x_out,
-        y_out,
-    )
-
-
-def to_step_format(
-    shm: ShmArray,
-    data_field: str,
-    index_field: str = 'index',
-
-) -> tuple[int, np.ndarray, np.ndarray]:
-    '''
-    Convert an input 1d shm array to a "step array" format
-    for use by path graphics generation.
-
-    '''
-    i = shm._array['index'].copy()
-    out = shm._array[data_field].copy()
-
-    x_out = np.broadcast_to(
-        i[:, None],
-        (i.size, 2),
-    ) + np.array([-0.5, 0.5])
-
-    y_out = np.empty((len(out), 2), dtype=out.dtype)
-    y_out[:] = out[:, np.newaxis]
-
-    # start y at origin level
-    y_out[0, 0] = 0
-    return x_out, y_out
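The removed module's `to_step_format()` relies on a numpy broadcast trick worth making concrete: each index i becomes the pair (i - 0.5, i + 0.5) and each sample value is doubled, yielding the vertex pairs of a step ("bar edge to bar edge") curve. A tiny runnable example with made-up sample values:

import numpy as np

i = np.arange(3)                    # indexes 0, 1, 2
out = np.array([10.0, 12.0, 11.0])  # one sample per index

x_out = np.broadcast_to(i[:, None], (i.size, 2)) + np.array([-0.5, 0.5])
y_out = np.empty((len(out), 2), dtype=out.dtype)
y_out[:] = out[:, np.newaxis]

# x_out -> [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]]
# y_out -> [[10., 10.], [12., 12.], [11., 11.]]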
@@ -287,6 +287,7 @@ class MainWindow(QtGui.QMainWindow):
         app = QtGui.QApplication.instance()
         geo = self.current_screen().geometry()
         h, w = geo.height(), geo.width()
+        self.setMaximumSize(w, h)
         # use approx 1/3 of the area of the screen by default
         self._size = round(w * .666), round(h * .666)

@@ -122,8 +122,7 @@ def optschain(config, symbol, date, rate, test):
 @cli.command()
 @click.option(
     '--profile',
-    '-p',
+    is_flag=True,
-    default=None,
     help='Enable pyqtgraph profiling'
 )
 @click.option(

@@ -134,16 +133,9 @@ def optschain(config, symbol, date, rate, test):
 @click.argument('symbol', required=True)
 @click.pass_obj
 def chart(config, symbol, profile, pdb):
-    '''
+    """Start a real-time chartng UI
-    Start a real-time chartng UI
+    """
+    from .. import _profile
-    '''
-    # eg. ``--profile 3`` reports profiling for anything slower then 3 ms.
-    if profile is not None:
-        from .. import _profile
-        _profile._pg_profile = True
-        _profile.ms_slower_then = float(profile)

     from ._app import _main

     if '.' not in symbol:

@@ -153,6 +145,8 @@ def chart(config, symbol, profile, pdb):
         ))
         return

+    # toggle to enable profiling
+    _profile._pg_profile = profile

     # global opts
     brokernames = config['brokers']
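The left side of this hunk turns `--profile` from a boolean flag into a threshold value in milliseconds. As a standalone sketch of that CLI pattern (module layout and names here are illustrative, not piker's actual structure):

import click

@click.command()
@click.option(
    '--profile',
    '-p',
    default=None,
    help='Enable pyqtgraph profiling',
)
def chart(profile):
    # e.g. ``-p 3`` would enable profiling of anything slower than 3 ms
    if profile is not None:
        ms_threshold = float(profile)
        click.echo(f'profiling calls slower than {ms_threshold} ms')

if __name__ == '__main__':
    chart()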
@ -30,7 +30,6 @@ import uuid
|
||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
import tractor
|
import tractor
|
||||||
import trio
|
import trio
|
||||||
from PyQt5.QtCore import Qt
|
|
||||||
|
|
||||||
from .. import config
|
from .. import config
|
||||||
from ..clearing._client import open_ems, OrderBook
|
from ..clearing._client import open_ems, OrderBook
|
||||||
|
@ -38,7 +37,6 @@ from ..clearing._allocate import (
|
||||||
mk_allocator,
|
mk_allocator,
|
||||||
Position,
|
Position,
|
||||||
)
|
)
|
||||||
from ._style import _font
|
|
||||||
from ..data._source import Symbol
|
from ..data._source import Symbol
|
||||||
from ..data.feed import Feed
|
from ..data.feed import Feed
|
||||||
from ..log import get_logger
|
from ..log import get_logger
|
||||||
|
@ -48,8 +46,7 @@ from ._position import (
|
||||||
PositionTracker,
|
PositionTracker,
|
||||||
SettingsPane,
|
SettingsPane,
|
||||||
)
|
)
|
||||||
from ._forms import FieldsForm
|
from ._label import FormatLabel
|
||||||
# from ._label import FormatLabel
|
|
||||||
from ._window import MultiStatus
|
from ._window import MultiStatus
|
||||||
from ..clearing._messages import Order, BrokerdPosition
|
from ..clearing._messages import Order, BrokerdPosition
|
||||||
from ._forms import open_form_input_handling
|
from ._forms import open_form_input_handling
|
||||||
|
@@ -642,21 +639,63 @@ async def open_order_mode(
         pp_tracker.hide_info()
 
     # setup order mode sidepane widgets
-    form: FieldsForm = chart.sidepane
-    form.vbox.setSpacing(
+    form = chart.sidepane
+    vbox = form.vbox
+
+    from textwrap import dedent
+
+    from PyQt5.QtCore import Qt
+
+    from ._style import _font, _font_small
+    from ..calc import humanize
+
+    feed_label = FormatLabel(
+        fmt_str=dedent("""
+        actor: **{actor_name}**\n
+        |_ @**{host}:{port}**\n
+        |_ throttle_hz: **{throttle_rate}**\n
+        |_ streams: **{symbols}**\n
+        |_ shm: **{shm}**\n
+        """),
+        font=_font.font,
+        font_size=_font_small.px_size,
+        font_color='default_lightest',
+    )
+
+    form.feed_label = feed_label
+
+    # add feed info label to top
+    vbox.insertWidget(
+        0,
+        feed_label,
+        alignment=Qt.AlignBottom,
+    )
+    # vbox.setAlignment(feed_label, Qt.AlignBottom)
+    # vbox.setAlignment(Qt.AlignBottom)
+    _ = chart.height() - (
+        form.height() +
+        form.fill_bar.height()
+        # feed_label.height()
+    )
+    vbox.setSpacing(
         int((1 + 5/8)*_font.px_size)
     )
 
-    from ._feedstatus import mk_feed_label
-
-    feed_label = mk_feed_label(
-        form,
-        feed,
-        chart,
-    )
-
-    # XXX: we set this because?
-    form.feed_label = feed_label
+    # fill in brokerd feed info
+    host, port = feed.portal.channel.raddr
+    if host == '127.0.0.1':
+        host = 'localhost'
+    mpshm = feed.shm._shm
+    shmstr = f'{humanize(mpshm.size)}'
+    form.feed_label.format(
+        actor_name=feed.portal.channel.uid[0],
+        host=host,
+        port=port,
+        symbols=len(feed.symbols),
+        shm=shmstr,
+        throttle_rate=feed.throttle_rate,
+    )
 
     order_pane = SettingsPane(
         form=form,
         # XXX: ugh, so hideous...
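For reference, the `FormatLabel` construction added in this hunk is a field-by-field template: the label is built once with a `fmt_str` and then filled via `.format(...)`. The templating half can be reproduced with plain `str.format` (sample values below are hypothetical; the widget additionally renders the `**...**` markup as rich text in Qt):

    from textwrap import dedent

    fmt_str = dedent("""
        actor: **{actor_name}**\n
        |_ @**{host}:{port}**\n
        |_ throttle_hz: **{throttle_rate}**\n
        |_ streams: **{symbols}**\n
        |_ shm: **{shm}**\n
        """)

    print(fmt_str.format(
        actor_name='brokerd.ib',  # sample values only
        host='localhost',
        port=61519,
        throttle_rate=60,
        symbols=1,
        shm='32.0 MB',
    ))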
@@ -667,11 +706,6 @@ async def open_order_mode(
     )
     order_pane.set_accounts(list(trackers.keys()))
 
-    form.vbox.addWidget(
-        feed_label,
-        alignment=Qt.AlignBottom,
-    )
-
     # update pp icons
     for name, tracker in trackers.items():
         order_pane.update_account_icons({name: tracker.live_pp})
@@ -873,9 +907,7 @@ async def process_trades_and_update_ui(
             mode.lines.remove_line(uuid=oid)
 
         # each clearing tick is responded individually
-        elif resp in (
-            'broker_filled',
-        ):
+        elif resp in ('broker_filled',):
 
             known_order = book._sent_orders.get(oid)
             if not known_order:
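One detail worth flagging in the collapsed one-liner: the trailing comma is what makes `('broker_filled',)` a tuple; without it the parentheses are mere grouping and `in` falls back to substring matching on the string:

    resp = 'filled'
    assert resp in ('broker_filled')       # True: substring match on a str!
    assert resp not in ('broker_filled',)  # tuple membership is exact-match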
@@ -1,21 +1,14 @@
 # we require a pinned dev branch to get some edge features that
 # are often untested in tractor's CI and/or being tested by us
 # first before committing as core features in tractor's base.
--e git+https://github.com/goodboy/tractor.git@master#egg=tractor
+-e git+git://github.com/goodboy/tractor.git@master#egg=tractor
 
 # `pyqtgraph` peeps keep breaking, fixing, improving so might as well
 # pin this to a dev branch that we have more control over especially
 # as more graphics stuff gets hashed out.
--e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
+-e git+git://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
 
-
-# our async client for ``marketstore`` (the tsdb)
--e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
-
-# ``trimeter`` for asysnc history fetching
--e git+https://github.com/python-trio/trimeter.git@master#egg=trimeter
-
-
-# ``asyncvnc`` for sending interactions to ib-gw inside docker
--e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc
+# we own and maintain the async client for `marketstore` in our org
+# `anyio_marketstore`:
+-e git+git://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
setup.py
@@ -51,55 +51,42 @@ setup(
         # async
         'trio',
         'trio-websocket',
+        # 'tractor', # from github currently
         'msgspec', # performant IPC messaging
         'async_generator',
 
-        # from github currently (see requirements.txt)
-        # 'trimeter', # not released yet..
-        # 'tractor',
-        # asyncvnc,
-
         # brokers
         'asks==2.4.8',
         'ib_insync',
 
         # numerics
-        'pendulum', # easier datetimes
+        'arrow', # better datetimes
         'bidict', # 2 way map
         'cython',
         'numpy',
         'numba',
+        'pandas',
 
         # UI
         'PyQt5',
-        # 'pyqtgraph', from our fork see reqs.txt
-        'qdarkstyle >= 3.0.2', # themeing
-        'fuzzywuzzy[speedup]', # fuzzy search
+        'pyqtgraph',
+        'qdarkstyle >= 3.0.2',
+        # fuzzy search
+        'fuzzywuzzy[speedup]',
 
         # tsdbs
-        # anyio-marketstore # from gh see reqs.txt
+        'pymarketstore',
     ],
-    extras_require={
-        'tsdb': [
-            'docker',
-        ],
-    },
     tests_require=['pytest'],
-    python_requires=">=3.10",
-    keywords=[
-        "async",
-        "trading",
-        "finance",
-        "quant",
-        "charting",
-    ],
+    python_requires=">=3.9", # literally for ``datetime.datetime.fromisoformat``...
+    keywords=["async", "trading", "finance", "quant", "charting"],
     classifiers=[
         'Development Status :: 3 - Alpha',
         'License :: OSI Approved :: ',
         'Operating System :: POSIX :: Linux',
         "Programming Language :: Python :: Implementation :: CPython",
         "Programming Language :: Python :: 3 :: Only",
+        "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
         'Intended Audience :: Financial and Insurance Industry',
         'Intended Audience :: Science/Research',
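The `extras_require` block shown as removed in this hunk is what makes optional dependency groups installable, e.g. `pip install 'piker[tsdb]'` to pull in `docker` on top of the base set. A trimmed, illustrative stand-in (the dependency lists here are abbreviated, not the project's full set):

    from setuptools import setup

    setup(
        name='piker',
        install_requires=[
            'trio',  # base deps, abbreviated
        ],
        extras_require={
            # enabled via: pip install 'piker[tsdb]'
            'tsdb': ['docker'],
        },
    )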