Compare commits

No commits in common. "hist_backfill_fixes" and "gitea_feats" have entirely different histories.

hist_backf...gitea_feat

README.rst | 31
@@ -88,23 +88,7 @@ a sane install with `uv`
 ************************
 
 bc why install with `python` when you can go faster with `rust` ::
 
-    uv sync
+    uv lock
 
-    # ^ astral's docs,
-    # https://docs.astral.sh/uv/concepts/projects/sync/
-
-include all GUIs ::
-
-    uv sync --extra uis
-
-AND with all our hacking tools::
-
-    uv sync --dev --extra uis
-
-
-Ensure you can run the root-daemon::
-
-    uv run pikerd [-l info --pdb]
-
 
 hacky install on nixos
@@ -119,18 +103,7 @@ start a chart
 *************
 
 run a realtime OHLCV chart stand-alone::
 
-    [uv run] piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken
+    piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken
 
-    # ^^^ iff you haven't activated the py-env,
-    # - https://docs.astral.sh/uv/concepts/projects/run/
-    #
-    # in order to create an explicit virt-env see,
-    # - https://docs.astral.sh/uv/concepts/projects/layout/#the-project-environment
-    # - https://docs.astral.sh/uv/pip/environments/
-    #
-    # use $UV_PROJECT_ENVIRONMENT to select any non-`.venv/`
-    # as the venv subdir in the repo's root.
-    # - https://docs.astral.sh/uv/reference/environment/#uv_project_environment
 
 this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets from 2 diff cexes
 overlaid on the same graph. Use of `piker` without first starting
@@ -1,5 +1,6 @@
+################
 # ---- CEXY ----
+################
 [binance]
 accounts.paper = 'paper'
 
@@ -12,41 +13,28 @@ accounts.spot = 'spot'
 spot.use_testnet = false
 spot.api_key = ''
 spot.api_secret = ''
-# ------ binance ------
 
 
 [deribit]
-# std assets
 key_id = ''
 key_secret = ''
-# options
-accounts.option = 'option'
-option.use_testnet = false
-option.key_id = ''
-option.key_secret = ''
-# aux logging from `cryptofeed`
-option.log.filename = 'cryptofeed.log'
-option.log.level = 'DEBUG'
-option.log.disabled = true
-# ------ deribit ------
 
 
 [kraken]
 key_descr = ''
 api_key = ''
 secret = ''
-# ------ kraken ------
 
 
 [kucoin]
 key_id = ''
 key_secret = ''
 key_passphrase = ''
-# ------ kucoin ------
 
 
+################
 # -- BROKERZ ---
+################
 [questrade]
 refresh_token = ''
 access_token = ''
@@ -54,55 +42,44 @@ api_server = 'https://api06.iq.questrade.com/'
 expires_in = 1800
 token_type = 'Bearer'
 expires_at = 1616095326.355846
-# ------ questrade ------
 
 
 [ib]
-# define the (set of) host-port socketaddrs that
-# brokerd.ib will scan to connect to an API endpoint
-# (ib-gw or ib-tws listening instances)
 hosts = [
     '127.0.0.1',
 ]
+# XXX: the order in which ports will be scanned
+# (by the `brokerd` daemon-actor)
+# is determined by the line order here.
+# TODO: when we eventually spawn gateways in our
+# container, we can just dynamically allocate these
+# using IBC.
 ports = [
     4002,  # gw
     7497,  # tws
 ]
 
-# When API endpoints are being scanned during startup, the order
-# of user-defined-account "names" (as defined below) here
-# determines which py-client connection is given priority to be
-# used for data-feed-requests according to whichever client
-# connected to an API endpoint which reported the equivalent
-# account number for that name.
+# XXX: for a paper account the flex web query service
+# is not supported so you have to manually download
+# an XML report and put it in a location that can be
+# accessed by the ``brokerd.ib`` backend code for parsing.
+flex_token = ''
+flex_trades_query_id = ''  # live account
 
+# when clients are being scanned this determines
+# which clients are preferred to be used for data
+# feeds based on the order of account names, if
+# detected as active on an API client.
 prefer_data_account = [
     'paper',
     'margin',
     'ira',
 ]
 
-# For long-term trades txn (transaction) history
-# processing (i.e. your txn ledger with IB) you can
-# (automatically for live accounts) query the FLEX
-# report system for past history.
-#
-# (For paper accounts the web query service
-# is not supported so you have to manually download
-# an XML report and put it in a location that can be
-# accessed by our `brokerd.ib` backend code for parsing).
-#
-flex_token = ''
-flex_trades_query_id = ''  # live account
 
-# define "aliases" (names) for each account number
-# such that the names can be reffed and logged throughout
-# the `piker.accounting` subsys and more easily
-# referred to by the user.
-#
-# These keys will be the set exposed through the order-mode
-# account-selection UI so that numbers are never shown.
 [ib.accounts]
-paper = 'DU0000000'  # <- literal account #
-margin = 'U0000000'
-ira = 'U0000000'
-# ------ ib ------
+# the order in which accounts will be selectable
+# in the order mode UI (if found via clients during
+# API-app scanning) when a new symbol is loaded.
+paper = 'XX0000000'
+margin = 'X0000000'
+ira = 'X0000000'
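The `[ib]` alias scheme above is plain TOML, so the name-to-number resolution it describes can be sanity-checked with stdlib `tomllib`; a minimal sketch (only the key names come from the config shown, the lookup logic is our illustration)::

    import tomllib

    conf = tomllib.loads('''
    [ib]
    prefer_data_account = ['paper', 'margin', 'ira']

    [ib.accounts]
    paper = 'XX0000000'
    margin = 'X0000000'
    ira = 'X0000000'
    ''')

    ib = conf['ib']
    # first preferred alias that actually has a configured account number
    name = next(
        n for n in ib['prefer_data_account']
        if n in ib['accounts']
    )
    print(name, ib['accounts'][name])  # -> paper XX0000000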
@@ -1,9 +1,7 @@
 [network]
-pikerd = [
-    '/ipv4/127.0.0.1/tcp/6116',  # std localhost daemon-actor tree
-    # '/uds/6116',  # TODO std uds socket file
-]
+tsdb.backend = 'marketstore'
+tsdb.host = 'localhost'
+tsdb.grpc_port = 5995
 
 
 [ui]
 # set custom font + size which will scale entire UI
@@ -1,138 +1,30 @@
 running ``ib`` gateway in ``docker``
 ------------------------------------
-We have a config based on a well maintained community
-image from `@gnzsnz`:
+We have a config based on the (now defunct)
+image from "waytrade":
 
-https://github.com/gnzsnz/ib-gateway-docker
+https://github.com/waytrade/ib-gateway-docker
 
-To startup this image simply run the command::
+To startup this image with our custom settings
+simply run the command::
 
     docker compose up
 
-(For further usage^ see the official `docker-compose`_ docs)
-
-And you should have the following socket-available services by
-default:
-
-- ``x11vnc1 @ 127.0.0.1:5900``
+And you should have the following socket-available services:
+
+- ``x11vnc1@127.0.0.1:3003``
 - ``ib-gw@127.0.0.1:4002``
 
-You can now attach to the container via a VNC client with password-auth;
-here is an example using ``vncviewer`` on ``linux``::
-
-    vncviewer localhost:5900
-
-now enter the pw (password) you set via an `.env file`_ or pw-file
-(see second code blob) according to the `credentials section`_.
-
-If you want to change away from their default config see the example
-`docker-compose.yml`-config issue and config-section of the readme,
-
-- https://github.com/gnzsnz/ib-gateway-docker?tab=readme-ov-file#configuration
-- https://github.com/gnzsnz/ib-gateway-docker/discussions/103
-
-.. _.env file: https://github.com/gnzsnz/ib-gateway-docker?tab=readme-ov-file#how-to-use-it
-.. _docker-compose: https://docs.docker.com/compose/
-.. _credentials section: https://github.com/gnzsnz/ib-gateway-docker?tab=readme-ov-file#credentials
-
-
-Connecting to the API from `piker`
-----------------------------------
-In order to expose the container's API endpoint to the
-`brokerd/datad/ib` actor, we need to add a section to the user's
-`brokers.toml` config (note the below is similar to the repo-shipped
-template file),
-
-.. code:: toml
-
-    [ib]
-    # define the (set of) host-port socketaddrs that
-    # brokerd.ib will scan to connect to an API endpoint
-    # (ib-gw or ib-tws listening instances)
-    hosts = [
-        '127.0.0.1',
-    ]
-    ports = [
-        4002,  # gw
-        7497,  # tws
-    ]
-
-    # When API endpoints are being scanned during startup, the order
-    # of user-defined-account "names" (as defined below) here
-    # determines which py-client connection is given priority to be
-    # used for data-feed-requests according to whichever client
-    # connected to an API endpoint which reported the equivalent
-    # account number for that name.
-    prefer_data_account = [
-        'paper',
-        'margin',
-        'ira',
-    ]
-
-    # define "aliases" (names) for each account number
-    # such that the names can be reffed and logged throughout
-    # the `piker.accounting` subsys and more easily
-    # referred to by the user.
-    #
-    # These keys will be the set exposed through the order-mode
-    # account-selection UI so that numbers are never shown.
-    [ib.accounts]
-    paper = 'XX0000000'
-    margin = 'X0000000'
-    ira = 'X0000000'
-
-
-the broker daemon can also connect to the container's VNC server for
-added functionalities including,
-
-- viewing the API endpoint program's GUI for manual interventions,
-- workarounds for historical data throttling using hotkey hacks,
-
-Add a further section to `brokers.toml` which maps each API-ep's
-port to a table of VNC server connection info like,
-
-.. code:: toml
-
-    [ib.vnc_addrs]
-    4002 = {host = 'localhost', port = 5900, pw = 'doggy'}
-
-The `pw = 'doggy'` here ^ should be the same value as the particular
-container instance's `.env` file setting (when it was run),
-
-.. code:: ini
-
-    VNC_SERVER_PASSWORD='doggy'
-
-
-IF you also want to run ``TWS``
--------------------------------
-You can also run it containerized,
-
-https://github.com/gnzsnz/ib-gateway-docker?tab=readme-ov-file#using-tws
-
-
-SECURITY stuff (advanced, only if you're paranoid)
---------------------------------------------------
-First and foremost if doing a "distributed" container setup where you
-run the ``ib-gw`` docker container and your connecting API client
-(likely ``ib_async`` from python) on **different hosts** be sure to
-read the `security considerations`_ section!
-
-And for a further (somewhat paranoid) perspective from
-a long-time-ago serious devops eng..
-
-Though "``ib``" claims they filter remote host connections outside
-``localhost`` (aka ``127.0.0.1`` on ipv4) it's prolly justified if
-you'd like to filter the socket at the *OS level* using a stateless
-firewall rule::
+You can attach to the container via a VNC client
+without password auth.
+
+SECURITY STUFF!?!?!
+-------------------
+Though "``ib``" claims they host-filter connections outside
+localhost (aka ``127.0.0.1``) it's probably better if you filter
+the socket at the OS level using a stateless firewall rule::
 
     ip rule add not unicast iif lo to 0.0.0.0/0 dport 4002
 
-We will soon have this either baked into our own custom derivative
-image (or patched into the current upstream one after further testing)
-but for now you'll have to do it urself, diggity dawg.
-
-.. _security considerations: https://github.com/gnzsnz/ib-gateway-docker?tab=readme-ov-file#security-considerations
+We will soon have this baked into our own custom image but for
+now you'll have to do it urself dawgy.
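The `[ib.vnc_addrs]` port-to-VNC mapping above is likewise just a TOML table; a hypothetical lookup sketch (note that TOML bare keys parse as strings, so the API port is looked up as `'4002'`)::

    import tomllib

    conf = tomllib.loads('''
    [ib.vnc_addrs]
    4002 = {host = 'localhost', port = 5900, pw = 'doggy'}
    ''')

    vnc = conf['ib']['vnc_addrs']['4002']
    print(f"vnc://{vnc['host']}:{vnc['port']}")  # -> vnc://localhost:5900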
@@ -1,15 +1,10 @@
-# a community maintained IB API container!
-#
-# https://github.com/gnzsnz/ib-gateway-docker
-#
-# For piker we (currently) include some minor deviations
-# for some config files in the `volumes` section.
-#
-# See full configuration settings @
-# - https://github.com/gnzsnz/ib-gateway-docker?tab=readme-ov-file#configuration
-# - https://github.com/gnzsnz/ib-gateway-docker/discussions/103
+# rework from the original @
+# https://github.com/waytrade/ib-gateway-docker/blob/master/docker-compose.yml
+version: "3.5"
 
 services:
 
   ib_gw_paper:
 
     # apparently java is a mega cukc:
@@ -55,22 +50,16 @@ services:
         target: /root/scripts/run_x11_vnc.sh
         read_only: true
 
-    # NOTE: an alt method to fill these out is to
-    # define an `.env` file in the same dir as
-    # this compose file.
+    # NOTE: to fill these out, define an `.env` file in the same dir as
+    # this compose file which looks something like:
+    # TWS_USERID='myuser'
+    # TWS_PASSWORD='guest'
     environment:
       TWS_USERID: ${TWS_USERID}
-      # TWS_USERID: 'myuser'
       TWS_PASSWORD: ${TWS_PASSWORD}
-      # TWS_PASSWORD: 'guest'
-      TRADING_MODE: ${TRADING_MODE}
-      # TRADING_MODE: 'paper'
-      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD}
-      # VNC_SERVER_PASSWORD: 'doggy'
-
-      # TODO, see if we can get this supported like it
-      # was on the old `waytrade` image?
-      # VNC_SERVER_PORT: '3003'
+      TRADING_MODE: 'paper'
+      VNC_SERVER_PASSWORD: 'doggy'
+      VNC_SERVER_PORT: '3003'
 
     # ports:
     #   - target: 4002
@@ -87,9 +76,6 @@ services:
     #   - "127.0.0.1:4002:4002"
     #   - "127.0.0.1:5900:5900"
 
-    # TODO, a masked but working example of dual paper + live
-    # ib-gw instances running in a single app run!
-    #
     # ib_gw_live:
     #   image: waytrade/ib-gateway:1012.2i
     #   restart: no
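The `${VAR}`-style interpolation compose performs above pulls values from the environment (optionally seeded from an `.env` file); stdlib `string.Template` has the same substitution semantics, so a toy stand-in (illustrative only, not how compose itself is implemented) looks like::

    import os
    from string import Template

    os.environ.setdefault('TWS_USERID', 'myuser')  # normally from `.env`
    raw = 'TWS_USERID: ${TWS_USERID}'
    print(Template(raw).substitute(os.environ))  # -> TWS_USERID: myuser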
@@ -121,7 +121,6 @@ async def bot_main():
             # tick_throttle=10,
         ) as feed,
 
-        tractor.trionics.collapse_eg(),
         trio.open_nursery() as tn,
     ):
         assert accounts
@@ -1,338 +0,0 @@
-#!/usr/bin/env python
-from decimal import (
-    Decimal,
-)
-from pathlib import Path
-
-import numpy as np
-# import polars as pl
-import trio
-import tractor
-from datetime import datetime
-# from pprint import pformat
-from piker.brokers.deribit.api import (
-    get_client,
-    maybe_open_oi_feed,
-)
-from piker.storage import open_storage_client, StorageClient
-from piker.log import get_logger
-import sys
-import pyqtgraph as pg
-from PyQt6 import QtCore
-from pyqtgraph import ScatterPlotItem, InfiniteLine
-from PyQt6.QtWidgets import QApplication
-from cryptofeed.symbols import Symbol
-
-
-log = get_logger(__name__)
-# XXX, use 2 newlines between top level LOC (even between these
-# imports and the next function line ;)
-
-
-def check_if_complete(
-    oi: dict[str, dict[str, Decimal | None]]
-) -> bool:
-    return all(
-        oi[strike]['C'] is not None
-        and
-        oi[strike]['P'] is not None for strike in oi
-    )
-
-
-async def max_pain_daemon(
-) -> None:
-    oi_by_strikes: dict[str, dict[str, Decimal | None]]
-    instruments: list[Symbol] = []
-    expiry_dates: list[str]
-    expiry_date: str
-    currency: str = 'btc'
-    kind: str = 'option'
-
-    async with get_client(
-    ) as client:
-        expiry_dates: list[str] = await client.get_expiration_dates(
-            currency=currency,
-            kind=kind
-        )
-
-        log.info(
-            f'Available expiries for {currency!r}-{kind}:\n'
-            f'{expiry_dates}\n'
-        )
-        expiry_date: str = input(
-            'Please enter a valid expiration date: '
-        ).upper()
-        print('Starting little daemon...')
-
-        # maybe move this type annot down to the assignment line?
-        oi_by_strikes: dict[str, dict[str, Decimal]]
-        instruments = await client.get_instruments(
-            expiry_date=expiry_date,
-        )
-        oi_by_strikes = client.get_strikes_dict(instruments)
-
-
-    def get_total_intrinsic_values(
-        oi_by_strikes: dict[str, dict[str, Decimal]]
-    ) -> dict[str, dict[str, Decimal]]:
-        call_cash: Decimal = Decimal(0)
-        put_cash: Decimal = Decimal(0)
-        intrinsic_values: dict[str, dict[str, Decimal]] = {}
-        closes: list = sorted(Decimal(close) for close in oi_by_strikes)
-
-        for strike, oi in oi_by_strikes.items():
-            s = Decimal(strike)
-            call_cash = sum(max(0, (s - c) * oi_by_strikes[str(c)]['C']) for c in closes)
-            put_cash = sum(max(0, (c - s) * oi_by_strikes[str(c)]['P']) for c in closes)
-
-            intrinsic_values[strike] = {
-                'C': call_cash,
-                'P': put_cash,
-                'total': call_cash + put_cash,
-            }
-
-        return intrinsic_values
-
-    def get_intrinsic_value_and_max_pain(
-        intrinsic_values: dict[str, dict[str, Decimal]]
-    ):
-        # We need to find the lowest value, so we start at
-        # infinity to ensure that, and the max_pain must be
-        # an amount greater than zero.
-        total_intrinsic_value: Decimal = Decimal('Infinity')
-        max_pain: Decimal = Decimal(0)
-
-        for strike, oi in oi_by_strikes.items():
-            s = Decimal(strike)
-            if intrinsic_values[strike]['total'] < total_intrinsic_value:
-                total_intrinsic_value = intrinsic_values[strike]['total']
-                max_pain = s
-
-        return total_intrinsic_value, max_pain
-
-    def plot_graph(
-        oi_by_strikes: dict[str, dict[str, Decimal]],
-        plot,
-    ):
-        """Update the bar graph with new open interest data."""
-        plot.clear()
-
-        intrinsic_values = get_total_intrinsic_values(oi_by_strikes)
-
-        for strike_str in sorted(oi_by_strikes, key=lambda x: int(x)):
-            strike = int(strike_str)
-            calls_val = float(oi_by_strikes[strike_str]['C'])
-            puts_val = float(oi_by_strikes[strike_str]['P'])
-
-            bar_c = pg.BarGraphItem(
-                x=[strike - 100],
-                height=[calls_val],
-                width=200,
-                pen='w',
-                brush=(0, 0, 255, 150)
-            )
-            plot.addItem(bar_c)
-
-            bar_p = pg.BarGraphItem(
-                x=[strike + 100],
-                height=[puts_val],
-                width=200,
-                pen='w',
-                brush=(255, 0, 0, 150)
-            )
-            plot.addItem(bar_p)
-
-            total_val = float(intrinsic_values[strike_str]['total']) / 100000
-
-            scatter_iv = ScatterPlotItem(
-                x=[strike],
-                y=[total_val],
-                pen=pg.mkPen(color=(0, 255, 0), width=2),
-                brush=pg.mkBrush(0, 255, 0, 150),
-                size=3,
-                symbol='o'
-            )
-            plot.addItem(scatter_iv)
-
-        _, max_pain = get_intrinsic_value_and_max_pain(intrinsic_values)
-
-        vertical_line = InfiniteLine(
-            pos=max_pain,
-            angle=90,
-            pen=pg.mkPen(color='yellow', width=1, style=QtCore.Qt.PenStyle.DotLine),
-            label=f'Max pain: {max_pain:,.0f}',
-            labelOpts={
-                'position': 0.85,
-                'color': 'yellow',
-                'movable': True
-            }
-        )
-        plot.addItem(vertical_line)
-
-    def update_oi_by_strikes(msg: tuple):
-        nonlocal oi_by_strikes
-        if 'oi' == msg[0]:
-            strike_price = msg[1]['strike_price']
-            option_type = msg[1]['option_type']
-            open_interest = msg[1]['open_interest']
-            oi_by_strikes.setdefault(
-                strike_price, {}
-            ).update(
-                {option_type: open_interest}
-            )
-
-    # Define the structured dtype
-    dtype = np.dtype([
-        ('time', int),
-        ('oi', float),
-        ('oi_calc', float),
-    ])
-
-    async def write_open_interest_on_file(msg: tuple, client: StorageClient):
-        if 'oi' == msg[0]:
-            nonlocal expiry_date
-            timestamp = msg[1]['timestamp']
-            strike_price = msg[1]["strike_price"]
-            option_type = msg[1]['option_type'].lower()
-            col_sym_key = f'btc-{expiry_date.lower()}-{strike_price}-{option_type}'
-
-            # Create the numpy array with sample data
-            data = np.array([
-                (
-                    int(timestamp),
-                    float(msg[1]['open_interest']),
-                    np.nan,
-                ),
-            ], dtype=dtype)
-
-            path: Path = await client.write_oi(
-                col_sym_key,
-                data,
-            )
-            # TODO, use std logging like this throughout for status
-            # emissions on console!
-            log.info(f'Wrote OI history to {path}')
-
-    def get_max_pain(
-        oi_by_strikes: dict[str, dict[str, Decimal]]
-    ) -> dict[str, str | Decimal]:
-        '''
-        This method requires only the strike_prices and oi for calls
-        and puts; the closes list is the same as the strike_prices.
-        The idea is to sum all the call and put cash for each strike
-        and the ITM strikes from that strike; the lowest value is the
-        intrinsic value we are looking for.
-
-        '''
-        nonlocal timestamp
-
-        intrinsic_values = get_total_intrinsic_values(oi_by_strikes)
-
-        total_intrinsic_value, max_pain = get_intrinsic_value_and_max_pain(intrinsic_values)
-
-        return {
-            'timestamp': timestamp,
-            'expiry_date': expiry_date,
-            'total_intrinsic_value': total_intrinsic_value,
-            'max_pain': max_pain,
-        }
-
-    async with (
-        open_storage_client() as (_, storage),
-
-        maybe_open_oi_feed(
-            instruments,
-        ) as oi_feed,
-    ):
-        # Initialize QApplication
-        app = QApplication(sys.argv)
-
-        win = pg.GraphicsLayoutWidget(show=True)
-        win.setWindowTitle('Calls (blue) vs Puts (red)')
-
-        plot = win.addPlot(title='OI by Strikes')
-        plot.showGrid(x=True, y=True)
-        print('Plot initialized...')
-
-        async for msg in oi_feed:
-
-            # In-memory oi_by_strikes dict; all messages are filtered here
-            # and the dict is updated with the open interest data
-            update_oi_by_strikes(msg)
-
-            # Write to file using the storage client
-            await write_open_interest_on_file(msg, storage)
-
-            # Max pain calcs; before starting we must gather all the open interest
-            # for all the strike prices and option types available for an expiration date
-            if check_if_complete(oi_by_strikes):
-                if 'oi' == msg[0]:
-                    # Here we must read from the filesystem the latest open interest
-                    # value for each instrument for that specific expiration date,
-                    # i.e. look up the last update for the instrument
-                    # btc-{expiry_date}-*oi1s.parquet (1s because it is
-                    # hardcoded to something, sorry.)
-                    timestamp = msg[1]['timestamp']
-                    max_pain = get_max_pain(oi_by_strikes)
-                    # intrinsic_values = get_total_intrinsic_values(oi_by_strikes)
-
-                    # graph here
-                    plot_graph(oi_by_strikes, plot)
-
-                    # TODO, use a single multiline string with `()`
-                    # and drop the multiple `print()` calls (this
-                    # should be done elsewhere in this file as well!)
-                    #
-                    # As per the docs,
-                    # https://docs.python.org/3/reference/lexical_analysis.html#string-literal-concatenation
-                    # you could instead do,
-                    # print(
-                    #     '-----------------------------------------------\n'
-                    #     f'timestamp: {datetime.fromtimestamp(max_pain['timestamp'])}\n'
-                    # )
-                    # WHY?
-                    # |_ less ctx-switches/calls to `print()`
-                    # |_ the `str` can then be modified / passed
-                    #    around as a variable more easily if needed in
-                    #    the future ;)
-                    #
-                    # ALSO, i believe there already is a stdlib
-                    # module to do "alignment" of text which you
-                    # could try for doing the right-side alignment,
-                    # https://docs.python.org/3/library/textwrap.html#textwrap.indent
-                    #
-                    print('-----------------------------------------------')
-                    print(f'timestamp: {datetime.fromtimestamp(max_pain['timestamp'])}')
-                    print(f'expiry_date: {max_pain['expiry_date']}')
-                    print(f'max_pain: {max_pain['max_pain']:,.0f}')
-                    print(f'total intrinsic value: {max_pain['total_intrinsic_value']:,.0f}')
-                    print('-----------------------------------------------')
-
-            # Process GUI events to keep the window responsive
-            app.processEvents()
-
-
-async def main():
-
-    async with tractor.open_nursery(
-        debug_mode=True,
-        loglevel='info',
-    ) as an:
-        from tractor import log
-        log.get_console_log(level='info')
-
-        ptl: tractor.Portal = await an.start_actor(
-            'max_pain_daemon',
-            enable_modules=[__name__],
-            infect_asyncio=True,
-            # ^TODO, we can actually run this in the root-actor now
-            # if needed as per 2nd "section" in,
-            # https://pikers.dev/goodboy/tractor/pulls/2
-            #
-            # NOTE, will first require us porting to modern
-            # `tractor:main` though ofc!
-        )
-        await ptl.run(max_pain_daemon)
-
-
-if __name__ == '__main__':
-    trio.run(main)
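Stripped of the feed and Qt plumbing, the max-pain rule the deleted script implements is "find the strike that minimizes total intrinsic value paid out at settlement"; a toy re-derivation on made-up open-interest numbers::

    from decimal import Decimal

    oi_by_strikes: dict[str, dict[str, Decimal]] = {
        '50000': {'C': Decimal(100), 'P': Decimal(10)},
        '60000': {'C': Decimal(50), 'P': Decimal(40)},
        '70000': {'C': Decimal(5), 'P': Decimal(120)},
    }
    strikes = sorted(Decimal(k) for k in oi_by_strikes)

    def total_intrinsic(settle: Decimal) -> Decimal:
        # cash value of all ITM calls plus all ITM puts if price
        # settles at `settle` on expiry
        calls = sum(
            max(Decimal(0), settle - k) * oi_by_strikes[str(k)]['C']
            for k in strikes
        )
        puts = sum(
            max(Decimal(0), k - settle) * oi_by_strikes[str(k)]['P']
            for k in strikes
        )
        return calls + puts

    print(min(strikes, key=total_intrinsic))  # -> 60000 for this toy book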
@@ -1,29 +0,0 @@
-## Max Pain Calculation for Deribit Options
-
-This feature calculates the max pain point for options traded
-on the Deribit exchange using the cryptofeed library.
-
-- Functions in the api module for fetching options data from Deribit.
-  [commit](https://pikers.dev/pikers/piker/commit/da55856dd2876291f55a06eb0561438a912d8241)
-
-- Compute the max pain point based on open interest data using
-  deribit's api.
-  [commit](https://pikers.dev/pikers/piker/commit/0d9d6e15ba0edeb662ec97f7599dd66af3046b94)
-
-### How to test it?
-
-**Before start:** in order to get this working with `uv`, you
-**must** use my [`tractor` fork](https://pikers.dev/ntorres/tractor/src/branch/aio_abandons)
-and this branch: `aio_abandons`; the reason is that I cherry-picked the
-`uv_migration` that guille made, and for some reason I didn't dive
-into, on my system I need tractor using `uv` too. quite hacky
-I guess.
-
-1. `uv lock`
-
-2. `uv run --no-dev python examples/max_pain.py`
-
-3. A message should be displayed; enter one of the available
-   expiration dates.
-
-4. The script should be up and running.
@@ -33,6 +33,7 @@ from ._pos import (
     Account,
     load_account,
     load_account_from_ledger,
+    open_pps,
     open_account,
     Position,
 )
@@ -41,6 +42,7 @@ from ._mktinfo import (
     dec_digits,
     digits_to_dec,
     MktPair,
+    Symbol,
     unpack_fqme,
     _derivs as DerivTypes,
 )
@@ -58,6 +60,7 @@ __all__ = [
     'Asset',
     'MktPair',
     'Position',
+    'Symbol',
     'Transaction',
     'TransactionLedger',
     'dec_digits',
@@ -67,6 +70,7 @@ __all__ = [
     'load_account_from_ledger',
     'mk_allocator',
     'open_account',
+    'open_pps',
     'open_trade_ledger',
     'unpack_fqme',
     'DerivTypes',
@@ -677,3 +677,90 @@ def unpack_fqme(
         # '.'.join([mkt_ep, venue]),
         suffix,
     )
+
+
+class Symbol(Struct):
+    '''
+    I guess this is some kinda container thing for dealing with
+    all the different meta-data formats from brokers?
+
+    '''
+    key: str
+
+    broker: str = ''
+    venue: str = ''
+
+    # precision descriptors for price and vlm
+    tick_size: Decimal = Decimal('0.01')
+    lot_tick_size: Decimal = Decimal('0.0')
+
+    suffix: str = ''
+    broker_info: dict[str, dict[str, Any]] = {}
+
+    @classmethod
+    def from_fqme(
+        cls,
+        fqsn: str,
+        info: dict[str, Any],
+
+    ) -> Symbol:
+        broker, mktep, venue, suffix = unpack_fqme(fqsn)
+        tick_size = info.get('price_tick_size', 0.01)
+        lot_size = info.get('lot_tick_size', 0.0)
+
+        return Symbol(
+            broker=broker,
+            key=mktep,
+            tick_size=tick_size,
+            lot_tick_size=lot_size,
+            venue=venue,
+            suffix=suffix,
+            broker_info={broker: info},
+        )
+
+    @property
+    def type_key(self) -> str:
+        return list(self.broker_info.values())[0]['asset_type']
+
+    @property
+    def tick_size_digits(self) -> int:
+        return float_digits(self.tick_size)
+
+    @property
+    def lot_size_digits(self) -> int:
+        return float_digits(self.lot_tick_size)
+
+    @property
+    def price_tick(self) -> Decimal:
+        return Decimal(str(self.tick_size))
+
+    @property
+    def size_tick(self) -> Decimal:
+        return Decimal(str(self.lot_tick_size))
+
+    @property
+    def broker(self) -> str:
+        return list(self.broker_info.keys())[0]
+
+    @property
+    def fqme(self) -> str:
+        return maybe_cons_tokens([
+            self.key,  # final "pair name" (eg. qqq[/usd], btcusdt)
+            self.venue,
+            self.suffix,  # includes expiry and other con info
+            self.broker,
+        ])
+
+    def quantize(
+        self,
+        size: float,
+    ) -> Decimal:
+        digits = float_digits(self.lot_tick_size)
+        return Decimal(size).quantize(
+            Decimal(f'1.{"0".ljust(digits, "0")}'),
+            rounding=ROUND_HALF_EVEN
+        )
+
+    # NOTE: when cast to `str` return fqme
+    def __str__(self) -> str:
+        return self.fqme
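The `Symbol.quantize()` method added above snaps a float size onto the instrument's lot tick via banker's rounding; the same idea stated standalone (the helper name and digit-derivation here are ours, not piker's)::

    from decimal import Decimal, ROUND_HALF_EVEN

    def quantize_size(size: float, lot_tick: Decimal) -> Decimal:
        # number of decimal places implied by the tick, eg. 0.001 -> 3
        digits = -lot_tick.as_tuple().exponent
        quantum = Decimal(f'1.{"0" * digits}') if digits else Decimal(1)
        return Decimal(str(size)).quantize(
            quantum,
            rounding=ROUND_HALF_EVEN,  # banker's rounding, as above
        )

    print(quantize_size(0.123456, Decimal('0.001')))  # -> 0.123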
@@ -353,20 +353,17 @@ class Position(Struct):
     ) -> bool:
         '''
         Update clearing table by calculating the rolling ppu and
-        (accumulative) size in both the clears entry and local attrs
-        state.
+        (accumulative) size in both the clears entry and local
+        attrs state.
 
         Inserts are always done in datetime sorted order.
 
         '''
+        # added: bool = False
         tid: str = t.tid
         if tid in self._events:
-            log.debug(
-                f'Txn is already added?\n'
-                f'\n'
-                f'{t}\n'
-            )
-            return False
+            log.warning(f'{t} is already added?!')
+            # return added
 
         # TODO: apparently this IS possible with a dict but not
         # common and probably not that beneficial unless we're also
@@ -447,12 +444,6 @@ class Position(Struct):
     # def suggest_split(self) -> float:
     #     ...
 
-    # ?TODO, for sending rendered state over the wire?
-    # def summary(self) -> PositionSummary:
-    #     do minimal conversion to a subset of fields
-    #     currently defined in `.clearing._messages.BrokerdPosition`
-
 
 class Account(Struct):
     '''
@@ -705,7 +696,7 @@ class Account(Struct):
         else:
             # TODO: we reallly need a diff set of
             # loglevels/colors per subsys.
-            log.debug(
+            log.warning(
                 f'Recent position for {fqme} was closed!'
             )
 
@@ -719,7 +710,7 @@ class Account(Struct):
         # XXX WTF: if we use a tomlkit.Integer here we get this
         # super weird --1 thing going on for cumsize!?1!
         # NOTE: the fix was to always float() the size value loaded
-        # in open_account() below!
+        # in open_pps() below!
         config.write(
             config=self.conf,
             path=self.conf_path,
@@ -903,6 +894,7 @@ def open_account(
         clears_table['dt'] = dt
         trans.append(Transaction(
             fqme=bs_mktid,
+            # sym=mkt,
             bs_mktid=bs_mktid,
             tid=tid,
             # XXX: not sure why sometimes these are loaded as
@@ -925,18 +917,7 @@ def open_account(
         ):
             expiry: pendulum.DateTime = pendulum.parse(expiry)
 
-        # !XXX, should never be duplicates over
-        # a backend-(broker)-system's unique market-IDs!
-        if pos := pp_objs.get(bs_mktid):
-            if mkt != pos.mkt:
-                log.warning(
-                    f'Duplicated position but diff `MktPair.fqme` ??\n'
-                    f'bs_mktid: {bs_mktid!r}\n'
-                    f'pos.mkt: {pos.mkt}\n'
-                    f'mkt: {mkt}\n'
-                )
-        else:
-            pos = pp_objs[bs_mktid] = Position(
+        pp = pp_objs[bs_mktid] = Position(
             mkt,
             split_ratio=split_ratio,
             bs_mktid=bs_mktid,
@@ -948,13 +929,8 @@ def open_account(
         # state, since today's records may have already been
         # processed!
         for t in trans:
-            added: bool = pos.add_clear(t)
-            if not added:
-                log.warning(
-                    f'Txn already recorded in pp ??\n'
-                    f'\n'
-                    f'{t}\n'
-                )
+            pp.add_clear(t)
 
     try:
         yield acnt
     finally:
@@ -962,6 +938,20 @@ def open_account(
         acnt.write_config()
 
 
+# TODO: drop the old name and THIS!
+@cm
+def open_pps(
+    *args,
+    **kwargs,
+) -> Generator[Account, None, None]:
+    log.warning(
+        '`open_pps()` is now deprecated!\n'
+        'Please use `with open_account() as acnt:`'
+    )
+    with open_account(*args, **kwargs) as acnt:
+        yield acnt
+
+
 def load_account_from_ledger(
 
     brokername: str,
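The `add_clear()` change above (left side) makes duplicate-txn inserts report failure to the caller instead of only logging; the underlying "idempotent insert returning a bool" pattern, reduced to a sketch with simplified names::

    class ClearsTable:
        def __init__(self) -> None:
            self._events: dict[str, dict] = {}

        def add_clear(self, tid: str, txn: dict) -> bool:
            if tid in self._events:
                # already recorded; caller decides how loudly to warn
                return False
            self._events[tid] = txn
            return True

    table = ClearsTable()
    assert table.add_clear('t1', {'size': 1})
    assert not table.add_clear('t1', {'size': 1})  # duplicate rejected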
@@ -22,9 +22,7 @@ you know when you're losing money (if possible) XD
 from __future__ import annotations
 from collections.abc import ValuesView
 from contextlib import contextmanager as cm
-from functools import partial
 from math import copysign
-from pprint import pformat
 from typing import (
     Any,
     Callable,
@@ -32,7 +30,6 @@ from typing import (
     TYPE_CHECKING,
 )
 
-from tractor.devx import maybe_open_crash_handler
 import polars as pl
 from pendulum import (
     DateTime,
@@ -40,16 +37,12 @@ from pendulum import (
     parse,
 )
 
-from ..log import get_logger
-
 if TYPE_CHECKING:
     from ._ledger import (
         Transaction,
         TransactionLedger,
     )
 
-log = get_logger(__name__)
-
 
 def ppu(
     clears: Iterator[Transaction],
@@ -245,9 +238,6 @@ def iter_by_dt(
 
     def dyn_parse_to_dt(
         tx: tuple[str, dict[str, Any]] | Transaction,
-
-        debug: bool = False,
-        _invalid: list|None = None,
     ) -> DateTime:
 
         # handle `.items()` inputs
@@ -260,16 +250,11 @@ def iter_by_dt(
         # get best parser for this record..
         for k in parsers:
             if (
-                (v := getattr(tx, k, None))
-                or
-                (
-                    isdict
-                    and
-                    (v := tx.get(k))
-                )
+                isdict and k in tx
+                or getattr(tx, k, None)
             ):
-                # TODO? remove yah?
-                # v = tx[k] if isdict else tx.dt
+                v = tx[k] if isdict else tx.dt
+                assert v is not None, f'No valid value for `{k}`!?'
 
                 # only call parser on the value if not None from
                 # the `parsers` table above (when NOT using
@@ -277,63 +262,21 @@ def iter_by_dt(
                 # sort on it directly
                 if (
                     not isinstance(v, DateTime)
-                    and
-                    (parser := parsers.get(k))
+                    and (parser := parsers.get(k))
                 ):
-                    ret = parser(v)
+                    return parser(v)
                 else:
-                    ret = v
-
-                return ret
+                    return v
 
             else:
-                continue
-
-        # XXX: we should never really get here bc it means some kinda
-        # bad txn-record (field) data..
-        #
-        # -> set the `debug_mode = True` if you want to trace such
-        #    cases from REPL ;)
-        else:
-            debug_mode: bool = False
-            report: str = (
-                f'Invalid txn time ??\n'
-                f'txn-id: {k!r}\n'
-                f'{k!r}: {v!r}\n'
-            )
-            if debug_mode:
-                with maybe_open_crash_handler(
-                    pdb=debug_mode,
-                    raise_on_exit=False,
-                ):
-                    raise ValueError(report)
-            else:
-                log.error(report)
-
-            if _invalid is not None:
-                _invalid.append(tx)
-            return from_timestamp(0.)
-
-        # breakpoint()
+                # XXX: should never get here..
+                breakpoint()
 
     entry: tuple[str, dict] | Transaction
-    invalid: list = []
     for entry in sorted(
         records,
-        key=key or partial(
-            dyn_parse_to_dt,
-            _invalid=invalid,
-        ),
+        key=key or dyn_parse_to_dt,
     ):
-        if entry in invalid:
-            log.warning(
-                f'Ignoring txn w invalid timestamp ??\n'
-                f'{pformat(entry)}\n'
-                # f'txn-id: {k!r}\n'
-                # f'{k!r}: {v!r}\n'
-            )
-            continue
-
         # NOTE the type sig above; either pairs or txns B)
         yield entry
 
@@ -396,7 +339,6 @@ def open_ledger_dfs(
     acctname: str,
 
     ledger: TransactionLedger | None = None,
-    debug_mode: bool = False,
 
     **kwargs,
 
@@ -411,10 +353,8 @@ def open_ledger_dfs(
     can update the ledger on exit.
 
     '''
-    with maybe_open_crash_handler(
-        pdb=debug_mode,
-        # raise_on_exit=False,
-    ):
+    from piker.toolz import open_crash_handler
+    with open_crash_handler():
        if not ledger:
            import time
            from ._ledger import open_trade_ledger
@@ -506,7 +446,7 @@ def ledger_to_dfs(
 
     df = dfs[key] = ldf.with_columns([
 
-        pl.cum_sum('size').alias('cumsize'),
+        pl.cumsum('size').alias('cumsize'),
 
         # amount of source asset "sent" (via buy txns in
         # the market) to acquire the dst asset, PER txn.
@@ -521,7 +461,7 @@ def ledger_to_dfs(
     ]).with_columns([
 
         # rolling balance in src asset units
-        (pl.col('dst_bot').cum_sum() * -1).alias('src_balance'),
+        (pl.col('dst_bot').cumsum() * -1).alias('src_balance'),
 
         # "position operation type" in terms of increasing the
         # amount in the dst asset (entering) or decreasing the
@@ -663,7 +603,7 @@ def ledger_to_dfs(
         # cost that was included in the least-recently
         # entered txn that is still part of the current CSi
         # set.
-        # => we look up the cost-per-unit cum_sum and apply
+        # => we look up the cost-per-unit cumsum and apply
         # if over the current txn size (by multiplication)
         # and then reverse that previously applied cost on
         # the txn_cost for this record.
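Several hunks above track polars' rename of the cumulative-sum expression from the deprecated `cumsum` (right side) to `cum_sum` (left side); a quick check of the newer spelling, which needs a recent polars release::

    import polars as pl

    df = pl.DataFrame({'size': [1.0, 2.0, -1.0]}).with_columns(
        pl.col('size').cum_sum().alias('cumsize'),
    )
    print(df['cumsize'].to_list())  # -> [1.0, 3.0, 2.0]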
@@ -51,7 +51,6 @@ __brokers__: list[str] = [
     'ib',
     'kraken',
     'kucoin',
-    'deribit',
 
     # broken but used to work
     # 'questrade',
@@ -62,6 +61,7 @@ __brokers__: list[str] = [
     # wstrade
     # iex
 
+    # deribit
     # bitso
 ]
@@ -96,10 +96,7 @@ async def _setup_persistent_brokerd(
     # - `open_symbol_search()`
     # NOTE: see ep invocation details inside `.data.feed`.
     try:
-        async with (
-            tractor.trionics.collapse_eg(),
-            trio.open_nursery() as service_nursery
-        ):
+        async with trio.open_nursery() as service_nursery:
             bus: _FeedsBus = feed.get_feed_bus(
                 brokername,
                 service_nursery,
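Judging by its name alone (an assumption, we have not cross-checked `tractor`'s source here), the `tractor.trionics.collapse_eg()` manager stacked around `trio.open_nursery()` in the hunks above unwraps single-member `ExceptionGroup`s so plain `except SomeError:` handlers upstream keep working under trio's strict-EG semantics; a generic sketch of that idea::

    from contextlib import asynccontextmanager

    @asynccontextmanager
    async def collapse_single_eg():
        try:
            yield
        except ExceptionGroup as beg:
            if len(beg.exceptions) == 1:
                # re-raise the lone leaf exc, keep the group as context
                raise beg.exceptions[0] from beg
            raise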
@@ -374,14 +374,9 @@ class Client:
                 pair: Pair = pair_type(**item)
             except Exception as e:
                 e.add_note(
-                    f'\n'
-                    f'New or removed field we need to codify!\n'
-                    f'pair-type: {pair_type!r}\n'
-                    f'\n'
-                    f"Don't panic, prolly stupid binance changed their symbology schema again..\n"
-                    f'Check out their API docs here:\n'
-                    f'\n'
-                    f'https://binance-docs.github.io/apidocs/spot/en/#exchange-information\n'
+                    "\nDon't panic, prolly stupid binance changed their symbology schema again..\n"
+                    'Check out their API docs here:\n\n'
+                    'https://binance-docs.github.io/apidocs/spot/en/#exchange-information'
                 )
                 raise
             pair_table[pair.symbol.upper()] = pair
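The `Exception.add_note()` call above is stdlib (py3.11+): it attaches free-form context to an in-flight error without re-wrapping it, and the notes surface both in the traceback and via `__notes__`::

    try:
        try:
            raise ValueError('unexpected field in payload')
        except Exception as e:
            e.add_note('pair-type: SpotPair')  # illustrative note text
            raise
    except ValueError as e:
        print(e.__notes__)  # -> ['pair-type: SpotPair']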
@@ -440,7 +440,6 @@ async def open_trade_dialog(
     # - ledger: TransactionLedger
 
     async with (
-        tractor.trionics.collapse_eg(),
         trio.open_nursery() as tn,
         ctx.open_stream() as ems_stream,
     ):
@@ -448,6 +448,7 @@ async def subscribe(
 
 
 async def stream_quotes(
+
     send_chan: trio.abc.SendChannel,
     symbols: list[str],
     feed_is_live: trio.Event,
@@ -459,7 +460,6 @@ async def stream_quotes(
 ) -> None:
 
     async with (
-        tractor.trionics.maybe_raise_from_masking_exc(),
         send_chan as send_chan,
         open_cached_client('binance') as client,
     ):
@@ -97,13 +97,6 @@ class Pair(Struct, frozen=True, kw_only=True):
     baseAsset: str
     baseAssetPrecision: int
 
-    permissionSets: list[list[str]]
-
-    # https://developers.binance.com/docs/binance-spot-api-docs#2025-08-26
-    # will become non-optional 2025-08-28?
-    # https://developers.binance.com/docs/binance-spot-api-docs#future-changes
-    pegInstructionsAllowed: bool = False
-
     filters: dict[
         str,
         str | int | float,
@@ -149,11 +142,7 @@ class SpotPair(Pair, frozen=True):
     defaultSelfTradePreventionMode: str
     allowedSelfTradePreventionModes: list[str]
     permissions: list[str]
-
-    # can the paint botz creat liq gaps even easier on this asset?
-    # Bp
-    # https://developers.binance.com/docs/binance-spot-api-docs/faqs/order_amend_keep_priority
-    amendAllowed: bool
+    permissionSets: list[list[str]]
 
     # NOTE: see `.data._symcache.SymbologyCache.load()` for why
     ns_path: str = 'piker.brokers.binance:SpotPair'
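The defaulted `pegInstructionsAllowed: bool = False` field above is the usual schema-evolution trick for (what we presume is a msgspec-backed) `Struct`: give a newly announced exchange field a default so payloads from before the rollout still decode; a sketch (the field name comes from the diff, the rest is illustrative)::

    import msgspec

    class Pair(msgspec.Struct, frozen=True, kw_only=True):
        symbol: str
        pegInstructionsAllowed: bool = False  # optional until mandatory upstream

    old = msgspec.json.decode(b'{"symbol": "BTCUSDT"}', type=Pair)
    print(old.pegInstructionsAllowed)  # -> False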
@@ -25,7 +25,6 @@ from .api import (
     get_client,
 )
 from .feed import (
-    get_mkt_info,
     open_history_client,
     open_symbol_search,
     stream_quotes,
@@ -35,20 +34,15 @@ from .feed import (
 #     open_trade_dialog,
 #     norm_trade_records,
 # )
-from .venues import (
-    OptionPair,
-)
 
 log = get_logger(__name__)
 
 __all__ = [
     'get_client',
     # 'trades_dialogue',
-    'get_mkt_info',
     'open_history_client',
     'open_symbol_search',
     'stream_quotes',
-    'OptionPair',
     # 'norm_trade_records',
 ]
(File diff suppressed because it is too large.)
@ -18,59 +18,38 @@
|
||||||
Deribit backend.
|
Deribit backend.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from __future__ import annotations
|
|
||||||
from contextlib import asynccontextmanager as acm
|
from contextlib import asynccontextmanager as acm
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
from typing import (
|
from typing import Any, Optional, Callable
|
||||||
# Any,
|
|
||||||
# Optional,
|
|
||||||
Callable,
|
|
||||||
)
|
|
||||||
# from pprint import pformat
|
|
||||||
import time
|
import time
|
||||||
|
|
||||||
import cryptofeed
|
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
from trio_typing import TaskStatus
|
||||||
from pendulum import (
|
import pendulum
|
||||||
from_timestamp,
|
from rapidfuzz import process as fuzzy
|
||||||
)
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import tractor
|
import tractor
|
||||||
|
|
||||||
from piker.accounting import (
|
from piker.brokers import open_cached_client
|
||||||
Asset,
|
from piker.log import get_logger, get_console_log
|
||||||
MktPair,
|
from piker.data import ShmArray
|
||||||
unpack_fqme,
|
from piker.brokers._util import (
|
||||||
)
|
BrokerError,
|
||||||
from piker.brokers import (
|
|
||||||
open_cached_client,
|
|
||||||
NoData,
|
|
||||||
DataUnavailable,
|
DataUnavailable,
|
||||||
)
|
)
|
||||||
from piker._cacheables import (
|
|
||||||
async_lifo_cache,
|
|
||||||
)
|
|
||||||
from piker.log import (
|
|
||||||
get_logger,
|
|
||||||
mk_repr,
|
|
||||||
)
|
|
||||||
from piker.data.validate import FeedInit
|
|
||||||
|
|
||||||
|
from cryptofeed import FeedHandler
|
||||||
|
from cryptofeed.defines import (
|
||||||
|
DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
|
||||||
|
)
|
||||||
|
from cryptofeed.symbols import Symbol
|
||||||
|
|
||||||
from .api import (
|
from .api import (
|
||||||
Client,
|
Client, Trade,
|
||||||
# get_config,
|
get_config,
|
||||||
piker_sym_to_cb_sym,
|
str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
|
||||||
cb_sym_to_deribit_inst,
|
|
||||||
str_to_cb_sym,
|
|
||||||
maybe_open_price_feed
|
maybe_open_price_feed
|
||||||
)
|
)
|
||||||
from .venues import (
|
|
||||||
Pair,
|
|
||||||
OptionPair,
|
|
||||||
Trade,
|
|
||||||
)
|
|
||||||
|
|
||||||
_spawn_kwargs = {
|
_spawn_kwargs = {
|
||||||
'infect_asyncio': True,
|
'infect_asyncio': True,
|
||||||
|
@@ -85,215 +64,90 @@ async def open_history_client(
     mkt: MktPair,

 ) -> tuple[Callable, int]:

+    instrument: str = mkt.bs_fqme
     # TODO implement history getter for the new storage layer.
     async with open_cached_client('deribit') as client:
-        pair: OptionPair = client._pairs[mkt.dst.name]
-        # XXX NOTE, the cuckers use ms !!!
-        creation_time_s: int = pair.creation_timestamp/1000

         async def get_ohlc(
-            timeframe: float,
-            end_dt: datetime | None = None,
-            start_dt: datetime | None = None,
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,

         ) -> tuple[
             np.ndarray,
             datetime,  # start
             datetime,  # end
         ]:
-            if timeframe != 60:
-                raise DataUnavailable('Only 1m bars are supported')

-            array: np.ndarray = await client.bars(
-                mkt,
+            array = await client.bars(
+                instrument,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
             if len(array) == 0:
-                if (
-                    end_dt is None
-                ):
-                    raise DataUnavailable(
-                        'No history seems to exist yet?\n\n'
-                        f'{mkt}'
-                    )
-                elif (
-                    end_dt
-                    and
-                    end_dt.timestamp() < creation_time_s
-                ):
-                    # the contract can't have history
-                    # before it was created.
-                    pair_type_str: str = type(pair).__name__
-                    create_dt: datetime = from_timestamp(creation_time_s)
-                    raise DataUnavailable(
-                        f'No history prior to\n'
-                        f'`{pair_type_str}.creation_timestamp: int = '
-                        f'{pair.creation_timestamp}\n\n'
-                        f'------ deribit sux ------\n'
-                        f'WHICH IN "NORMAL PEOPLE WHO USE EPOCH TIME" form is,\n'
-                        f'creation_time_s: {creation_time_s}\n'
-                        f'create_dt: {create_dt}\n'
-                    )
-                raise NoData(
-                    f'No frame for {start_dt} -> {end_dt}\n'
-                )
+                raise DataUnavailable

-            start_dt = from_timestamp(array[0]['time'])
-            end_dt = from_timestamp(array[-1]['time'])
+            start_dt = pendulum.from_timestamp(array[0]['time'])
+            end_dt = pendulum.from_timestamp(array[-1]['time'])

-            times = array['time']
-            if not times.any():
-                raise ValueError(
-                    'Bad frame with null-times?\n\n'
-                    f'{times}'
-                )

-            if end_dt is None:
-                inow: int = round(time.time())
-                if (inow - times[-1]) > 60:
-                    await tractor.pause()

             return array, start_dt, end_dt

-        yield (
-            get_ohlc,
-            {  # backfill config
-                'erlangs': 3,
-                'rate': 3,
-            }
-        )
+        yield get_ohlc, {'erlangs': 3, 'rate': 3}

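A minimal sketch of how a backfill task might consume the ``(get_ohlc, config)``
pair yielded above; the ``open_history_client()`` acm and the 1m-only guard are
from the diff, while the driver loop itself (and the ``backfill_1m`` name) is
illustrative only and assumes the generator is wrapped as an async context
manager by the caller:

.. code:: python

    async def backfill_1m(open_history_client, mkt) -> None:
        # `mkt` is a hypothetical, already-resolved `MktPair`
        async with open_history_client(mkt) as (get_ohlc, config):
            end = None  # `None` requests the latest frame per the impl above
            for _ in range(int(config['erlangs'])):
                array, start_dt, end_dt = await get_ohlc(
                    timeframe=60,  # only 1m bars are supported
                    end_dt=end,
                )
                # walk backwards frame by frame
                end = start_dt
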
-@async_lifo_cache()
-async def get_mkt_info(
-    fqme: str,
-
-) -> tuple[MktPair, Pair|OptionPair] | None:
-
-    # uppercase since kraken bs_mktid is always upper
-    if 'deribit' not in fqme.lower():
-        fqme += '.deribit'
-
-    mkt_mode: str = ''
-    broker, mkt_ep, venue, expiry = unpack_fqme(fqme)
-
-    # NOTE: we always upper case all tokens to be consistent with
-    # binance's symbology style for pairs, like `BTCUSDT`, but in
-    # theory we could also just keep things lower case; as long as
-    # we're consistent and the symcache matches whatever this func
-    # returns, always!
-    expiry: str = expiry.upper()
-    venue: str = venue.upper()
-    # venue_lower: str = venue.lower()
-
-    mkt_mode: str = 'option'
-
-    async with open_cached_client(
-        'deribit',
-    ) as client:
-
-        assets: dict[str, Asset] = await client.get_assets()
-        pair_str: str = mkt_ep.lower()
-
-        pair: Pair = await client.exch_info(
-            sym=pair_str,
-        )
-        mkt_mode = pair.venue
-        client.mkt_mode = mkt_mode
-
-        dst: Asset | None = assets.get(pair.bs_dst_asset)
-        src: Asset | None = assets.get(pair.bs_src_asset)
-
-        mkt = MktPair(
-            dst=dst,
-            src=src,
-            price_tick=pair.price_tick,
-            size_tick=pair.size_tick,
-            bs_mktid=pair.symbol,
-            venue=mkt_mode,
-            broker='deribit',
-            _atype=mkt_mode,
-            _fqme_without_src=True,
-
-            # expiry=pair.expiry,
-            # XXX TODO, currently we don't use it since it's
-            # already "described" in the `OptionPair.symbol: str`
-            # and if we slap in the ISO repr it's kinda hideous..
-            # -[ ] figure out the best either std
-        )
-        return mkt, pair
-
-
 async def stream_quotes(

     send_chan: trio.abc.SendChannel,
     symbols: list[str],
     feed_is_live: trio.Event,
+    loglevel: str = None,

     # startup sync
     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

 ) -> None:
-    '''
-    Open a live quote stream for the market set defined by `symbols`.
+    # XXX: required to propagate ``tractor`` loglevel to piker logging
+    get_console_log(loglevel or tractor.current_actor().loglevel)

-    Internally this starts a `cryptofeed.FeedHandler` inside an `asyncio`-side
-    task and relays through L1 and `Trade` msgs here to our `trio.Task`.
+    sym = symbols[0]

-    '''
-    sym = symbols[0].split('.')[0]
-    init_msgs: list[FeedInit] = []

-    # multiline nested `dict` formatter (since rn quote-msgs are
-    # just that).
-    pfmt: Callable[[str], str] = mk_repr(
-        # so we can see `deribit`'s delightfully mega-long bs fields..
-        maxstring=100,
-    )

     async with (
         open_cached_client('deribit') as client,
         send_chan as send_chan
     ):
-        mkt: MktPair
-        pair: Pair
-        mkt, pair = await get_mkt_info(sym)

-        # build out init msgs according to latest spec
-        init_msgs.append(
-            FeedInit(
-                mkt_info=mkt,
-            )
-        )
-        # build `cryptofeed` feed-handle
-        cf_sym: cryptofeed.Symbol = piker_sym_to_cb_sym(sym)
+        init_msgs = {
+            # pass back token, and bool, signalling if we're the writer
+            # and that history has been written
+            sym: {
+                'symbol_info': {
+                    'asset_type': 'option',
+                    'price_tick_size': 0.0005
+                },
+                'shm_write_opts': {'sum_tick_vml': False},
+                'fqsn': sym,
+            },
+        }

-        from_cf: tractor.to_asyncio.LinkedTaskChannel
-        async with maybe_open_price_feed(sym) as from_cf:
+        nsym = piker_sym_to_cb_sym(sym)

-            # load the "last trades" summary
-            last_trades_res: cryptofeed.LastTradesResult = await client.last_trades(
-                cb_sym_to_deribit_inst(cf_sym),
-                count=1,
-            )
-            last_trades: list[Trade] = last_trades_res.trades
+        async with maybe_open_price_feed(sym) as stream:

-            # TODO, do we even need this or will the above always
-            # work?
-            # if not last_trades:
-            #     await tractor.pause()
-            # async for typ, quote in from_cf:
-            #     if typ == 'trade':
-            #         last_trade = Trade(**(quote['data']))
-            #         break
+            cache = await client.cache_symbols()

-            # else:
-            last_trade = Trade(
-                **(last_trades[0])
-            )
+            last_trades = (await client.last_trades(
+                cb_sym_to_deribit_inst(nsym), count=1)).trades

-            first_quote: dict = {
+            if len(last_trades) == 0:
+                last_trade = None
+                async for typ, quote in stream:
+                    if typ == 'trade':
+                        last_trade = Trade(**(quote['data']))
+                        break

+            else:
+                last_trade = Trade(**(last_trades[0]))

+            first_quote = {
                 'symbol': sym,
                 'last': last_trade.price,
                 'brokerd_ts': last_trade.timestamp,
@@ -304,84 +158,13 @@ async def stream_quotes(
                 'broker_ts': last_trade.timestamp
                 }]
             }
-            task_status.started((
-                init_msgs,
-                first_quote,
-            ))
+            task_status.started((init_msgs, first_quote))

             feed_is_live.set()

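An aside on the multiline formatter used by the `hist_backfill_fixes` side: a
sketch of exercising ``pfmt`` standalone, assuming ``mk_repr()`` returns a
``repr()``-style callable as the ``Callable[[str], str]`` annotation above
suggests (the sample quote dict is hypothetical):

.. code:: python

    from piker.log import mk_repr

    pfmt = mk_repr(maxstring=100)  # same kwarg as used above

    # a made-up quote-msg with deribit's long-ish symbol strings
    quote = {
        'symbol': 'btc-1sep24-55000-c.reversed_option.deribit',
        'ticks': [{'type': 'trade', 'price': 0.042, 'size': 1.0}],
    }
    print(pfmt(quote))  # multiline nested-dict rendering
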
-            # NOTE XXX, static for now!
-            # => since this only handles ONE mkt feed at a time we
-            # don't need a lookup table to map interleaved quotes
-            # from multiple possible mkt-pairs
-            topic: str = mkt.bs_fqme
+            async for typ, quote in stream:
+                topic = quote['symbol']
+                await send_chan.send({topic: quote})
-
-            # deliver until cancelled
-            async for typ, ref in from_cf:
-                match typ:
-                    case 'trade':
-                        trade: cryptofeed.types.Trade = ref
-
-                        # TODO, re-impl this according to teh ideal
-                        # fqme for opts that we choose!!
-                        bs_fqme: str = cb_sym_to_deribit_inst(
-                            str_to_cb_sym(trade.symbol)
-                        ).lower()
-
-                        piker_quote: dict = {
-                            'symbol': bs_fqme,
-                            'last': trade.price,
-                            'broker_ts': time.time(),
-                            # ^TODO, name this `brokerd/datad_ts` and
-                            # use `time.time_ns()` ??
-                            'ticks': [{
-                                'type': 'trade',
-                                'price': float(trade.price),
-                                'size': float(trade.amount),
-                                'broker_ts': trade.timestamp,
-                            }],
-                        }
-                        log.info(
-                            f'deribit {typ!r} quote for {sym!r}\n\n'
-                            f'{trade}\n\n'
-                            f'{pfmt(piker_quote)}\n'
-                        )
-
-                    case 'l1':
-                        book: cryptofeed.types.L1Book = ref
-
-                        # TODO, so this is where we can possibly change things
-                        # and instead lever the `MktPair.bs_fqme: str` output?
-                        bs_fqme: str = cb_sym_to_deribit_inst(
-                            str_to_cb_sym(book.symbol)
-                        ).lower()
-
-                        piker_quote: dict = {
-                            'symbol': bs_fqme,
-                            'ticks': [
-
-                                {'type': 'bid',
-                                 'price': float(book.bid_price),
-                                 'size': float(book.bid_size)},
-
-                                {'type': 'bsize',
-                                 'price': float(book.bid_price),
-                                 'size': float(book.bid_size),},
-
-                                {'type': 'ask',
-                                 'price': float(book.ask_price),
-                                 'size': float(book.ask_size),},
-
-                                {'type': 'asize',
-                                 'price': float(book.ask_price),
-                                 'size': float(book.ask_size),}
-                            ]
-                        }
-
-                await send_chan.send({
-                    topic: piker_quote,
-                })

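For reference, a sketch of a downstream consumer of the ``{topic: piker_quote}``
msgs relayed above; the tick field names match the dicts built in the diff
while the ``consume()`` wrapper and its ``feed_stream`` arg are hypothetical:

.. code:: python

    async def consume(feed_stream) -> None:
        async for quotes in feed_stream:
            for fqme, quote in quotes.items():
                for tick in quote.get('ticks', []):
                    match tick['type']:
                        case 'trade':
                            print(f'{fqme} last: {tick["price"]}')
                        case 'bid' | 'ask':
                            print(
                                f'{fqme} {tick["type"]}: '
                                f'{tick["price"]} x {tick["size"]}'
                            )
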
 @tractor.context
@@ -391,21 +174,12 @@ async def open_symbol_search(
     async with open_cached_client('deribit') as client:

         # load all symbols locally for fast search
-        # cache = client._pairs
+        cache = await client.cache_symbols()
         await ctx.started()

         async with ctx.open_stream() as stream:
-            pattern: str
             async for pattern in stream:
-                # NOTE: pattern fuzzy-matching is done within
-                # the methd impl.
-                pairs: dict[str, Pair] = await client.search_symbols(
-                    pattern,
-                )
-                # repack in fqme-keyed table
-                byfqme: dict[str, Pair] = {}
-                for pair in pairs.values():
-                    byfqme[pair.bs_fqme] = pair
-
-                await stream.send(byfqme)
+                # repack in dict form
+                await stream.send(
+                    await client.search_symbols(pattern))
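The `gitea_feats` side imports ``rapidfuzz`` (as ``fuzzy``) for the pattern
matching behind ``search_symbols()``; a standalone sketch of that style of
fuzzy lookup over a (hypothetical) symbol table, assuming the stock
``rapidfuzz.process.extract()`` API:

.. code:: python

    from rapidfuzz import process as fuzzy

    symbols = [
        'BTC-1SEP24-55000-C',
        'BTC-1SEP24-55000-P',
        'ETH-1SEP24-3000-C',
    ]
    # yields `(choice, score, index)` triples, best matches first
    matches = fuzzy.extract('55000-c', symbols, limit=2)
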
@@ -1,196 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for pikers)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-"""
-Per market data-type definitions and schemas types.
-
-"""
-from __future__ import annotations
-import pendulum
-from typing import (
-    Literal,
-    Optional,
-)
-from decimal import Decimal
-
-from piker.types import Struct
-
-
-# API endpoint paths by venue / sub-API
-_domain: str = 'deribit.com'
-_url = f'https://www.{_domain}'
-
-# WEBsocketz
-_ws_url: str = f'wss://www.{_domain}/ws/api/v2'
-
-# test nets
-_testnet_ws_url: str = f'wss://test.{_domain}/ws/api/v2'
-
-MarketType = Literal[
-    'option'
-]
-
-
-def get_api_eps(venue: MarketType) -> tuple[str, str]:
-    '''
-    Return API ep root paths per venue.
-
-    '''
-    return {
-        'option': (
-            _ws_url,
-        ),
-    }[venue]
-
-
-class Pair(Struct, frozen=True, kw_only=True):
-
-    symbol: str
-
-    # src
-    quote_currency: str  # 'BTC'
-
-    # dst
-    base_currency: str  # "BTC",
-
-    tick_size: float  # 0.0001 # [{'above_price': 0.005, 'tick_size': 0.0005}]
-    tick_size_steps: list[dict[str, float]]
-
-    @property
-    def price_tick(self) -> Decimal:
-        return Decimal(str(self.tick_size_steps[0]['above_price']))
-
-    @property
-    def size_tick(self) -> Decimal:
-        return Decimal(str(self.tick_size))
-
-    @property
-    def bs_fqme(self) -> str:
-        return f'{self.symbol}'
-
-    @property
-    def bs_mktid(self) -> str:
-        return f'{self.symbol}.{self.venue}'
-
-
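A sketch of instantiating the base ``Pair`` with the illustrative values from
the field comments above (the concrete numbers are hypothetical); note that
``price_tick`` as written reads the ``'above_price'`` key of the first
tick-size step:

.. code:: python

    from decimal import Decimal

    pair = Pair(
        symbol='BTC-1SEP24-55000-C',
        quote_currency='BTC',
        base_currency='BTC',
        tick_size=0.0001,
        tick_size_steps=[{'above_price': 0.005, 'tick_size': 0.0005}],
    )
    assert pair.size_tick == Decimal('0.0001')
    assert pair.price_tick == Decimal('0.005')  # from 'above_price'
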
-class OptionPair(Pair, frozen=True):
-
-    taker_commission: float  # 0.0003
-    strike: float  # 5000.0
-    settlement_period: str  # 'day'
-    settlement_currency: str  # "BTC",
-    rfq: bool  # false
-    price_index: str  # 'btc_usd'
-    option_type: str  # 'call'
-    min_trade_amount: float  # 0.1
-    maker_commission: float  # 0.0003
-    kind: str  # 'option'
-    is_active: bool  # true
-    instrument_type: str  # 'reversed'
-    instrument_name: str  # 'BTC-1SEP24-55000-C'
-    instrument_id: int  # 364671
-    expiration_timestamp: int  # 1725177600000
-    creation_timestamp: int  # 1724918461000
-    counter_currency: str  # 'USD'
-    contract_size: float  # '1.0'
-    block_trade_tick_size: float  # '0.0001'
-    block_trade_min_trade_amount: int  # '25'
-    block_trade_commission: float  # '0.003'
-
-    # NOTE: see `.data._symcache.SymbologyCache.load()` for why
-    ns_path: str = 'piker.brokers.deribit:OptionPair'
-
-    # TODO, impl this without the MM:SS part of
-    # the `'THH:MM:SS..'` etc..
-    @property
-    def expiry(self) -> str:
-        iso_date = pendulum.from_timestamp(
-            self.expiration_timestamp / 1000
-        ).isoformat()
-        return iso_date
-
-    @property
-    def venue(self) -> str:
-        return f'{self.instrument_type}_option'
-
-    @property
-    def bs_fqme(self) -> str:
-        return f'{self.symbol}'
-
-    @property
-    def bs_src_asset(self) -> str:
-        return f'{self.quote_currency}'
-
-    @property
-    def bs_dst_asset(self) -> str:
-        return f'{self.symbol}'
-
-
-PAIRTYPES: dict[MarketType, Pair] = {
-    'option': OptionPair,
-}
-
-
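A sketch of the dispatch pattern ``PAIRTYPES`` enables: pick the struct type
from a venue payload's ``kind`` field and hydrate it. The payload dict here is
hypothetical and would need all non-default ``OptionPair`` fields in practice:

.. code:: python

    msg: dict = {
        'kind': 'option',
        'symbol': 'BTC-1SEP24-55000-C',
        # ... plus the remaining `OptionPair` fields from exch-info ...
    }
    pair_type = PAIRTYPES[msg['kind']]
    pair = pair_type(**msg)
    # `.expiry` then derives an ISO stamp from `expiration_timestamp`
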
-class JSONRPCResult(Struct):
-    id: int
-    usIn: int
-    usOut: int
-    usDiff: int
-    testnet: bool
-    jsonrpc: str = '2.0'
-    error: Optional[dict] = None
-    result: Optional[list[dict]] = None
-
-
-class JSONRPCChannel(Struct):
-    method: str
-    params: dict
-    jsonrpc: str = '2.0'
-
-
-class KLinesResult(Struct):
-    low: list[float]
-    cost: list[float]
-    high: list[float]
-    open: list[float]
-    close: list[float]
-    ticks: list[int]
-    status: str
-    volume: list[float]
-
-
-class Trade(Struct):
-    iv: float
-    price: float
-    amount: float
-    trade_id: str
-    contracts: float
-    direction: str
-    trade_seq: int
-    timestamp: int
-    mark_price: float
-    index_price: float
-    tick_direction: int
-    instrument_name: str
-    combo_id: Optional[str] = '',
-    combo_trade_id: Optional[int] = 0,
-    block_trade_id: Optional[str] = '',
-    block_trade_leg_count: Optional[int] = 0,
-
-
-class LastTradesResult(Struct):
-    trades: list[Trade]
-    has_more: bool
@@ -20,11 +20,6 @@ runnable script-programs.

 '''
 from __future__ import annotations
-from datetime import (  # noqa
-    datetime,
-    date,
-    tzinfo as TzInfo,
-)
 from functools import partial
 from typing import (
     Literal,
@@ -38,7 +33,7 @@ from piker.brokers._util import get_logger

 if TYPE_CHECKING:
     from .api import Client
-    import i3ipc
+    from ib_insync import IB

 log = get_logger('piker.brokers.ib')

@@ -53,39 +48,8 @@ _reset_tech: Literal[
 ] = 'vnc'


-no_setup_msg:str = (
-    'No data reset hack test setup for {vnc_sockaddr}!\n'
-    'See config setup tips @\n'
-    'https://github.com/pikers/piker/tree/master/piker/brokers/ib'
-)
-
-
-def try_xdo_manual(
-    client: Client,
-):
-    '''
-    Do the "manual" `xdo`-based screen switch + click
-    combo since apparently the `asyncvnc` client ain't workin..
-
-    Note this is only meant as a backup method for Xorg users,
-    ideally you can use a real vnc client and the `vnc_click_hack()`
-    impl!
-
-    '''
-    global _reset_tech
-    try:
-        i3ipc_xdotool_manual_click_hack()
-        _reset_tech = 'i3ipc_xdotool'
-        return True
-    except OSError:
-        vnc_sockaddr: str = client.conf.vnc_addrs
-        log.exception(
-            no_setup_msg.format(vnc_sockaddr=vnc_sockaddr)
-        )
-        return False

 async def data_reset_hack(
+    # vnc_host: str,
     client: Client,
     reset_type: Literal['data', 'connection'],

@@ -117,60 +81,65 @@ async def data_reset_hack(
     that need to be wrangle.

     '''
+    ib_client: IB = client.ib

     # look up any user defined vnc socket address mapped from
     # a particular API socket port.
-    vnc_addrs: tuple[str]|None = client.conf.get('vnc_addrs')
-    if not vnc_addrs:
+    api_port: str = str(ib_client.client.port)
+    vnc_host: str
+    vnc_port: int
+    vnc_sockaddr: tuple[str] | None = client.conf.get('vnc_addrs')

+    no_setup_msg:str = (
+        f'No data reset hack test setup for {vnc_sockaddr}!\n'
+        'See config setup tips @\n'
+        'https://github.com/pikers/piker/tree/master/piker/brokers/ib'
+    )

+    if not vnc_sockaddr:
         log.warning(
-            no_setup_msg.format(vnc_sockaddr=client.conf)
+            no_setup_msg
             +
             'REQUIRES A `vnc_addrs: array` ENTRY'
         )

+    vnc_host, vnc_port = vnc_sockaddr.get(
+        api_port,
+        ('localhost', 3003)
+    )
     global _reset_tech

     match _reset_tech:
         case 'vnc':
             try:
                 await tractor.to_asyncio.run_task(
                     partial(
                         vnc_click_hack,
-                        client=client,
+                        host=vnc_host,
+                        port=vnc_port,
                     )
                 )
-            except (
-                OSError,  # no VNC server avail..
-                PermissionError,  # asyncvnc pw fail..
-            ):
+            except OSError:
+                if vnc_host != 'localhost':
+                    log.warning(no_setup_msg)
+                    return False

                 try:
                     import i3ipc  # noqa (since a deps dynamic check)
                 except ModuleNotFoundError:
-                    log.warning(
-                        no_setup_msg.format(vnc_sockaddr=client.conf)
-                    )
+                    log.warning(no_setup_msg)
                     return False

-                # XXX, Xorg only workaround..
-                # TODO? remove now that we have `pyvnc`?
-                # if vnc_host not in {
-                #     'localhost',
-                #     '127.0.0.1',
-                # }:
-                #     focussed, matches = i3ipc_fin_wins_titled()
-                #     if not matches:
-                #         log.warning(
-                #             no_setup_msg.format(vnc_sockaddr=vnc_sockaddr)
-                #         )
-                #         return False
-                # else:
-                #     try_xdo_manual(vnc_sockaddr)
+                try:
+                    i3ipc_xdotool_manual_click_hack()
+                    _reset_tech = 'i3ipc_xdotool'
+                    return True
+                except OSError:
+                    log.exception(no_setup_msg)
+                    return False

-                # localhost but no vnc-client or it borked..
-                else:
-                    try_xdo_manual(client)

         case 'i3ipc_xdotool':
-            try_xdo_manual(client)
-            # i3ipc_xdotool_manual_click_hack()
+            i3ipc_xdotool_manual_click_hack()

         case _ as tech:
             raise RuntimeError(f'{tech} is not supported for reset tech!?')

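Both branches key the vnc endpoint off the API socket port; a small sketch of
resolving a ``(host, port, pw)`` triple from such a mapping, covering both the
tuple form used by `gitea_feats` and the table form documented later in
`hist_backfill_fixes` (the sample entries are hypothetical):

.. code:: python

    vnc_addrs = {
        '4002': {'host': 'localhost', 'port': 5900, 'pw': 'doggy'},
        '4003': ('localhost', 3003),
    }
    entry = vnc_addrs.get('4002', ('localhost', 5900))
    match entry:
        case (host, port):
            pw = None  # tuple entries carry no password
        case {'host': host, 'port': port, 'pw': pw}:
            pass  # table entries may set a vnc-server password
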
@@ -180,66 +149,21 @@ async def data_reset_hack(


 async def vnc_click_hack(
-    client: Client,
-    reset_type: str = 'data',
-    pw: str|None = None,
+    host: str,
+    port: int,
+    reset_type: str = 'data'

 ) -> None:
     '''
     Reset the data or network connection for the VNC attached
-    ib-gateway using a (magic) keybinding combo.
+    ib gateway using magic combos.

-    A vnc-server password can be set either by an input `pw` param or
-    set in the client's config with the latter loaded from the user's
-    `brokers.toml` in a vnc-addrs-port-mapping section,
-
-    .. code:: toml
-
-        [ib.vnc_addrs]
-        4002 = {host = 'localhost', port = 5900, pw = 'doggy'}

     '''
-    api_port: str = str(client.ib.client.port)
-    conf: dict = client.conf
-    vnc_addrs: dict[int, tuple] = conf.get('vnc_addrs')
-    if not vnc_addrs:
-        return None
-
-    addr_entry: dict|tuple = vnc_addrs.get(
-        api_port,
-        ('localhost', 5900)  # a typical default
-    )
-    if pw is None:
-        match addr_entry:
-            case (
-                host,
-                port,
-            ):
-                pass
-
-            case {
-                'host': host,
-                'port': port,
-                'pw': pw
-            }:
-                pass
-
-            case _:
-                raise ValueError(
-                    f'Invalid `ib.vnc_addrs` entry ?\n'
-                    f'{addr_entry!r}\n'
-                )
     try:
-        from pyvnc import (
-            AsyncVNCClient,
-            VNCConfig,
-            Point,
-            MOUSE_BUTTON_LEFT,
-        )
+        import asyncvnc
     except ModuleNotFoundError:
         log.warning(
             "In order to leverage `piker`'s built-in data reset hacks, install "
-            "the `pyvnc` project: https://github.com/regulad/pyvnc.git"
+            "the `asyncvnc` project: https://github.com/barneygale/asyncvnc"
         )
         return

@@ -250,79 +174,24 @@ async def vnc_click_hack(
         'connection': 'r'
     }[reset_type]

-    with tractor.devx.open_crash_handler():
-        client = await AsyncVNCClient.connect(
-            VNCConfig(
-                host=host,
-                port=port,
-                password=pw,
-            )
-        )
-        async with client:
+    async with asyncvnc.connect(
+        host,
+        port=port,
+        # TODO: doesn't work see:
+        # https://github.com/barneygale/asyncvnc/issues/7
+        # password='ibcansmbz',

+    ) as client:

         # move to middle of screen
         # 640x1800
-            await client.move(
-                Point(
-                    500,
-                    500,
-                )
-            )
-            # ensure the ib-gw window is active
-            await client.click(MOUSE_BUTTON_LEFT)
-            # send the hotkeys combo B)
-            await client.press('Ctrl', 'Alt', key)  # keys are stacked
+        client.mouse.move(
+            x=500,
+            y=500,
+        )
+        client.mouse.click()
+        client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked


-def i3ipc_fin_wins_titled(
-    titles: list[str] = [
-        'Interactive Brokers',  # tws running in i3
-        'IB Gateway',  # gw running in i3
-        # 'IB',    # gw running in i3 (newer version?)
-
-        # !TODO, remote vnc instance
-        # -[ ] something in title (or other Con-props) that indicates
-        #     this is explicitly for ibrk sw?
-        #  |_[ ] !can use modden spawn eventually!
-        'TigerVNC',
-        # 'vncviewer',  # the terminal..
-    ],
-) -> tuple[
-    i3ipc.Con,  # orig focussed win
-    list[tuple[str, i3ipc.Con]],  # matching wins by title
-]:
-    '''
-    Attempt to find a local-DE window titled with an entry in
-    `titles`.
-
-    If found deliver the current focussed window and all matching
-    `i3ipc.Con`s in a list.
-
-    '''
-    import i3ipc
-    ipc = i3ipc.Connection()
-
-    # TODO: might be worth offering some kinda api for grabbing
-    # the window id from the pid?
-    # https://stackoverflow.com/a/2250879
-    tree = ipc.get_tree()
-    focussed: i3ipc.Con = tree.find_focused()
-
-    matches: list[i3ipc.Con] = []
-    for name in titles:
-        results = tree.find_titled(name)
-        print(f'results for {name}: {results}')
-        if results:
-            con = results[0]
-            matches.append((
-                name,
-                con,
-            ))
-
-    return (
-        focussed,
-        matches,
-    )

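A minimal standalone version of the window lookup that
``i3ipc_fin_wins_titled()`` performs, using only the ``i3ipc`` calls already
exercised in the diff (``Connection``, ``get_tree``, ``find_titled``):

.. code:: python

    import i3ipc

    ipc = i3ipc.Connection()
    tree = ipc.get_tree()
    # list any ib-gateway windows visible to the local i3/sway session
    for con in tree.find_titled('IB Gateway'):
        print(con.window, con.rect.width, con.rect.height)
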
 def i3ipc_xdotool_manual_click_hack() -> None:
@@ -330,10 +199,29 @@ def i3ipc_xdotool_manual_click_hack() -> None:
     Do the data reset hack but expecting a local X-window using `xdotool`.

     '''
-    focussed, matches = i3ipc_fin_wins_titled()
-    orig_win_id = focussed.window
+    import i3ipc
+    i3 = i3ipc.Connection()

+    # TODO: might be worth offering some kinda api for grabbing
+    # the window id from the pid?
+    # https://stackoverflow.com/a/2250879
+    t = i3.get_tree()

+    orig_win_id = t.find_focused().window

+    # for tws
+    win_names: list[str] = [
+        'Interactive Brokers',  # tws running in i3
+        'IB Gateway',  # gw running in i3
+        # 'IB',    # gw running in i3 (newer version?)
+    ]

     try:
-        for name, con in matches:
+        for name in win_names:
+            results = t.find_titled(name)
+            print(f'results for {name}: {results}')
+            if results:
+                con = results[0]
             print(f'Resetting data feed for {name}')
             win_id = str(con.window)
             w, h = con.rect.width, con.rect.height
@@ -379,99 +267,3 @@ def i3ipc_xdotool_manual_click_hack() -> None:
         ])
     except subprocess.TimeoutExpired:
         log.exception('xdotool timed out?')


-def is_current_time_in_range(
-    start_dt: datetime,
-    end_dt: datetime,
-) -> bool:
-    '''
-    Check if current time is within the datetime range.
-
-    Use any/the-same timezone as provided by `start_dt.tzinfo` value
-    in the range.
-
-    '''
-    now: datetime = datetime.now(start_dt.tzinfo)
-    return start_dt <= now <= end_dt

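A quick usage sketch for ``is_current_time_in_range()``; the session window
values are hypothetical:

.. code:: python

    from datetime import datetime, timedelta, timezone

    open_dt = datetime.now(timezone.utc).replace(
        hour=13, minute=30, second=0, microsecond=0,
    )
    close_dt = open_dt + timedelta(hours=6, minutes=30)
    if is_current_time_in_range(open_dt, close_dt):
        ...  # venue is (probably) open, submit the query
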
-# TODO, put this into `._util` and call it from here!
-#
-# NOTE, this was generated by @guille from a gpt5 prompt
-# and was originally thot to be needed before learning about
-# `ib_insync.contract.ContractDetails._parseSessions()` and
-# it's downstream meths..
-#
-# This is still likely useful to keep for now to parse the
-# `.tradingHours: str` value manually if we ever decide
-# to move off `ib_async` and implement our own `trio`/`anyio`
-# based version Bp
-#
-# >attempt to parse the retarted ib "time stampy thing" they
-# >do for "venue hours" with this.. written by
-# >gpt5-"thinking",
-#

-def parse_trading_hours(
-    spec: str,
-    tz: TzInfo|None = None
-) -> dict[
-    date,
-    tuple[datetime, datetime]
-]|None:
-    '''
-    Parse venue hours like:
-      'YYYYMMDD:HHMM-YYYYMMDD:HHMM;YYYYMMDD:CLOSED;...'
-
-    Returns `dict[date] = (open_dt, close_dt)` or `None` if
-    closed.
-
-    '''
-    if (
-        not isinstance(spec, str)
-        or
-        not spec
-    ):
-        raise ValueError('spec must be a non-empty string')
-
-    out: dict[
-        date,
-        tuple[datetime, datetime]
-    ]|None = {}
-
-    for part in (p.strip() for p in spec.split(';') if p.strip()):
-        if part.endswith(':CLOSED'):
-            day_s, _ = part.split(':', 1)
-            d = datetime.strptime(day_s, '%Y%m%d').date()
-            out[d] = None
-            continue
-
-        try:
-            start_s, end_s = part.split('-', 1)
-            start_dt = datetime.strptime(start_s, '%Y%m%d:%H%M')
-            end_dt = datetime.strptime(end_s, '%Y%m%d:%H%M')
-        except ValueError as exc:
-            raise ValueError(f'invalid segment: {part}') from exc
-
-        if tz is not None:
-            start_dt = start_dt.replace(tzinfo=tz)
-            end_dt = end_dt.replace(tzinfo=tz)
-
-        out[start_dt.date()] = (start_dt, end_dt)
-
-    return out
-
-
-# ORIG desired usage,
-#
-# TODO, for non-drunk tomorrow,
-# - call above fn and check that `output[today] is not None`
-# trading_hrs: dict = parse_trading_hours(
-#     details.tradingHours
-# )
-# liq_hrs: dict = parse_trading_hours(
-#     details.liquidHours
-# )

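Fleshing out the "ORIG desired usage" comments above, a runnable sketch of
``parse_trading_hours()`` against a literal spec string (the dates and times
are illustrative, matching the format documented in its docstring):

.. code:: python

    from datetime import datetime

    spec = '20250902:0930-20250902:1600;20250903:CLOSED'
    hours = parse_trading_hours(spec)

    today = datetime.now().date()
    session = hours.get(today)
    if session is None:
        print('venue closed (or no entry) today')
    else:
        open_dt, close_dt = session
        print(f'open {open_dt} -> {close_dt}')
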
@@ -48,7 +48,6 @@ from bidict import bidict
 import trio
 import tractor
 from tractor import to_asyncio
-from tractor import trionics
 from pendulum import (
     from_timestamp,
     DateTime,
@@ -97,10 +96,6 @@ from ._util import (
     get_logger,
 )

-# ?TODO? this can now be removed since it was originally to extend
-# with a `bar_vwap` field that we removed from the default ohlcv
-# dtype since it's better calculated in an FSP func
-#
 _bar_load_dtype: list[tuple[str, type]] = [
     # NOTE XXX: only part that's diff
     # from our default fields where
@@ -1367,20 +1362,23 @@ async def load_aio_clients(


 async def load_clients_for_trio(
-    chan: tractor.to_asyncio.LinkedTaskChannel,
+    from_trio: asyncio.Queue,
+    to_trio: trio.abc.SendChannel,

 ) -> None:
     '''
     Pure async mngr proxy to ``load_aio_clients()``.

-    This is a bootstrap entrypoint to call from
-    a `tractor.to_asyncio.open_channel_from()`.
+    This is a bootstrap entrypoing to call from
+    a ``tractor.to_asyncio.open_channel_from()``.

     '''
     async with load_aio_clients(
         disconnect_on_exit=False,
     ) as accts2clients:

-        chan.started_nowait(accts2clients)
+        to_trio.send_nowait(accts2clients)

         # TODO: maybe a sync event to wait on instead?
         await asyncio.sleep(float('inf'))

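A sketch of the `trio`-side caller implied by both branches: spawn the
bootstrap fn above into the `asyncio` loop and receive its first-sent value
(``started_nowait()`` in `hist_backfill_fixes`, ``send_nowait()`` in
`gitea_feats`); only the ``main()`` wrapper is hypothetical:

.. code:: python

    import tractor

    async def main() -> None:
        async with tractor.to_asyncio.open_channel_from(
            load_clients_for_trio,
        ) as (accts2clients, chan):
            # `accts2clients` is the first-sent acct-name -> Client map
            ...
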
@@ -1393,10 +1391,7 @@ async def open_client_proxies() -> tuple[
     async with (
         tractor.trionics.maybe_open_context(
             acm_func=tractor.to_asyncio.open_channel_from,
-            kwargs={
-                'target': load_clients_for_trio,
-                # ^XXX, kwarg to `open_channel_from()`
-            },
+            kwargs={'target': load_clients_for_trio},

             # lock around current actor task access
             # TODO: maybe this should be the default in tractor?
@@ -1527,22 +1522,23 @@ class MethodProxy:


 async def open_aio_client_method_relay(
-    chan: tractor.to_asyncio.LinkedTaskChannel,
+    from_trio: asyncio.Queue,
+    to_trio: trio.abc.SendChannel,
     client: Client,
     event_consumers: dict[str, trio.Event],

 ) -> None:

     # sync with `open_client_proxy()` caller
-    chan.started_nowait(client)
+    to_trio.send_nowait(client)

     # TODO: separate channel for error handling?
-    client.inline_errors(chan)
+    client.inline_errors(to_trio)

     # relay all method requests to ``asyncio``-side client and deliver
     # back results
-    while not chan._to_trio._closed:  # <- TODO, better check like `._web_bs`?
-        msg: tuple[str, dict]|dict|None = await chan.get()
+    while not to_trio._closed:
+        msg: tuple[str, dict] | dict | None = await from_trio.get()
         match msg:
             case None:  # termination sentinel
                 log.info('asyncio `Client` method-proxy SHUTDOWN!')
@@ -1555,7 +1551,7 @@ async def open_aio_client_method_relay(
                 try:
                     resp = await meth(**kwargs)
                     # echo the msg back
-                    chan.send_nowait({'result': resp})
+                    to_trio.send_nowait({'result': resp})

                 except (
                     RequestError,
@@ -1563,10 +1559,10 @@ async def open_aio_client_method_relay(
                     # TODO: relay all errors to trio?
                     # BaseException,
                 ) as err:
-                    chan.send_nowait({'exception': err})
+                    to_trio.send_nowait({'exception': err})

             case {'error': content}:
-                chan.send_nowait({'exception': content})
+                to_trio.send_nowait({'exception': content})

             case _:
                 raise ValueError(f'Unhandled msg {msg}')
@@ -1588,8 +1584,7 @@ async def open_client_proxy(
         event_consumers=event_table,
     ) as (first, chan),

-    trionics.collapse_eg(),  # loose-ify
-    trio.open_nursery() as relay_tn,
+    trio.open_nursery() as relay_n,
 ):

     assert isinstance(first, Client)
@@ -1629,7 +1624,7 @@ async def open_client_proxy(

         continue

-    relay_tn.start_soon(relay_events)
+    relay_n.start_soon(relay_events)

     yield proxy

@@ -34,7 +34,6 @@ import trio
 from trio_typing import TaskStatus
 import tractor
 from tractor.to_asyncio import LinkedTaskChannel
-from tractor import trionics
 from ib_insync.contract import (
     Contract,
 )
@@ -358,10 +357,6 @@ async def update_and_audit_pos_msg(
         size=ibpos.position,

         avg_price=pikerpos.ppu,

-        # XXX ensures matching even if multiple venue-names
-        # in `.bs_fqme`, likely from txn records..
-        bs_mktid=mkt.bs_mktid,
     )

     ibfmtmsg: str = pformat(ibpos._asdict())
@@ -412,7 +407,7 @@ async def update_and_audit_pos_msg(

     # TODO: make this a "propaganda" log level?
     if ibpos.avgCost != msg.avg_price:
-        log.debug(
+        log.warning(
             f'IB "FIFO" avg price for {msg.symbol} is DIFF:\n'
             f'ib: {ibfmtmsg}\n'
             '---------------------------\n'
@@ -430,8 +425,7 @@ async def aggr_open_orders(

 ) -> None:
     '''
-    Collect all open orders from client and fill in `order_msgs:
-    list`.
+    Collect all open orders from client and fill in `order_msgs: list`.

     '''
     trades: list[Trade] = client.ib.openTrades()
@@ -552,10 +546,7 @@ async def open_trade_dialog(
     ),

     # TODO: do this as part of `open_account()`!?
-    open_symcache(
-        'ib',
-        only_from_memcache=True,
-    ) as symcache,
+    open_symcache('ib', only_from_memcache=True) as symcache,
 ):
     # Open a trade ledgers stack for appending trade records over
     # multiple accounts.
@@ -564,9 +555,7 @@ async def open_trade_dialog(
     tables: dict[str, Account] = {}
     order_msgs: list[Status] = []
     conf = get_config()
-    accounts_def_inv: bidict[str, str] = bidict(
-        conf['accounts']
-    ).inverse
+    accounts_def_inv: bidict[str, str] = bidict(conf['accounts']).inverse

     with (
         ExitStack() as lstack,
@@ -716,11 +705,7 @@ async def open_trade_dialog(
     # client-account and build out position msgs to deliver to
     # EMS.
     for acctid, acnt in tables.items():
-        active_pps: dict[str, Position]
-        (
-            active_pps,
-            closed_pps,
-        ) = acnt.dump_active()
+        active_pps, closed_pps = acnt.dump_active()

         for pps in [active_pps, closed_pps]:
             piker_pps: list[Position] = list(pps.values())
@@ -736,7 +721,6 @@ async def open_trade_dialog(
             )
             if ibpos:
                 bs_mktid: str = str(ibpos.contract.conId)

                 msg = await update_and_audit_pos_msg(
                     acctid,
                     pikerpos,
@@ -754,7 +738,7 @@ async def open_trade_dialog(
                     f'UNEXPECTED POSITION says IB => {msg.symbol}\n'
                     'Maybe they LIQUIDATED YOU or your ledger is wrong?\n'
                 )
-                log.debug(logmsg)
+                log.error(logmsg)

     await ctx.started((
         all_positions,
@@ -763,22 +747,21 @@ async def open_trade_dialog(

     async with (
         ctx.open_stream() as ems_stream,
-        trionics.collapse_eg(),
-        trio.open_nursery() as tn,
+        trio.open_nursery() as n,
     ):
         # relay existing open orders to ems
         for msg in order_msgs:
             await ems_stream.send(msg)

         for client in set(aioclients.values()):
-            trade_event_stream: LinkedTaskChannel = await tn.start(
+            trade_event_stream: LinkedTaskChannel = await n.start(
                 open_trade_event_stream,
                 client,
             )

             # start order request handler **before** local trades
             # event loop
-            tn.start_soon(
+            n.start_soon(
                 handle_order_requests,
                 ems_stream,
                 accounts_def,
@@ -786,7 +769,7 @@ async def open_trade_dialog(
             )

             # allocate event relay tasks for each client connection
-            tn.start_soon(
+            n.start_soon(
                 deliver_trade_events,

                 trade_event_stream,
@@ -1258,47 +1241,32 @@ async def deliver_trade_events(
             # never relay errors for non-broker related issues
             # https://interactivebrokers.github.io/tws-api/message_codes.html
             code: int = err['error_code']
-            reason: str = err['reason']
-            reqid: str = str(err['reqid'])
-
-            # "Warning:" msg codes,
-            # https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes
-            # - 2109: 'Outside Regular Trading Hours'
-            if 'Warning:' in reason:
-                log.warning(
-                    f'Order-API-warning: {code!r}\n'
-                    f'reqid: {reqid!r}\n'
-                    f'\n'
-                    f'{pformat(err)}\n'
-                    # ^TODO? should we just print the `reason`
-                    # not the full `err`-dict?
-                )
-                continue
-
-            # XXX known special (ignore) cases
-            elif code in {
-                200,  # uhh.. ni idea
+            if code in {
+                200,  # uhh

                 # hist pacing / connectivity
                 162,
                 165,

+                # WARNING codes:
+                # https://interactivebrokers.github.io/tws-api/message_codes.html#warning_codes
+                # Attribute 'Outside Regular Trading Hours' is
+                # " 'ignored based on the order type and
+                # destination. PlaceOrder is now ' 'being
+                # processed.',
+                2109,

                 # XXX: lol this isn't even documented..
                 # 'No market data during competing live session'
                 1669,
             }:
-                log.error(
-                    f'Order-API-error which is non-cancel-causing ?!\n'
-                    f'\n'
-                    f'{pformat(err)}\n'
-                )
                 continue

+            reqid: str = str(err['reqid'])
+            reason: str = err['reason']

             if err['reqid'] == -1:
-                log.error(
-                    f'TWS external order error ??\n'
-                    f'{pformat(err)}\n'
-                )
+                log.error(f'TWS external order error:\n{pformat(err)}')

             flow: dict = dict(
                 flows.get(reqid)

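A condensed sketch of the triage order the `hist_backfill_fixes` side applies
to an IB error payload (field names per the dicts handled above; the sample
values are hypothetical):

.. code:: python

    err = {
        'error_code': 2109,
        'reqid': 442,
        'reason': "Warning: Attribute 'Outside Regular Trading Hours' ...",
    }
    if 'Warning:' in err['reason']:
        pass  # log it but keep the order flow alive
    elif err['error_code'] in {200, 162, 165, 1669}:
        pass  # known non-cancel-causing codes, also skipped
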
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-forever Tyler Goodlet (in stewardship for pikers)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,12 +13,10 @@

 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
-'''
-Data feed endpoints pre-wrapped and ready for use with `tractor`/`trio`
-via "infected-asyncio-mode".
-
-'''
+"""
+Data feed endpoints pre-wrapped and ready for use with ``tractor``/``trio``.

+"""
 from __future__ import annotations
 import asyncio
 from contextlib import (
@@ -28,6 +26,7 @@ from dataclasses import asdict
 from datetime import datetime
 from functools import partial
 from pprint import pformat
+from math import isnan
 import time
 from typing import (
     Any,
@@ -41,6 +40,7 @@ import numpy as np
 from pendulum import (
     now,
     from_timestamp,
+    # DateTime,
     Duration,
     duration as mk_duration,
 )
@@ -69,10 +69,7 @@ from .api import (
     Contract,
     RequestError,
 )
-from ._util import (
-    data_reset_hack,
-    is_current_time_in_range,
-)
+from ._util import data_reset_hack
 from .symbols import get_mkt_info

 if TYPE_CHECKING:
@@ -187,8 +184,7 @@ async def open_history_client(

             if (
                 start_dt
-                and
-                start_dt.timestamp() == 0
+                and start_dt.timestamp() == 0
             ):
                 await tractor.pause()

@@ -207,16 +203,14 @@ async def open_history_client(
             ):
                 count += 1
                 mean += latency / count
-                log.debug(
+                print(
                     f'HISTORY FRAME QUERY LATENCY: {latency}\n'
                     f'mean: {mean}'
                 )

             # could be trying to retreive bars over weekend
             if out is None:
-                log.error(
-                    f"No bars starting at {end_dt!r} !?!?"
-                )
+                log.error(f"Can't grab bars starting at {end_dt}!?!?")
                 if (
                     end_dt
                     and head_dt
@@ -291,9 +285,8 @@ _pacing: str = (

 async def wait_on_data_reset(
     proxy: MethodProxy,

     reset_type: str = 'data',
-    timeout: float = 16,
+    timeout: float = 16,  # float('inf'),

     task_status: TaskStatus[
         tuple[
@@ -302,47 +295,29 @@ async def wait_on_data_reset(
         ]
     ] = trio.TASK_STATUS_IGNORED,
 ) -> bool:
-    '''
-    Wait on a (global-ish) "data-farm" event to be emitted
-    by the IB api server.
-
-    Allows syncing to reconnect event-messages emitted on the API
-    console, such as:
-
-    - 'HMDS data farm connection is OK:ushmds'
-    - 'Market data farm is connecting:usfuture'
-    - 'Market data farm connection is OK:usfuture'
-
-    Deliver a `(cs, done: Event)` pair to the caller to support it
-    waiting or cancelling the associated "data-reset-request";
-    normally a manual data-reset-req is expected to be the cause and
-    thus trigger such events (such as our click-hack-magic from
-    `.ib._util`).
-
-    '''
-    # ?TODO, do we need a task-lock around this method?
-    #
-    # register for an API "status event" wrapped for `trio`-sync.
-    hist_ev: trio.Event = proxy.status_event(
+    # TODO: we might have to put a task lock around this
+    # method..
+    hist_ev = proxy.status_event(
         'HMDS data farm connection is OK:ushmds'
     )
-    #
-    # ^TODO: other event-messages we might want to support waiting-for
-    # but i wasn't able to get reliable..
-    #
+    # TODO: other event messages we might want to try and
+    # wait for but i wasn't able to get any of this
+    # reliable..
     # reconnect_start = proxy.status_event(
     #     'Market data farm is connecting:usfuture'
     # )
     # live_ev = proxy.status_event(
     #     'Market data farm connection is OK:usfuture'
     # )

     # try to wait on the reset event(s) to arrive, a timeout
     # will trigger a retry up to 6 times (for now).
     client: Client = proxy._aio_ns

     done = trio.Event()
     with trio.move_on_after(timeout) as cs:

         task_status.started((cs, done))

         log.warning(
@@ -421,9 +396,8 @@ async def get_bars(
     bool,  # timed out hint
 ]:
     '''
-    Request-n-retrieve historical data frames from a `trio.Task`
-    using a `MethoProxy` to query the `asyncio`-side's
-    `.ib.api.Client` methods.
+    Retrieve historical data from a ``trio``-side task using
+    a ``MethoProxy``.

     '''
     global _data_resetter_task, _failed_resets
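The event-wait pattern above distilled into a standalone sketch: register a
farm-status event via the method-proxy, then block on it under a timeout
(``proxy.status_event()`` and the farm-message string are from the diff; the
``wait_reset()`` wrapper is hypothetical):

.. code:: python

    import trio

    async def wait_reset(proxy) -> bool:
        hist_ev: trio.Event = proxy.status_event(
            'HMDS data farm connection is OK:ushmds'
        )
        with trio.move_on_after(16) as cs:
            await hist_ev.wait()
        # `True` iff the farm came back before the timeout
        return not cs.cancelled_caught
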
@@ -633,10 +607,7 @@ async def get_bars(
     # such that simultaneous symbol queries don't try data resettingn
     # too fast..
     unset_resetter: bool = False
-    async with (
-        tractor.trionics.collapse_eg(),
-        trio.open_nursery() as nurse
-    ):
+    async with trio.open_nursery() as nurse:

         # start history request that we allow
         # to run indefinitely until a result is acquired
@@ -682,12 +653,14 @@ async def get_bars(
     )


-# per-actor cache of inter-eventloop-chans
 _quote_streams: dict[str, trio.abc.ReceiveStream] = {}


 async def _setup_quote_stream(
-    chan: tractor.to_asyncio.LinkedTaskChannel,
+    from_trio: asyncio.Queue,
+    to_trio: trio.abc.SendChannel,

     symbol: str,
     opts: tuple[int] = (
         '375',  # RT trade volume (excludes utrades)
@@ -705,13 +678,10 @@ async def _setup_quote_stream(

 ) -> trio.abc.ReceiveChannel:
     '''
-    Stream L1 quotes via the `Ticker.updateEvent.connect(push)`
-    callback API by registering a `push` callback which simply
-    `chan.send_nowait()`s quote msgs back to the calling
-    parent-`trio.Task`-side.
+    Stream a ticker using the std L1 api.

-    NOTE, that this task-fn is run on the `asyncio.Task`-side ONLY
-    and is thus run via `tractor.to_asyncio.open_channel_from()`.
+    This task is ``asyncio``-side and must be called from
+    ``tractor.to_asyncio.open_channel_from()``.

     '''
     global _quote_streams
@@ -719,79 +689,37 @@ async def _setup_quote_stream(
     async with load_aio_clients(
         disconnect_on_exit=False,
     ) as accts2clients:

-        # XXX since this is an `asyncio.Task`, we must use
-        # tractor.pause_from_sync()

         caccount_name, client = get_preferred_data_client(accts2clients)
-        contract = (
-            contract
-            or
-            (await client.find_contract(symbol))
-        )
-        chan.started_nowait(contract)  # cuz why not
-        ticker: Ticker = client.ib.reqMktData(
-            contract,
-            ','.join(opts),
-        )
-        maybe_exc: BaseException|None = None
-        handler_tries: int = 0
-        aio_task: asyncio.Task = asyncio.current_task()
+        contract = contract or (await client.find_contract(symbol))
+        to_trio.send_nowait(contract)  # cuz why not
+        ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts))

-        # ?TODO? this API is batch-wise and quite slow-af but,
-        # - seems to be 5s updates?
-        # - maybe we could use it for backchecking?
-        #
+        # NOTE: it's batch-wise and slow af but I guess could
+        # be good for backchecking? Seems to be every 5s maybe?
         # ticker: Ticker = client.ib.reqTickByTickData(
         #     contract, 'Last',
         # )

-        # define a very naive queue-pushing callback that relays
-        # quote-packets directly the calling (parent) `trio.Task`.
-        # Ensure on teardown we cancel the feed via their cancel API.
-        #
+        # # define a simple queue push routine that streams quote packets
+        # # to trio over the ``to_trio`` memory channel.
+        # to_trio, from_aio = trio.open_memory_channel(2**8)  # type: ignore
         def teardown():
-            '''
-            Disconnect our `push`-er callback and cancel the data-feed
-            for `contract`.
-
-            '''
-            nonlocal maybe_exc
             ticker.updateEvent.disconnect(push)
-            report: str = f'Disconnected mkt-data for {symbol!r} due to '
-            if maybe_exc is not None:
-                report += (
-                    'error,\n'
-                    f'{maybe_exc!r}\n'
-                )
-                log.error(report)
-            else:
-                report += (
-                    'cancellation.\n'
-                )
-                log.cancel(report)
+            log.error(f"Disconnected stream for `{symbol}`")

             client.ib.cancelMktData(contract)

             # decouple broadcast mem chan
             _quote_streams.pop(symbol, None)

-        def push(
-            t: Ticker,
-            tries_before_raise: int = 6,
-        ) -> None:
-            '''
-            Push quotes verbatim to parent-side `trio.Task`.
+        def push(t: Ticker) -> None:
+            """
+            Push quotes to trio task.
|
|
||||||
'''
|
"""
|
||||||
nonlocal maybe_exc, handler_tries
|
# log.debug(t)
|
||||||
# log.debug(f'new IB quote: {t}\n')
|
|
||||||
try:
|
try:
|
||||||
chan.send_nowait(t)
|
to_trio.send_nowait(t)
|
||||||
|
|
||||||
# XXX TODO XXX replicate in `tractor` tests
|
|
||||||
# as per `CancelledError`-handler notes below!
|
|
||||||
# assert 0
|
|
||||||
except (
|
except (
|
||||||
trio.BrokenResourceError,
|
trio.BrokenResourceError,
|
||||||
|
|
||||||
|
@ -806,104 +734,35 @@ async def _setup_quote_stream(
|
||||||
# resulting in tracebacks spammed to console..
|
# resulting in tracebacks spammed to console..
|
||||||
# Manually do the dereg ourselves.
|
# Manually do the dereg ourselves.
|
||||||
teardown()
|
teardown()
|
||||||
|
|
||||||
# for slow debugging purposes to avoid clobbering prompt
|
|
||||||
# with log msgs
|
|
||||||
except trio.WouldBlock:
|
except trio.WouldBlock:
|
||||||
log.exception(
|
# log.warning(
|
||||||
f'Asyncio->Trio `chan.send_nowait()` blocked !?\n'
|
# f'channel is blocking symbol feed for {symbol}?'
|
||||||
f'\n'
|
# f'\n{to_trio.statistics}'
|
||||||
f'{chan._to_trio.statistics()}\n'
|
# )
|
||||||
)
|
pass
|
||||||
|
|
||||||
# ?TODO, handle re-connection attempts?
|
|
||||||
except BaseException as _berr:
|
|
||||||
berr = _berr
|
|
||||||
if handler_tries >= tries_before_raise:
|
|
||||||
# breakpoint()
|
|
||||||
maybe_exc = _berr
|
|
||||||
# task.set_exception(berr)
|
|
||||||
aio_task.cancel(msg=berr.args)
|
|
||||||
raise berr
|
|
||||||
else:
|
|
||||||
handler_tries += 1
|
|
||||||
|
|
||||||
log.exception(
|
|
||||||
f'Failed to push ticker quote !?\n'
|
|
||||||
f'handler_tries={handler_tries!r}\n'
|
|
||||||
f'ticker: {t!r}\n'
|
|
||||||
f'\n'
|
|
||||||
f'{chan._to_trio.statistics()}\n'
|
|
||||||
f'\n'
|
|
||||||
f'CAUSE: {berr}\n'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
# except trio.WouldBlock:
|
||||||
|
# # for slow debugging purposes to avoid clobbering prompt
|
||||||
|
# # with log msgs
|
||||||
|
# pass
|
||||||
|
|
||||||
ticker.updateEvent.connect(push)
|
ticker.updateEvent.connect(push)
|
||||||
try:
|
try:
|
||||||
await asyncio.sleep(float('inf'))
|
await asyncio.sleep(float('inf'))
|
||||||
|
|
||||||
# XXX, for debug.. TODO? can we rm again?
|
|
||||||
#
|
|
||||||
# tractor.pause_from_sync()
|
|
||||||
# while True:
|
|
||||||
# await asyncio.sleep(1.6)
|
|
||||||
# if ticker.ticks:
|
|
||||||
# log.debug(
|
|
||||||
# f'ticker.ticks = \n'
|
|
||||||
# f'{ticker.ticks}\n'
|
|
||||||
# )
|
|
||||||
# else:
|
|
||||||
# log.warning(
|
|
||||||
# 'UHH no ticker.ticks ??'
|
|
||||||
# )
|
|
||||||
|
|
||||||
# XXX TODO XXX !?!?
|
|
||||||
# apparently **without this handler** and the subsequent
|
|
||||||
# re-raising of `maybe_exc from _taskc` cancelling the
|
|
||||||
# `aio_task` from the `push()`-callback will cause a very
|
|
||||||
# strange chain of exc raising that breaks alll sorts of
|
|
||||||
# downstream callers, tasks and remote-actor tasks!?
|
|
||||||
#
|
|
||||||
# -[ ] we need some lowlevel reproducting tests to replicate
|
|
||||||
# those worst-case scenarios in `tractor` core!!
|
|
||||||
# -[ ] likely we should factor-out the `tractor.to_asyncio`
|
|
||||||
# attempts at workarounds in `.translate_aio_errors()`
|
|
||||||
# for failed `asyncio.Task.set_exception()` to either
|
|
||||||
# call `aio_task.cancel()` and/or
|
|
||||||
# `aio_task._fut_waiter.set_exception()` to a re-useable
|
|
||||||
# toolset in something like a `.to_asyncio._utils`??
|
|
||||||
#
|
|
||||||
except asyncio.CancelledError as _taskc:
|
|
||||||
if maybe_exc is not None:
|
|
||||||
raise maybe_exc from _taskc
|
|
||||||
|
|
||||||
raise _taskc
|
|
||||||
|
|
||||||
except BaseException as _berr:
|
|
||||||
# stash any crash cause for reporting in `teardown()`
|
|
||||||
maybe_exc = _berr
|
|
||||||
raise _berr
|
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
# always disconnect our `push()` and cancel the
|
|
||||||
# ib-"mkt-data-feed".
|
|
||||||
teardown()
|
teardown()
|
||||||
|
|
||||||
|
# return from_aio
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
async def open_aio_quote_stream(
|
async def open_aio_quote_stream(
|
||||||
|
|
||||||
symbol: str,
|
symbol: str,
|
||||||
contract: Contract | None = None,
|
contract: Contract | None = None,
|
||||||
|
|
||||||
) -> trio.abc.ReceiveStream:
|
) -> trio.abc.ReceiveStream:
|
||||||
'''
|
|
||||||
Open a real-time `Ticker` quote stream from an `asyncio.Task`
|
|
||||||
spawned via `tractor.to_asyncio.open_channel_from()`, deliver the
|
|
||||||
inter-event-loop channel to the `trio.Task` caller and cache it
|
|
||||||
globally for re-use.
|
|
||||||
|
|
||||||
'''
|
|
||||||
from tractor.trionics import broadcast_receiver
|
from tractor.trionics import broadcast_receiver
|
||||||
global _quote_streams
|
global _quote_streams
|
||||||
|
|
||||||
|
@ -928,10 +787,6 @@ async def open_aio_quote_stream(
|
||||||
|
|
||||||
assert contract
|
assert contract
|
||||||
|
|
||||||
# TODO? de-reg on teardown of last consumer task?
|
|
||||||
# -> why aren't we using `.trionics.maybe_open_context()`
|
|
||||||
# here again?? (we are in `open_client_proxies()` tho?)
|
|
||||||
#
|
|
||||||
# cache feed for later consumers
|
# cache feed for later consumers
|
||||||
_quote_streams[symbol] = from_aio
|
_quote_streams[symbol] = from_aio
|
||||||
|
|
||||||
|
@ -946,12 +801,7 @@ def normalize(
|
||||||
calc_price: bool = False
|
calc_price: bool = False
|
||||||
|
|
||||||
) -> dict:
|
) -> dict:
|
||||||
'''
|
|
||||||
Translate `ib_async`'s `Ticker.ticks` values to a `piker`
|
|
||||||
normalized `dict` form for transmit to downstream `.data` layer
|
|
||||||
consumers.
|
|
||||||
|
|
||||||
'''
|
|
||||||
# check for special contract types
|
# check for special contract types
|
||||||
con = ticker.contract
|
con = ticker.contract
|
||||||
fqme, calc_price = con2fqme(con)
|
fqme, calc_price = con2fqme(con)
|
||||||
|
@ -970,7 +820,7 @@ def normalize(
|
||||||
|
|
||||||
tbt = ticker.tickByTicks
|
tbt = ticker.tickByTicks
|
||||||
if tbt:
|
if tbt:
|
||||||
log.info(f'tickbyticks:\n {ticker.tickByTicks}')
|
print(f'tickbyticks:\n {ticker.tickByTicks}')
|
||||||
|
|
||||||
ticker.ticks = new_ticks
|
ticker.ticks = new_ticks
|
||||||
|
|
||||||
|
@ -1006,39 +856,27 @@ def normalize(
|
||||||
return data
|
return data
|
||||||
|
|
||||||
|
|
||||||
# ?TODO? feels like this task-fn could be factored to reduce some
|
|
||||||
# indentation levels?
|
|
||||||
# -[ ] the reconnect while loop on ib-gw "data farm connection.."s
|
|
||||||
# -[ ] everything embedded under the `async with aclosing(stream):`
|
|
||||||
# as the "meat" of the quote delivery once the connection is
|
|
||||||
# stable.
|
|
||||||
#
|
|
||||||
async def stream_quotes(
|
async def stream_quotes(
|
||||||
|
|
||||||
send_chan: trio.abc.SendChannel,
|
send_chan: trio.abc.SendChannel,
|
||||||
symbols: list[str],
|
symbols: list[str],
|
||||||
feed_is_live: trio.Event,
|
feed_is_live: trio.Event,
|
||||||
|
loglevel: str = None,
|
||||||
# TODO? we need to hook into the `ib_async` logger like
|
|
||||||
# we can with i3ipc from modden!
|
|
||||||
# loglevel: str|None = None,
|
|
||||||
|
|
||||||
# startup sync
|
# startup sync
|
||||||
task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
|
task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
Stream `symbols[0]` quotes back via `send_chan`.
|
Stream symbol quotes.
|
||||||
|
|
||||||
The `feed_is_live: Event` is set to signal the caller that it can
|
This is a ``trio`` callable routine meant to be invoked
|
||||||
begin processing msgs from the mem-chan.
|
once the brokerd is up.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
# TODO: support multiple subscriptions
|
# TODO: support multiple subscriptions
|
||||||
sym: str = symbols[0]
|
sym = symbols[0]
|
||||||
log.info(
|
log.info(f'request for real-time quotes: {sym}')
|
||||||
f'request for real-time quotes\n'
|
|
||||||
f'sym: {sym!r}\n'
|
|
||||||
)
|
|
||||||
|
|
||||||
init_msgs: list[FeedInit] = []
|
init_msgs: list[FeedInit] = []
|
||||||
|
|
||||||
|
@ -1047,30 +885,21 @@ async def stream_quotes(
|
||||||
details: ibis.ContractDetails
|
details: ibis.ContractDetails
|
||||||
async with (
|
async with (
|
||||||
open_data_client() as proxy,
|
open_data_client() as proxy,
|
||||||
|
# trio.open_nursery() as tn,
|
||||||
):
|
):
|
||||||
mkt, details = await get_mkt_info(
|
mkt, details = await get_mkt_info(
|
||||||
sym,
|
sym,
|
||||||
proxy=proxy, # passed to avoid implicit client load
|
proxy=proxy, # passed to avoid implicit client load
|
||||||
)
|
)
|
||||||
|
|
||||||
# is venue active rn?
|
|
||||||
venue_is_open: bool = any(
|
|
||||||
is_current_time_in_range(
|
|
||||||
start_dt=sesh.start,
|
|
||||||
end_dt=sesh.end,
|
|
||||||
)
|
|
||||||
for sesh in details.tradingSessions()
|
|
||||||
)
|
|
||||||
|
|
||||||
init_msg = FeedInit(mkt_info=mkt)
|
init_msg = FeedInit(mkt_info=mkt)
|
||||||
|
|
||||||
# NOTE, tell sampler (via config) to skip vlm summing for dst
|
|
||||||
# assets which provide no vlm data..
|
|
||||||
if mkt.dst.atype in {
|
if mkt.dst.atype in {
|
||||||
'fiat',
|
'fiat',
|
||||||
'index',
|
'index',
|
||||||
'commodity',
|
'commodity',
|
||||||
}:
|
}:
|
||||||
|
# tell sampler config that it shouldn't do vlm summing.
|
||||||
init_msg.shm_write_opts['sum_tick_vlm'] = False
|
init_msg.shm_write_opts['sum_tick_vlm'] = False
|
||||||
init_msg.shm_write_opts['has_vlm'] = False
|
init_msg.shm_write_opts['has_vlm'] = False
|
||||||
|
|
||||||
|
@ -1078,18 +907,12 @@ async def stream_quotes(
|
||||||
|
|
||||||
con: Contract = details.contract
|
con: Contract = details.contract
|
||||||
first_ticker: Ticker | None = None
|
first_ticker: Ticker | None = None
|
||||||
|
with trio.move_on_after(1):
|
||||||
with trio.move_on_after(1.6) as quote_cs:
|
|
||||||
first_ticker: Ticker = await proxy.get_quote(
|
first_ticker: Ticker = await proxy.get_quote(
|
||||||
contract=con,
|
contract=con,
|
||||||
raise_on_timeout=False,
|
raise_on_timeout=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
# XXX should never happen with this ep right?
|
|
||||||
# but if so then, more then likely mkt is closed?
|
|
||||||
if quote_cs.cancelled_caught:
|
|
||||||
await tractor.pause()
|
|
||||||
|
|
||||||
if first_ticker:
|
if first_ticker:
|
||||||
first_quote: dict = normalize(first_ticker)
|
first_quote: dict = normalize(first_ticker)
|
||||||
|
|
||||||
|
@ -1101,27 +924,28 @@ async def stream_quotes(
|
||||||
f'{pformat(first_quote)}\n'
|
f'{pformat(first_quote)}\n'
|
||||||
)
|
)
|
||||||
|
|
||||||
# XXX NOTE: whenever we're "outside regular trading hours"
|
# NOTE: it might be outside regular trading hours for
|
||||||
# (only relevant for assets coming from the "legacy markets"
|
# assets with "standard venue operating hours" so we
|
||||||
# space) so we basically (from an API/runtime-operational
|
# only "pretend the feed is live" when the dst asset
|
||||||
# perspective) "pretend the feed is live" even if it's
|
# type is NOT within the NON-NORMAL-venue set: aka not
|
||||||
# actually closed.
|
# commodities, forex or crypto currencies which CAN
|
||||||
#
|
# always return a NaN on a snap quote request during
|
||||||
# IOW, we signal to the effective caller (task) that the live
|
# normal venue hours. In the case of a closed venue
|
||||||
# feed is "already up" but really we're just indicating that
|
# (equitiies, futes, bonds etc.) we at least try to
|
||||||
# the OHLCV history can start being loaded immediately by the
|
# grab the OHLC history.
|
||||||
# `piker.data`/`.tsp` layers.
|
if (
|
||||||
#
|
first_ticker
|
||||||
# XXX, deats: the "pretend we're live" is just done by
|
and
|
||||||
# a `feed_is_live.set()` even though nothing is actually live
|
isnan(first_ticker.last)
|
||||||
# Bp
|
# SO, if the last quote price value is NaN we ONLY
|
||||||
if not venue_is_open:
|
# "pretend to do" `feed_is_live.set()` if it's a known
|
||||||
log.warning(
|
# dst asset venue with a lot of closed operating hours.
|
||||||
f'Venue is closed, unable to establish real-time feed.\n'
|
and mkt.dst.atype not in {
|
||||||
f'mkt: {mkt!r}\n'
|
'commodity',
|
||||||
f'\n'
|
'fiat',
|
||||||
f'first_ticker: {first_ticker}\n'
|
'crypto',
|
||||||
)
|
}
|
||||||
|
):
|
||||||
task_status.started((
|
task_status.started((
|
||||||
init_msgs,
|
init_msgs,
|
||||||
first_quote,
|
first_quote,
|
||||||
|
@ -1132,12 +956,10 @@ async def stream_quotes(
|
||||||
feed_is_live.set()
|
feed_is_live.set()
|
||||||
|
|
||||||
# block and let data history backfill code run.
|
# block and let data history backfill code run.
|
||||||
# XXX obvi given the venue is closed, we never expect feed
|
|
||||||
# to come up; a taskc should be the only way to
|
|
||||||
# terminate this task.
|
|
||||||
await trio.sleep_forever()
|
await trio.sleep_forever()
|
||||||
|
return # we never expect feed to come up?
|
||||||
|
|
||||||
# ?TODO, we could instead spawn a task that waits on a feed
|
# TODO: we should instead spawn a task that waits on a feed
|
||||||
# to start and let it wait indefinitely..instead of this
|
# to start and let it wait indefinitely..instead of this
|
||||||
# hard coded stuff.
|
# hard coded stuff.
|
||||||
# async def wait_for_first_quote():
|
# async def wait_for_first_quote():
|
||||||
|
@ -1163,22 +985,19 @@ async def stream_quotes(
|
||||||
startup: bool = True
|
startup: bool = True
|
||||||
while (
|
while (
|
||||||
startup
|
startup
|
||||||
or
|
or cs.cancel_called
|
||||||
cs.cancel_called
|
|
||||||
):
|
):
|
||||||
with trio.CancelScope() as cs:
|
with trio.CancelScope() as cs:
|
||||||
async with (
|
async with (
|
||||||
tractor.trionics.collapse_eg(),
|
|
||||||
trio.open_nursery() as nurse,
|
trio.open_nursery() as nurse,
|
||||||
open_aio_quote_stream(
|
open_aio_quote_stream(
|
||||||
symbol=sym,
|
symbol=sym,
|
||||||
contract=con,
|
contract=con,
|
||||||
) as stream,
|
) as stream,
|
||||||
):
|
):
|
||||||
# ?TODO? can we rm this - particularly for `ib_async`?
|
|
||||||
# ugh, clear ticks since we've consumed them
|
# ugh, clear ticks since we've consumed them
|
||||||
# (ahem, ib_insync is stateful trash)
|
# (ahem, ib_insync is stateful trash)
|
||||||
# first_ticker.ticks = []
|
first_ticker.ticks = []
|
||||||
|
|
||||||
# only on first entry at feed boot up
|
# only on first entry at feed boot up
|
||||||
if startup:
|
if startup:
|
||||||
|
@ -1192,8 +1011,8 @@ async def stream_quotes(
|
||||||
# data feed event.
|
# data feed event.
|
||||||
async def reset_on_feed():
|
async def reset_on_feed():
|
||||||
|
|
||||||
# ??TODO? this seems to be surpressed from the
|
# TODO: this seems to be surpressed from the
|
||||||
# traceback in `tractor`?
|
# traceback in ``tractor``?
|
||||||
# assert 0
|
# assert 0
|
||||||
|
|
||||||
rt_ev = proxy.status_event(
|
rt_ev = proxy.status_event(
|
||||||
|
@ -1237,7 +1056,7 @@ async def stream_quotes(
|
||||||
# ugh, clear ticks since we've
|
# ugh, clear ticks since we've
|
||||||
# consumed them (ahem, ib_insync is
|
# consumed them (ahem, ib_insync is
|
||||||
# truly stateful trash)
|
# truly stateful trash)
|
||||||
# ticker.ticks = []
|
ticker.ticks = []
|
||||||
|
|
||||||
# XXX: this works because we don't use
|
# XXX: this works because we don't use
|
||||||
# ``aclosing()`` above?
|
# ``aclosing()`` above?
|
||||||
|
@ -1254,12 +1073,8 @@ async def stream_quotes(
|
||||||
async for ticker in stream:
|
async for ticker in stream:
|
||||||
quote = normalize(ticker)
|
quote = normalize(ticker)
|
||||||
fqme = quote['fqme']
|
fqme = quote['fqme']
|
||||||
log.debug(
|
|
||||||
f'Sending quote\n'
|
|
||||||
f'{quote}'
|
|
||||||
)
|
|
||||||
await send_chan.send({fqme: quote})
|
await send_chan.send({fqme: quote})
|
||||||
|
|
||||||
# ugh, clear ticks since we've consumed them
|
# ugh, clear ticks since we've consumed them
|
||||||
# ticker.ticks = []
|
ticker.ticks = []
|
||||||
# last = time.time()
|
# last = time.time()
|
||||||
|
|
|
@@ -34,7 +34,6 @@ import urllib.parse
 import hashlib
 import hmac
 import base64
-import tractor
 import trio

 from piker import config
@@ -373,7 +372,8 @@ class Client:
 # 1658347714, 'status': 'Success'}]}

 if xfers:
-await tractor.pause()
+import tractor
+await tractor.pp()

 trans: dict[str, Transaction] = {}
 for entry in xfers:
@@ -501,7 +501,6 @@ class Client:
 for xkey, data in resp['result'].items():

 # NOTE: always cache in pairs tables for faster lookup
-with tractor.devx.maybe_open_crash_handler(): # as bxerr:
 pair = Pair(xname=xkey, **data)

 # register the above `Pair` structs for all
@@ -175,8 +175,9 @@ async def handle_order_requests(

 case {
 'account': 'kraken.spot' as account,
-'action': 'buy'|'sell',
-}:
+'action': action,
+} if action in {'buy', 'sell'}:

 # validate
 order = BrokerdOrder(**msg)

@@ -261,12 +262,6 @@ async def handle_order_requests(
 } | extra

 log.info(f'Submitting WS order request:\n{pformat(req)}')

-# NOTE HOWTO, debug order requests
-#
-# if 'XRP' in pair:
-# await tractor.pause()

 await ws.send_msg(req)

 # placehold for sanity checking in relay loop
@@ -1090,8 +1085,6 @@ async def handle_order_updates(
 f'Failed to {action} order {reqid}:\n'
 f'{errmsg}'
 )
-# if tractor._state.debug_mode():
-# await tractor.pause()

 symbol: str = 'N/A'
 if chain := apiflows.get(reqid):
@@ -21,6 +21,7 @@ Symbology defs and search.
 from decimal import Decimal

 import tractor
+from rapidfuzz import process as fuzzy

 from piker._cacheables import (
 async_lifo_cache,
@@ -40,13 +41,8 @@ from piker.accounting._mktinfo import (
 )


+# https://www.kraken.com/features/api#get-tradable-pairs
 class Pair(Struct):
-'''
-A tradable asset pair as schema-defined by,
-
-https://docs.kraken.com/api/docs/rest-api/get-tradable-asset-pairs
-
-'''
 xname: str # idiotic bs_mktid equiv i guess?
 altname: str # alternate pair name
 wsname: str # WebSocket pair name (if available)
@@ -57,6 +53,7 @@ class Pair(Struct):
 lot: str # volume lot size

 cost_decimals: int
+costmin: float
 pair_decimals: int # scaling decimal places for pair
 lot_decimals: int # scaling decimal places for volume

@@ -82,7 +79,6 @@ class Pair(Struct):
 tick_size: float # min price step size
 status: str

-costmin: str|None = None # XXX, only some mktpairs?
 short_position_limit: float = 0
 long_position_limit: float = float('inf')
@@ -25,10 +25,7 @@ from typing import TYPE_CHECKING

 import trio
 import tractor
-from tractor.trionics import (
-broadcast_receiver,
-collapse_eg,
-)
+from tractor.trionics import broadcast_receiver

 from ._util import (
 log, # sub-sys logger
@@ -284,11 +281,8 @@ async def open_ems(
 client._ems_stream = trades_stream

 # start sync code order msg delivery task
-async with (
-collapse_eg(),
-trio.open_nursery() as tn,
-):
-tn.start_soon(
+async with trio.open_nursery() as n:
+n.start_soon(
 relay_orders_from_sync_code,
 client,
 fqme,
@@ -304,4 +298,4 @@ async def open_ems(
 )

 # stop the sync-msg-relay task on exit.
-tn.cancel_scope.cancel()
+n.cancel_scope.cancel()
@@ -42,7 +42,6 @@ from bidict import bidict
 import trio
 from trio_typing import TaskStatus
 import tractor
-from tractor import trionics

 from ._util import (
 log, # sub-sys logger
@@ -77,6 +76,7 @@ if TYPE_CHECKING:

 # TODO: numba all of this
 def mk_check(

 trigger_price: float,
 known_last: float,
 action: str,
@@ -162,7 +162,7 @@ async def clear_dark_triggers(

 router: Router,
 brokerd_orders_stream: tractor.MsgStream,
-quote_stream: tractor.MsgStream,
+quote_stream: tractor.ReceiveMsgStream, # noqa
 broker: str,
 fqme: str,

@@ -178,7 +178,6 @@ async def clear_dark_triggers(
 '''
 # XXX: optimize this for speed!
 # TODO:
-# - port to the new ringbuf stuff in `tractor.ipc`!
 # - numba all this!
 # - this stream may eventually contain multiple symbols
 quote_stream._raise_on_lag = False
@@ -388,7 +387,6 @@ async def open_brokerd_dialog(
 for ep_name in [
 'open_trade_dialog', # probably final name?
 'trades_dialogue', # legacy
-# ^!TODO, rm this since all backends ported no ?!?
 ]:
 trades_endpoint = getattr(
 brokermod,
@@ -502,7 +500,7 @@ class Router(Struct):

 '''
 # setup at actor spawn time
-_tn: trio.Nursery
+nursery: trio.Nursery

 # broker to book map
 books: dict[str, DarkBook] = {}
@@ -668,7 +666,7 @@ class Router(Struct):
 # dark book clearing loop, also lives with parent
 # daemon to allow dark order clearing while no
 # client is connected.
-self._tn.start_soon(
+self.nursery.start_soon(
 clear_dark_triggers,
 self,
 relay.brokerd_stream,
@@ -691,7 +689,7 @@ class Router(Struct):

 # spawn a ``brokerd`` order control dialog stream
 # that syncs lifetime with the parent `emsd` daemon.
-self._tn.start_soon(
+self.nursery.start_soon(
 translate_and_relay_brokerd_events,
 broker,
 relay.brokerd_stream,
@@ -765,12 +763,10 @@ async def _setup_persistent_emsd(

 global _router

-# open a root "service task-nursery" for the `emsd`-actor
-async with (
-trionics.collapse_eg(),
-trio.open_nursery() as tn
-):
-_router = Router(_tn=tn)
+# open a root "service nursery" for the ``emsd`` actor
+async with trio.open_nursery() as service_nursery:
+_router = Router(nursery=service_nursery)

 # TODO: send back the full set of persistent
 # orders/execs?
@@ -1020,18 +1016,8 @@ async def translate_and_relay_brokerd_events(
 )

 if status == 'closed':
-log.info(
-f'Execution is complete!\n'
-f'oid: {oid!r}\n'
-)
-status_msg = book._active.pop(oid, None)
-if status_msg is None:
-log.warning(
-f'Order was already cleared from book ??\n'
-f'oid: {oid!r}\n'
-f'\n'
-f'Maybe the order cancelled before submitted ??\n'
-)
+log.info(f'Execution for {oid} is complete!')
+status_msg = book._active.pop(oid)

 elif status == 'canceled':
 log.cancel(f'Cancellation for {oid} is complete!')
@@ -1196,16 +1182,12 @@ async def process_client_order_cmds(
 submitting live orders immediately if requested by the client.

 '''
-# TODO, only allow `msgspec.Struct` form!
-cmd: dict
+# cmd: dict
 async for cmd in client_order_stream:
-log.info(
-f'Received order cmd:\n'
-f'{pformat(cmd)}\n'
-)
+log.info(f'Received order cmd:\n{pformat(cmd)}')

 # CAWT DAMN we need struct support!
-oid: str = str(cmd['oid'])
+oid = str(cmd['oid'])

 # register this stream as an active order dialog (msg flow) for
 # this order id such that translated message from the brokerd
@@ -1311,7 +1293,7 @@ async def process_client_order_cmds(
 case {
 'oid': oid,
 'symbol': fqme,
-'price': price,
+'price': trigger_price,
 'size': size,
 'action': ('buy' | 'sell') as action,
 'exec_mode': ('live' | 'paper'),
@@ -1343,7 +1325,7 @@ async def process_client_order_cmds(

 symbol=sym,
 action=action,
-price=price,
+price=trigger_price,
 size=size,
 account=req.account,
 )
@@ -1365,11 +1347,7 @@ async def process_client_order_cmds(
 # (``translate_and_relay_brokerd_events()`` above) will
 # handle relaying the ems side responses back to
 # the client/cmd sender from this request
-log.info(
-f'Sending live order to {broker}:\n'
-f'{pformat(msg)}'
-)
+log.info(f'Sending live order to {broker}:\n{pformat(msg)}')

 await brokerd_order_stream.send(msg)

 # an immediate response should be ``BrokerdOrderAck``
@@ -1385,7 +1363,7 @@ async def process_client_order_cmds(
 case {
 'oid': oid,
 'symbol': fqme,
-'price': price,
+'price': trigger_price,
 'size': size,
 'exec_mode': exec_mode,
 'action': action,
@@ -1413,12 +1391,7 @@ async def process_client_order_cmds(
 if isnan(last):
 last = flume.rt_shm.array[-1]['close']

-trigger_price: float = float(price)
-pred = mk_check(
-trigger_price,
-last,
-action,
-)
+pred = mk_check(trigger_price, last, action)

 # NOTE: for dark orders currently we submit
 # the triggered live order at a price 5 ticks
@@ -1525,7 +1498,7 @@ async def maybe_open_trade_relays(
 loglevel: str = 'info',
 ):

-fqme, relay, feed, client_ready = await _router._tn.start(
+fqme, relay, feed, client_ready = await _router.nursery.start(
 _router.open_trade_relays,
 fqme,
 exec_mode,
@@ -1555,18 +1528,19 @@ async def maybe_open_trade_relays(

 @tractor.context
 async def _emsd_main(
-ctx: tractor.Context, # becomes `ems_ctx` below
+ctx: tractor.Context,
 fqme: str,
 exec_mode: str, # ('paper', 'live')
 loglevel: str | None = None,

-) -> tuple[ # `ctx.started()` value!
-dict[ # positions
-tuple[str, str], # brokername, acctid
+) -> tuple[
+dict[
+# brokername, acctid
+tuple[str, str],
 list[BrokerdPosition],
 ],
-list[str], # accounts
-dict[str, Status], # dialogs
+list[str],
+dict[str, Status],
 ]:
 '''
 EMS (sub)actor entrypoint providing the execution management
@@ -19,7 +19,6 @@ Clearing sub-system message and protocols.

 """
 from __future__ import annotations
-from decimal import Decimal
 from typing import (
 Literal,
 )
@@ -72,15 +71,7 @@ class Order(Struct):
 symbol: str # | MktPair
 account: str # should we set a default as '' ?

-# https://docs.python.org/3/library/decimal.html#decimal-objects
-#
-# ?TODO? decimal usage throughout?
-# -[ ] possibly leverage the `Encoder(decimal_format='number')`
-# bit?
-# |_https://jcristharif.com/msgspec/supported-types.html#decimal
-# -[ ] should we also use it for .size?
-#
-price: Decimal
+price: float
 size: float # -ve is "sell", +ve is "buy"

 brokers: list[str] = []
@@ -187,7 +178,7 @@ class BrokerdOrder(Struct):
 time_ns: int

 symbol: str # fqme
-price: Decimal
+price: float
 size: float

 # TODO: if we instead rely on a +ve/-ve size to determine
@@ -301,9 +292,6 @@ class BrokerdError(Struct):

 # TODO: yeah, so we REALLY need to completely deprecate
 # this and use the `.accounting.Position` msg-type instead..
-# -[ ] an alternative might be to add a `Position.summary() ->
-# `PositionSummary`-msg that we generate since `Position` has a lot
-# of fields by default we likely don't want to send over the wire?
 class BrokerdPosition(Struct):
 '''
 Position update event from brokerd.
@@ -316,4 +304,3 @@ class BrokerdPosition(Struct):
 avg_price: float
 currency: str = ''
 name: str = 'position'
-bs_mktid: str|int|None = None
@@ -508,7 +508,7 @@ async def handle_order_requests(
 reqid = await client.submit_limit(
 oid=order.oid,
 symbol=f'{order.symbol}.{client.broker}',
-price=float(order.price),
+price=order.price,
 action=order.action,
 size=order.size,
 # XXX: by default 0 tells ``ib_insync`` methods that
@@ -134,8 +134,8 @@ def pikerd(
 Spawn the piker broker-daemon.

 '''
-# from tractor.devx import maybe_open_crash_handler
-# with maybe_open_crash_handler(pdb=False):
+from tractor.devx import maybe_open_crash_handler
+with maybe_open_crash_handler(pdb=pdb):
 log = get_console_log(loglevel, name='cli')

 if pdb:
@@ -178,18 +178,39 @@ def pikerd(

 async def main():
 service_mngr: service.Services

 async with (
 service.open_pikerd(
 registry_addrs=regaddrs,
 loglevel=loglevel,
 debug_mode=pdb,
-enable_transports=['uds'],
-# enable_transports=['tcp'],
-) as service_mngr,
+) as service_mngr, # normally delivers a ``Services`` handle
+
+# AsyncExitStack() as stack,
 ):
-assert service_mngr
-# ?TODO? spawn all other sub-actor daemons according to
+# TODO: spawn all other sub-actor daemons according to
 # multiaddress endpoint spec defined by user config
+assert service_mngr
+
+# if tsdb:
+# dname, conf = await stack.enter_async_context(
+# service.marketstore.start_ahab_daemon(
+# service_mngr,
+# loglevel=loglevel,
+# )
+# )
+# log.info(f'TSDB `{dname}` up with conf:\n{conf}')
+
+# if es:
+# dname, conf = await stack.enter_async_context(
+# service.elastic.start_ahab_daemon(
+# service_mngr,
+# loglevel=loglevel,
+# )
+# )
+# log.info(f'DB `{dname}` up with conf:\n{conf}')

 await trio.sleep_forever()

 trio.run(main)
@@ -307,10 +328,6 @@ def services(config, tl, ports):
 if not ports:
 ports = [_default_registry_port]

-addr = tractor._addr.wrap_address(
-addr=(host, ports[0])
-)

 async def list_services():
 nonlocal host
 async with (
@@ -318,18 +335,16 @@ def services(config, tl, ports):
 name='service_query',
 loglevel=config['loglevel'] if tl else None,
 ),
-tractor.get_registry(
-addr=addr,
+tractor.get_arbiter(
+host=host,
+port=ports[0]
 ) as portal
 ):
-registry = await portal.run_from_ns(
-'self',
-'get_registry',
-)
+registry = await portal.run_from_ns('self', 'get_registry')
 json_d = {}
 for key, socket in registry.items():
-json_d[key] = f'{socket}'
+host, port = socket
+json_d[key] = f'{host}:{port}'
 click.echo(f"{colorize_json(json_d)}")

 trio.run(list_services)
@@ -284,8 +284,7 @@ class Sampler:

 except (
 trio.BrokenResourceError,
-trio.ClosedResourceError,
-trio.EndOfChannel,
+trio.ClosedResourceError
 ):
 log.error(
 f'{stream._ctx.chan.uid} dropped connection'
@@ -698,7 +697,7 @@ async def sample_and_broadcast(

 log.warning(
 f'Feed OVERRUN {sub_key}'
-f'@{bus.brokername} -> \n'
+'@{bus.brokername} -> \n'
 f'feed @ {chan.uid}\n'
 f'throttle = {throttle} Hz'
 )
@@ -877,7 +876,6 @@ async def uniform_rate_send(
 except tractor.RemoteActorError as rme:
 if rme.type is not tractor._exceptions.StreamOverrun:
 raise

 ctx = stream._ctx
 chan = ctx.chan
 log.warning(
@@ -894,7 +892,6 @@ async def uniform_rate_send(
 trio.ClosedResourceError,
 trio.BrokenResourceError,
 ConnectionResetError,
-trio.EndOfChannel,
 ):
 # if the feed consumer goes down then drop
 # out of this rate limiter
@@ -90,18 +90,6 @@ class SymbologyCache(Struct):
 # provided by the backend pkg.
 mktmaps: dict[str, MktPair] = field(default_factory=dict)

-def pformat(self) -> str:
-return (
-f'<{type(self).__name__}(\n'
-f' .mod: {self.mod!r}\n'
-f' .assets: {len(self.assets)!r}\n'
-f' .pairs: {len(self.pairs)!r}\n'
-f' .mktmaps: {len(self.mktmaps)!r}\n'
-f')>'
-)
-
-__repr__ = pformat
-
 def write_config(self) -> None:

 # put the backend's pair-struct type ref at the top
@@ -27,6 +27,7 @@ from functools import partial
 from types import ModuleType
 from typing import (
 Any,
+Optional,
 Callable,
 AsyncContextManager,
 AsyncGenerator,
@@ -34,7 +35,6 @@ from typing import (
 )
 import json

-import tractor
 import trio
 from trio_typing import TaskStatus
 from trio_websocket import (
@@ -167,7 +167,7 @@ async def _reconnect_forever(

 async def proxy_msgs(
 ws: WebSocketConnection,
-rent_cs: trio.CancelScope, # parent cancel scope
+pcs: trio.CancelScope, # parent cancel scope
 ):
 '''
 Receive (under `timeout` deadline) all msgs from from underlying
@@ -192,7 +192,7 @@ async def _reconnect_forever(
 f'{url} connection bail with:'
 )
 await trio.sleep(0.5)
-rent_cs.cancel()
+pcs.cancel()

 # go back to reonnect loop in parent task
 return
@@ -204,7 +204,7 @@ async def _reconnect_forever(
 f'{src_mod}\n'
 'WS feed seems down and slow af.. reconnecting\n'
 )
-rent_cs.cancel()
+pcs.cancel()

 # go back to reonnect loop in parent task
 return
@@ -228,12 +228,7 @@ async def _reconnect_forever(
 nobsws._connected = trio.Event()
 task_status.started()

-mc_state: trio._channel.MemoryChannelState = snd._state
-while (
-mc_state.open_receive_channels > 0
-and
-mc_state.open_send_channels > 0
-):
+while not snd._closed:
 log.info(
 f'{src_mod}\n'
 f'{url} trying (RE)CONNECT'
@@ -242,11 +237,10 @@ async def _reconnect_forever(
 ws: WebSocketConnection
 try:
 async with (
+trio.open_nursery() as n,
 open_websocket_url(url) as ws,
-tractor.trionics.collapse_eg(),
-trio.open_nursery() as tn,
 ):
-cs = nobsws._cs = tn.cancel_scope
+cs = nobsws._cs = n.cancel_scope
 nobsws._ws = ws
 log.info(
 f'{src_mod}\n'
@@ -254,7 +248,7 @@ async def _reconnect_forever(
 )

 # begin relay loop to forward msgs
-tn.start_soon(
+n.start_soon(
 proxy_msgs,
 ws,
 cs,
@@ -268,7 +262,7 @@ async def _reconnect_forever(

 # TODO: should we return an explicit sub-cs
 # from this fixture task?
-await tn.start(
+await n.start(
 open_fixture,
 fixture,
 nobsws,
@@ -278,23 +272,11 @@ async def _reconnect_forever(
 # to let tasks run **inside** the ws open block above.
 nobsws._connected.set()
 await trio.sleep_forever()
-except (
-HandshakeError,
-ConnectionRejected,
-):
+except HandshakeError:
 log.exception('Retrying connection')
-await trio.sleep(0.5) # throttle

-except BaseException as _berr:
-berr = _berr
-log.exception(
-'Reconnect-attempt failed ??\n'
-)
-await trio.sleep(0.2) # throttle
-raise berr
-
-#|_ws & nursery block ends
+# ws & nursery block ends
 nobsws._connected = trio.Event()
 if cs.cancelled_caught:
 log.cancel(
@@ -342,25 +324,21 @@ async def open_autorecon_ws(
 connetivity errors, or some user defined recv timeout.

 You can provide a ``fixture`` async-context-manager which will be
-entered/exitted around each connection reset; eg. for
-(re)requesting subscriptions without requiring streaming setup
-code to rerun.
+entered/exitted around each connection reset; eg. for (re)requesting
+subscriptions without requiring streaming setup code to rerun.

 '''
 snd: trio.MemorySendChannel
 rcv: trio.MemoryReceiveChannel
 snd, rcv = trio.open_memory_channel(616)

-async with (
-tractor.trionics.collapse_eg(),
-trio.open_nursery() as tn
-):
+async with trio.open_nursery() as n:
 nobsws = NoBsWs(
 url,
 rcv,
 msg_recv_timeout=msg_recv_timeout,
 )
-await tn.start(
+await n.start(
 partial(
 _reconnect_forever,
 url,
@@ -373,10 +351,11 @@ async def open_autorecon_ws(
 await nobsws._connected.wait()
 assert nobsws._cs
 assert nobsws.connected()

 try:
 yield nobsws
 finally:
-tn.cancel_scope.cancel()
+n.cancel_scope.cancel()


 '''
@@ -389,8 +368,8 @@ of msgs over a `NoBsWs`.
 class JSONRPCResult(Struct):
 id: int
 jsonrpc: str = '2.0'
-result: dict|None = None
-error: dict|None = None
+result: Optional[dict] = None
+error: Optional[dict] = None


 @acm
@@ -39,7 +39,6 @@ from typing import (
 AsyncContextManager,
 Awaitable,
 Sequence,
-TYPE_CHECKING,
 )

 import trio
@@ -76,10 +75,6 @@ from ._sampling import (
 uniform_rate_send,
 )

-if TYPE_CHECKING:
-from tractor._addr import Address
-from tractor.msg.types import Aid


 class Sub(Struct, frozen=True):
 '''
@@ -728,10 +723,7 @@ class Feed(Struct):
 async for msg in stream:
 await tx.send(msg)

-async with (
-tractor.trionics.collapse_eg(),
-trio.open_nursery() as nurse
-):
+async with trio.open_nursery() as nurse:
 # spawn a relay task for each stream so that they all
 # multiplex to a common channel.
 for brokername in mods:
@@ -907,19 +899,19 @@ async def open_feed(
 feed.portals[brokermod] = portal

 # fill out "status info" that the UI can show
-chan: tractor.Channel = portal.chan
-raddr: Address = chan.raddr
-aid: Aid = chan.aid
-# TAG_feed_status_update
+host, port = portal.channel.raddr
+if host == '127.0.0.1':
+host = 'localhost'
 feed.status.update({
-'actor_id': aid,
-'actor_short_id': f'{aid.name}@{aid.pid}',
-'ipc': chan.raddr.proto_key,
-'ipc_addr': raddr,
+'actor_name': portal.channel.uid[0],
+'host': host,
+'port': port,
 'hist_shm': 'NA',
 'rt_shm': 'NA',
-'throttle_hz': tick_throttle,
+'throttle_rate': tick_throttle,
 })
+# feed.status.update(init_msg.pop('status', {}))

 # (allocate and) connect to any feed bus for this broker
 bus_ctxs.append(
@@ -498,7 +498,6 @@ async def cascade(

 func_name: str = func.__name__
 async with (
-tractor.trionics.collapse_eg(), # avoid multi-taskc tb in console
 trio.open_nursery() as tn,
 ):
 # TODO: might be better to just make a "restart" method where
28 piker/log.py
@@ -18,11 +18,7 @@
 Log like a forester!
 """
 import logging
-import reprlib
 import json
-from typing import (
-Callable,
-)

 import tractor
 from pygments import (
@@ -88,27 +84,3 @@ def colorize_json(
 # likeable styles: algol_nu, tango, monokai
 formatters.TerminalTrueColorFormatter(style=style)
 )
-
-
-def mk_repr(
-**repr_kws,
-) -> Callable[[str], str]:
-'''
-Allocate and deliver a `repr.Repr` instance with provided input
-settings using the std-lib's `reprlib` mod,
-* https://docs.python.org/3/library/reprlib.html
-
------- Ex. ------
-An up to 6-layer-nested `dict` as multi-line:
-- https://stackoverflow.com/a/79102479
-- https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel
-
-'''
-def_kws: dict[str, int] = dict(
-indent=2,
-maxlevel=6, # recursion levels
-maxstring=66, # match editor line-len limit
-)
-def_kws |= repr_kws
-reprr = reprlib.Repr(**def_kws)
-return reprr.repr
@@ -107,22 +107,17 @@ async def open_piker_runtime(
 async with (
 tractor.open_root_actor(

-# passed through to `open_root_actor`
+# passed through to ``open_root_actor``
 registry_addrs=registry_addrs,
 name=name,
-start_method=start_method,
 loglevel=loglevel,
 debug_mode=debug_mode,
+start_method=start_method,
-# XXX NOTE MEMBER DAT der's a perf hit yo!!
-# https://greenback.readthedocs.io/en/latest/principle.html#performance
-maybe_enable_greenback=True,

 # TODO: eventually we should be able to avoid
 # having the root have more then permissions to
 # spawn other specialized daemons I think?
 enable_modules=enable_modules,
-hide_tb=False,

 **tractor_kwargs,
 ) as actor,

@@ -205,8 +200,7 @@ async def open_pikerd(
 reg_addrs,
 ),
 tractor.open_nursery() as actor_nursery,
-tractor.trionics.collapse_eg(),
-trio.open_nursery() as service_tn,
+trio.open_nursery() as service_nursery,
 ):
 for addr in reg_addrs:
 if addr not in root_actor.accept_addrs:

@@ -217,7 +211,7 @@ async def open_pikerd(

 # assign globally for future daemon/task creation
 Services.actor_n = actor_nursery
-Services.service_n = service_tn
+Services.service_n = service_nursery
 Services.debug_mode = debug_mode

 try:

@@ -227,7 +221,7 @@ async def open_pikerd(
 # TODO: is this more clever/efficient?
 # if 'samplerd' in Services.service_tasks:
 # await Services.cancel_service('samplerd')
-service_tn.cancel_scope.cancel()
+service_nursery.cancel_scope.cancel()


 # TODO: do we even need this?
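The `collapse_eg()` + nursery pairing recurs all over the left branch's side
of this diff; a minimal runnable sketch of the pattern follows. The exact
semantics are inferred from the diff's own comment about avoiding multi-task
tracebacks on the console, so treat the description as an assumption::

    import trio
    import tractor

    async def main():
        async with (
            # collapse an ExceptionGroup wrapping a lone error so the
            # console traceback stays single-task readable (per the
            # in-diff comment; exact behavior is an assumption here)
            tractor.trionics.collapse_eg(),
            trio.open_nursery() as tn,
        ):
            tn.start_soon(trio.sleep, 0.1)

    trio.run(main)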
@@ -262,10 +256,7 @@ async def maybe_open_pikerd(
 loglevel: str | None = None,
 **kwargs,

-) -> (
-    tractor._portal.Portal
-    |ClassVar[Services]
-):
+) -> tractor._portal.Portal | ClassVar[Services]:
 '''
 If no ``pikerd`` daemon-root-actor can be found start it and
 yield up (we should probably figure out returning a portal to self

@@ -290,8 +281,7 @@ async def maybe_open_pikerd(

 registry_addrs: list[tuple[str, int]] = (
 registry_addrs
-or
-[_default_reg_addr]
+or [_default_reg_addr]
 )

 pikerd_portal: tractor.Portal | None

@@ -28,7 +28,6 @@ from contextlib import (
 )

 import tractor
-from trio.lowlevel import current_task

 from ._util import (
 log, # sub-sys logger

@@ -71,7 +70,6 @@ async def maybe_spawn_daemon(
 lock = Services.locks[service_name]
 await lock.acquire()

-try:
 async with find_service(
 service_name,
 registry_addrs=[('127.0.0.1', 6116)],

@@ -136,20 +134,6 @@ async def maybe_spawn_daemon(
 yield portal
 await portal.cancel_actor()

-except BaseException as _err:
-    err = _err
-    if (
-        lock.locked()
-        and
-        lock.statistics().owner is current_task()
-    ):
-        log.exception(
-            f'Releasing stale lock after crash..?'
-            f'{err!r}\n'
-        )
-        lock.release()
-    raise err


 async def spawn_emsd(
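The crash-path removed above only releases the lock when the current task
still owns it; that ownership test is plain `trio` API. A self-contained
sketch of the same check (a close transcription of the deleted lines, not
new API)::

    import trio
    from trio.lowlevel import current_task

    async def main():
        lock = trio.Lock()
        await lock.acquire()
        # ...some failing daemon-spawn work would go here...
        # only release if *this* task still owns the lock, mirroring
        # the deleted `maybe_spawn_daemon()` teardown logic
        if (
            lock.locked()
            and lock.statistics().owner is current_task()
        ):
            lock.release()

    trio.run(main)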
@@ -109,7 +109,7 @@ class Services:
 # wait on any context's return value
 # and any final portal result from the
 # sub-actor.
-ctx_res: Any = await ctx.wait_for_result()
+ctx_res: Any = await ctx.result()

 # NOTE: blocks indefinitely until cancelled
 # either by error from the target context
@@ -101,15 +101,13 @@ async def open_registry(

 if (
 not tractor.is_root_process()
-and
-not Registry.addrs
+and not Registry.addrs
 ):
 Registry.addrs.extend(actor.reg_addrs)

 if (
 ensure_exists
-and
-not Registry.addrs
+and not Registry.addrs
 ):
 raise RuntimeError(
 f"`{uid}` registry should already exist but doesn't?"

@@ -148,7 +146,7 @@ async def find_service(
 | list[Portal]
 | None
 ):
-# try:
 reg_addrs: list[tuple[str, int]]
 async with open_registry(
 addrs=(

@@ -159,39 +157,22 @@ async def find_service(
 or Registry.addrs
 ),
 ) as reg_addrs:
+log.info(f'Scanning for service `{service_name}`')

-log.info(
-    f'Scanning for service {service_name!r}'
-)
+maybe_portals: list[Portal] | Portal | None

 # attach to existing daemon by name if possible
-maybe_portals: list[Portal]|Portal|None
 async with tractor.find_actor(
 service_name,
 registry_addrs=reg_addrs,
 only_first=first_only, # if set only returns single ref
 ) as maybe_portals:
 if not maybe_portals:
-# log.info(
-print(
-    f'Could NOT find service {service_name!r} -> {maybe_portals!r}'
-)
 yield None
 return

-# log.info(
-print(
-    f'Found service {service_name!r} -> {maybe_portals}'
-)
 yield maybe_portals

-# except BaseException as _berr:
-# berr = _berr
-# log.exception(
-# 'tractor.find_actor() failed with,\n'
-# )
-# raise berr


 async def check_for_service(
 service_name: str,
@@ -43,6 +43,7 @@ from typing import (

 import numpy as np


 from .. import config
 from ..service import (
 check_for_service,

@@ -137,16 +138,6 @@ class StorageClient(
 ) -> None:
 ...

-async def write_oi(
-    self,
-    fqme: str,
-    oi: np.ndarray,
-    append_and_duplicate: bool = True,
-    limit: int = int(800e3),
-
-) -> None:
-    ...


 class TimeseriesNotFound(Exception):
 '''

@@ -161,10 +152,7 @@ class StorageConnectionError(ConnectionError):

 '''

-def get_storagemod(
-    name: str,
-
-) -> ModuleType:
+def get_storagemod(name: str) -> ModuleType:
 mod: ModuleType = import_module(
 '.' + name,
 'piker.storage',

@@ -179,10 +167,7 @@ def get_storagemod(
 async def open_storage_client(
 backend: str | None = None,

-) -> tuple[
-    ModuleType,
-    StorageClient,
-]:
+) -> tuple[ModuleType, StorageClient]:
 '''
 Load the ``StorageClient`` for named backend.


@@ -282,10 +267,7 @@ async def open_tsdb_client(
 from ..data.feed import maybe_open_feed

 async with (
-open_storage_client() as (
-    _,
-    storage,
-),
+open_storage_client() as (_, storage),

 maybe_open_feed(
 [fqme],

@@ -293,7 +275,7 @@ async def open_tsdb_client(

 ) as feed,
 ):
-profiler(f'opened feed for {fqme!r}')
+profiler(f'opened feed for {fqme}')

 # to_append = feed.hist_shm.array
 # to_prepend = None
@@ -111,24 +111,6 @@ def mk_ohlcv_shm_keyed_filepath(
 return path


-def mk_oi_shm_keyed_filepath(
-    fqme: str,
-    period: float | int,
-    datadir: Path,
-
-) -> Path:
-
-    if period < 1.:
-        raise ValueError('Sample period should be >= 1.!?')
-
-    path: Path = (
-        datadir
-        /
-        f'{fqme}.oi{int(period)}s.parquet'
-    )
-    return path


 def unpack_fqme_from_parquet_filepath(path: Path) -> str:

 filename: str = str(path.name)

@@ -190,11 +172,7 @@ class NativeStorageClient:

 key: str = path.name.rstrip('.parquet')
 fqme, _, descr = key.rpartition('.')
-if 'ohlcv' in descr:
 prefix, _, suffix = descr.partition('ohlcv')
-elif 'oi' in descr:
-    prefix, _, suffix = descr.partition('oi')

 period: int = int(suffix.strip('s'))

 # cache description data

@@ -391,61 +369,6 @@ class NativeStorageClient:
 timeframe,
 )

-def _write_oi(
-    self,
-    fqme: str,
-    oi: np.ndarray,
-
-) -> Path:
-    '''
-    Sync version of the public interface meth, since we don't
-    currently actually need or support an async impl.
-
-    '''
-    path: Path = mk_oi_shm_keyed_filepath(
-        fqme=fqme,
-        period=1,
-        datadir=self._datadir,
-    )
-    if isinstance(oi, np.ndarray):
-        new_df: pl.DataFrame = tsp.np2pl(oi)
-    else:
-        new_df = oi
-
-    if path.exists():
-        old_df = pl.read_parquet(path)
-        df = pl.concat([old_df, new_df])
-    else:
-        df = new_df
-
-    start = time.time()
-    df.write_parquet(path)
-    delay: float = round(
-        time.time() - start,
-        ndigits=6,
-    )
-    log.info(
-        f'parquet write took {delay} secs\n'
-        f'file path: {path}'
-    )
-    return path
-
-async def write_oi(
-    self,
-    fqme: str,
-    oi: np.ndarray,
-
-) -> Path:
-    '''
-    Write input oi time series for fqme and sampling period
-    to (local) disk.
-
-    '''
-    return self._write_oi(
-        fqme,
-        oi,
-    )

 async def delete_ts(
 self,
 key: str,
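Stripped of piker-specific types, the parquet "append" idiom the removed
`_write_oi()` used is read-concat-rewrite, since parquet files can't be
appended in place. A standalone sketch (the file name and toy schema are
made up for illustration)::

    from pathlib import Path
    import polars as pl

    def append_parquet(path: Path, new_df: pl.DataFrame) -> Path:
        # read the old frame (if any), stack the new rows under it,
        # then rewrite the whole file
        if path.exists():
            old_df = pl.read_parquet(path)
            new_df = pl.concat([old_df, new_df])
        new_df.write_parquet(path)
        return path

    # hypothetical usage with a toy open-interest frame
    df = pl.DataFrame({'time': [1, 2], 'oi': [10.0, 12.5]})
    append_parquet(Path('example.oi1s.parquet'), df)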
(File diff suppressed because it is too large.)
@@ -517,7 +517,7 @@ def with_dts(

 '''
 return df.with_columns([
-pl.col(time_col).shift(1).name.suffix('_prev'),
+pl.col(time_col).shift(1).suffix('_prev'),
 pl.col(time_col).diff().alias('s_diff'),
 pl.from_epoch(pl.col(time_col)).alias('dt'),
 ]).with_columns([
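This one-liner tracks a polars API move: in newer polars (the left branch
pins `polars >=0.20.6` in its pyproject hunk below, the right `<0.19`) the
expression-renaming helpers live under the `Expr.name` namespace, so a bare
`.suffix()` becomes `.name.suffix()`. A minimal check::

    import polars as pl  # >= 0.19 for the `Expr.name` namespace

    df = pl.DataFrame({'time': [1, 2, 4, 5]})
    out = df.with_columns([
        pl.col('time').shift(1).name.suffix('_prev'),  # was `.suffix()`
        pl.col('time').diff().alias('s_diff'),
    ])
    print(out)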
@@ -623,7 +623,7 @@ def detect_vlm_gaps(

 ) -> pl.DataFrame:

-vnull: pl.DataFrame = df.filter(
+vnull: pl.DataFrame = w_dts.filter(
 pl.col(col) == 0
 )
 return vnull
(File diff suppressed because it is too large.)
@@ -21,7 +21,6 @@ Main app startup and run.
 from functools import partial
 from types import ModuleType

-import tractor
 import trio

 from piker.ui.qt import (

@@ -117,7 +116,6 @@ async def _async_main(
 needed_brokermods[brokername] = brokers[brokername]

 async with (
-tractor.trionics.collapse_eg(),
 trio.open_nursery() as root_n,
 ):
 # set root nursery and task stack for spawning other charts/feeds
@@ -33,6 +33,7 @@ import trio

 from piker.ui.qt import (
 QtCore,
+QtWidgets,
 Qt,
 QLineF,
 QFrame,
@@ -1445,10 +1445,7 @@ async def display_symbol_data(
 # for pause/resume on mouse interaction
 rt_chart.feed = feed

-async with (
-    tractor.trionics.collapse_eg(),
-    trio.open_nursery() as ln,
-):
+async with trio.open_nursery() as ln:
 # if available load volume related built-in display(s)
 vlm_charts: dict[
 str,
@@ -22,10 +22,7 @@ from contextlib import asynccontextmanager as acm
 from typing import Callable

 import trio
-from tractor.trionics import (
-    gather_contexts,
-    collapse_eg,
-)
+from tractor.trionics import gather_contexts

 from piker.ui.qt import (
 QtCore,

@@ -210,10 +207,7 @@ async def open_signal_handler(
 async for args in recv:
 await async_handler(*args)

-async with (
-    collapse_eg(),
-    trio.open_nursery() as tn
-):
+async with trio.open_nursery() as tn:
 tn.start_soon(proxy_to_handler)
 async with send:
 yield

@@ -248,7 +242,6 @@ async def open_handlers(
 widget: QWidget
 streams: list[trio.abc.ReceiveChannel]
 async with (
-collapse_eg(),
 trio.open_nursery() as tn,
 gather_contexts([
 open_event_stream(
@@ -18,11 +18,10 @@
 Feed status and controls widget(s) for embedding in a UI-pane.

 """

 from __future__ import annotations
-from typing import (
-    Any,
-    TYPE_CHECKING,
-)
+from textwrap import dedent
+from typing import TYPE_CHECKING

 # from PyQt5.QtCore import Qt

@@ -50,55 +49,35 @@ def mk_feed_label(
 a feed control protocol.

 '''
-status: dict[str, Any] = feed.status
+status = feed.status
 assert status

-# SO tips on ws/nls,
-# https://stackoverflow.com/a/15721400
-ws: str = ' '
-# nl: str = '<br>' # dun work?
-actor_info_repr: str = (
-    f')> **{status["actor_short_id"]}**\n'
-    '\n' # bc md?
-)
+msg = dedent("""
+    actor: **{actor_name}**\n
+    |_ @**{host}:{port}**\n
+    """)

-# fields to select *IN* for display
-# (see `.data.feed.open_feed()` status
-# update -> TAG_feed_status_update)
-for key in [
-    'ipc',
-    'hist_shm',
-    'rt_shm',
-    'throttle_hz',
-]:
-    # NOTE, the 2nd key is filled via `.format()` updates.
-    actor_info_repr += (
-        f'\n' # bc md?
-        f'{ws}|_{key}: **{{{key}}}**\n'
-    )
-# ^TODO? formatting and content..
-# -[ ] showing which fqme is "forward" on the
-# chart/fsp/order-mode?
-# '|_ flows: **{symbols}**\n'
-#
-# -[x] why isn't the indent working?
-# => markdown, now solved..
+for key, val in status.items():
+    if key in ('host', 'port', 'actor_name'):
+        continue
+    msg += f'\n|_ {key}: **{{{key}}}**\n'

 feed_label = FormatLabel(
-fmt_str=actor_info_repr,
+fmt_str=msg,
+# |_ streams: **{symbols}**\n
 font=_font.font,
 font_size=_font_small.px_size,
 font_color='default_lightest',
 )

-# ?TODO, remove this?
 # form.vbox.setAlignment(feed_label, Qt.AlignBottom)
 # form.vbox.setAlignment(Qt.AlignBottom)
-# _ = chart.height() - (
-#     form.height() +
-#     form.fill_bar.height()
-#     # feed_label.height()
-# )
+_ = chart.height() - (
+    form.height() +
+    form.fill_bar.height()
+    # feed_label.height()
+)

 feed_label.format(**feed.status)

 return feed_label
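Both sides of that hunk build a markdown template whose `{key}` slots are
filled later via `.format()`; a stripped-down sketch of the left branch's
explicit-field variant (the status dict below is fabricated, standing in
for `feed.status`)::

    # fields selected *in* for display, per the left branch
    fields = ['ipc', 'hist_shm', 'rt_shm', 'throttle_hz']

    tmpl = ')> **{actor_short_id}**\n\n'
    for key in fields:
        # doubled braces survive the f-string and are filled later
        tmpl += f'\n |_{key}: **{{{key}}}**\n'

    status = {  # made-up stand-in values
        'actor_short_id': 'brokerd.binance',
        'ipc': 'tcp', 'hist_shm': 'ohlcv60s', 'rt_shm': 'ohlcv1s',
        'throttle_hz': 60,
    }
    print(tmpl.format(**status))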
@@ -600,7 +600,6 @@ async def open_fsp_admin(
 kwargs=kwargs,
 ) as (cache_hit, cluster_map),

-tractor.trionics.collapse_eg(),
 trio.open_nursery() as tn,
 ):
 if cache_hit:

@@ -614,8 +613,6 @@ async def open_fsp_admin(
 )
 try:
 yield admin

-# ??TODO, does this *need* to be inside a finally?
 finally:
 # terminate all tasks via signals
 for key, entry in admin._registry.items():
@@ -285,20 +285,18 @@ class FormatLabel(QLabel):
 font_size: int,
 font_color: str,

-use_md: bool = True,

 parent=None,

 ) -> None:

 super().__init__(parent)

-# by default set the format string verbatim and expect user
-# to call ``.format()`` later (presumably they'll notice the
+# by default set the format string verbatim and expect user to
+# call ``.format()`` later (presumably they'll notice the
 # unformatted content if ``fmt_str`` isn't meant to be
 # unformatted).
 self.fmt_str = fmt_str
-# self.setText(fmt_str) # ?TODO, why here?
+self.setText(fmt_str)

 self.setStyleSheet(
 f"""QLabel {{

@@ -308,7 +306,6 @@ class FormatLabel(QLabel):
 """
 )
 self.setFont(_font.font)
-if use_md:
 self.setTextFormat(
 Qt.TextFormat.MarkdownText
 )

@@ -319,10 +316,7 @@ class FormatLabel(QLabel):
 size_policy.Expanding,
 )
 self.setAlignment(
-Qt.AlignLeft
-|
-Qt.AlignBottom
-# Qt.AlignVCenter
+Qt.AlignVCenter | Qt.AlignLeft
 )
 self.setText(self.fmt_str)
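The `use_md` flag above gates Qt's markdown renderer; a tiny standalone
PyQt6 sketch of that toggle (the label contents are made up)::

    from PyQt6.QtCore import Qt
    from PyQt6.QtWidgets import QApplication, QLabel

    app = QApplication([])
    label = QLabel('feed: **live** |_throttle: **60hz**')
    # render the `**bold**` markup instead of showing it literally
    label.setTextFormat(Qt.TextFormat.MarkdownText)
    label.show()
    app.exec()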
@@ -15,8 +15,8 @@
 # along with this program. If not, see <https://www.gnu.org/licenses/>.

 '''
-Remote control tasks for sending annotations (and maybe more cmds) to
-a chart from some other actor.
+Remote control tasks for sending annotations (and maybe more cmds)
+to a chart from some other actor.

 '''
 from __future__ import annotations

@@ -32,7 +32,6 @@ from typing import (
 )

 import tractor
-import trio
 from tractor import trionics
 from tractor import (
 Portal,

@@ -317,8 +316,6 @@ class AnnotCtl(Struct):
 )
 yield aid
 finally:
-# async ipc send op
-with trio.CancelScope(shield=True):
 await self.remove(aid)

 async def redraw(
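The shielded scope removed there is the standard trio idiom for async
cleanup that must survive cancellation of the enclosing task; a minimal
runnable sketch (the `send_remove()` coroutine is a stand-in for the ipc
send op)::

    import trio

    async def send_remove() -> None:
        await trio.sleep(0.1)  # stand-in for an async ipc send

    async def worker() -> None:
        try:
            await trio.sleep_forever()
        finally:
            # without the shield this await would be aborted at once,
            # since the surrounding scope is already cancelled
            with trio.CancelScope(shield=True):
                await send_remove()

    async def main() -> None:
        async with trio.open_nursery() as tn:
            tn.start_soon(worker)
            await trio.sleep(0.1)
            tn.cancel_scope.cancel()

    trio.run(main)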
@@ -15,8 +15,7 @@
 # along with this program. If not, see <https://www.gnu.org/licenses/>.

 """
-qompleterz: embeddable search and complete using trio, Qt and
-rapidfuzz.
+qompleterz: embeddable search and complete using trio, Qt and rapidfuzz.

 """

@@ -47,7 +46,6 @@ import time
 from pprint import pformat

 from rapidfuzz import process as fuzzy
-import tractor
 import trio
 from trio_typing import TaskStatus

@@ -55,7 +53,7 @@ from piker.ui.qt import (
 size_policy,
 align_flag,
 Qt,
-# QtCore,
+QtCore,
 QtWidgets,
 QModelIndex,
 QItemSelectionModel,

@@ -922,10 +920,7 @@ async def fill_results(

 # issue multi-provider fan-out search request and place
 # "searching.." statuses on outstanding results providers
-async with (
-    tractor.trionics.collapse_eg(),
-    trio.open_nursery() as tn
-):
+async with trio.open_nursery() as n:

 for provider, (search, pause) in (
 _searcher_cache.copy().items()

@@ -949,7 +944,7 @@ async def fill_results(
 status_field='-> searchin..',
 )

-await tn.start(
+await n.start(
 pack_matches,
 view,
 has_results,

@@ -1009,14 +1004,12 @@ async def handle_keyboard_input(
 view.set_font_size(searchbar.dpi_font.px_size)
 send, recv = trio.open_memory_channel(616)

-async with (
-    tractor.trionics.collapse_eg(), # needed?
-    trio.open_nursery() as tn
-):
+async with trio.open_nursery() as n:
 # start a background multi-searcher task which receives
 # patterns relayed from this keyboard input handler and
 # async updates the completer view's results.
-tn.start_soon(
+n.start_soon(
 partial(
 fill_results,
 searchw,
@@ -269,8 +269,6 @@ def hcolor(name: str) -> str:

 # default ohlc-bars/curve gray
 'bracket': '#666666', # like the logo
-'pikers': '#616161', # a trader shade of..
-'beast': '#161616', # in the dark alone.

 # bluish
 'charcoal': '#36454F',
@@ -21,7 +21,6 @@ Chart trading, the only way to scalp.
 from __future__ import annotations
 from contextlib import asynccontextmanager
 from dataclasses import dataclass, field
-from decimal import Decimal
 from functools import partial
 from pprint import pformat
 import time

@@ -42,6 +41,7 @@ from piker.accounting import (
 Position,
 mk_allocator,
 MktPair,
+Symbol,
 )
 from piker.clearing import (
 open_ems,

@@ -143,15 +143,6 @@ class OrderMode:
 }
 _staged_order: Order | None = None

-@property
-def curr_mkt(self) -> MktPair:
-    '''
-    Deliver the currently selected `MktPair` according
-    chart state.
-
-    '''
-    return self.chart.linked.mkt

 def on_level_change_update_next_order_info(
 self,
 level: float,

@@ -181,11 +172,7 @@ class OrderMode:
 line.update_labels(order_info)

 # update bound-in staged order
-mkt: MktPair = self.curr_mkt
-order.price: Decimal = mkt.quantize(
-    size=level,
-    quantity_type='price',
-)
+order.price = level
 order.size = order_info['size']

 # when an order is changed we flip the settings side-pane to

@@ -200,9 +187,7 @@ class OrderMode:

 ) -> LevelLine:

-# TODO, if we instead just always decimalize at the ems layer
-# we can avoid this back-n-forth casting?
-level = float(order.price)
+level = order.price

 line = order_line(
 chart or self.chart,

@@ -239,11 +224,7 @@ class OrderMode:
 # the order mode allocator but we still need to update the
 # "staged" order message we'll send to the ems
 def update_order_price(y: float) -> None:
-mkt: MktPair = self.curr_mkt
-order.price: Decimal = mkt.quantize(
-    size=y,
-    quantity_type='price',
-)
+order.price = y

 line._on_level_change = update_order_price

@@ -294,31 +275,34 @@ class OrderMode:
 chart = cursor.linked.chart
 if (
 not chart
-and
-cursor
-and
-cursor.active_plot
+and cursor
+and cursor.active_plot
 ):
 return

 chart = cursor.active_plot
-price: float = cursor._datum_xy[1]
+price = cursor._datum_xy[1]
 if not price:
 # zero prices are not supported by any means
 # since that's illogical / a no-op.
 return

+mkt: MktPair = self.chart.linked.mkt

+# NOTE : we could also use instead,
+# mkt.quantize(price, quantity_type='price')
+# but it returns a Decimal and it's probably gonna
+# be slower?
 # TODO: should we be enforcing this precision
-# at a different layer in the stack?
-# |_ might require `MktPair` tracking in the EMS?
-# |_ right now any precision error will be relayed
-# all the way back from the backend and vice-versa..
-#
-mkt: MktPair = self.curr_mkt
-price: Decimal = mkt.quantize(
-    size=price,
-    quantity_type='price',
-)
+# at a different layer in the stack? right now
+# any precision error will literally be relayed
+# all the way back from the backend.
+price = round(
+    price,
+    ndigits=mkt.price_tick_digits,
 )

 order = self._staged_order = Order(
 action=action,
 price=price,
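The left branch swaps float `round()` for a `MktPair.quantize()` returning
a `Decimal`. Shorn of piker types, tick-size quantization with the std-lib
looks roughly like this (the tick value and function name are illustrative,
not piker API)::

    from decimal import Decimal, ROUND_HALF_EVEN

    def quantize_price(
        price: float,
        price_tick: str = '0.01',  # e.g. one cent; per-market in reality
    ) -> Decimal:
        # `Decimal.quantize()` snaps to the tick's exponent, avoiding
        # the float-precision drift `round(price, ndigits=...)` leaks
        return Decimal(str(price)).quantize(
            Decimal(price_tick),
            rounding=ROUND_HALF_EVEN,
        )

    assert str(quantize_price(100.004999)) == '100.00'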
@@ -394,7 +378,7 @@ class OrderMode:
 'oid': oid,
 })

-if float(order.price) <= 0:
+if order.price <= 0:
 log.error(
 '*!? Invalid `Order.price <= 0` ?!*\n'
 # TODO: make this present multi-line in object form

@@ -531,15 +515,14 @@ class OrderMode:
 # if an order msg is provided update the line
 # **from** that msg.
 if order:
-price: float = float(order.price)
-if price <= 0:
+if order.price <= 0:
 log.error(f'Order has 0 price, cancelling..\n{order}')
 self.cancel_orders([order.oid])
 return None

-line.set_level(price)
+line.set_level(order.price)
 self.on_level_change_update_next_order_info(
-level=price,
+level=order.price,
 line=line,
 order=order,
 # use the corresponding position tracker for the

@@ -555,13 +538,14 @@ class OrderMode:

 def on_fill(
 self,

 uuid: str,
 price: float,
 time_s: float,

 pointing: str | None = None,

-) -> bool:
+) -> None:
 '''
 Fill msg handler.

@@ -574,33 +558,13 @@ class OrderMode:
 - update fill bar size

 '''
-# XXX WARNING XXX
-# if a `Status(resp='error')` arrives *before* this
-# fill-status, the `.dialogs` entry may have already been
-# popped and thus the below will skipped.
-#
-# NOTE, to avoid this confusing scenario ensure that any
-# errors delivered thru from the broker-backend are not just
-# "noisy reporting" (like is very common from IB..) and are
-# instead ONLY errors-causing-order-dialog-cancellation!
-if not (dialog := self.dialogs.get(uuid)):
-    log.warning(
-        f'Order was already cleared from `.dialogs` ??\n'
-        f'uuid: {uuid!r}\n'
-    )
-    return False
+dialog = self.dialogs[uuid]

 lines = dialog.lines
 chart = self.chart

-if not lines:
-    log.warn("No line(s) for order {uuid}!?")
-    return False

-# update line state(s)
-#
-# ?XXX this fails on certain types of races?
+# XXX: seems to fail on certain types of races?
 # assert len(lines) == 2
+if lines:
 flume: Flume = self.feed.flumes[chart.linked.mkt.fqme]
 _, _, ratio = flume.get_ds_info()
|
||||||
pointing=pointing,
|
pointing=pointing,
|
||||||
color=lines[0].color
|
color=lines[0].color
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
log.warn("No line(s) for order {uuid}!?")
|
||||||
|
|
||||||
def on_cancel(
|
def on_cancel(
|
||||||
self,
|
self,
|
||||||
uuid: str,
|
uuid: str
|
||||||
|
|
||||||
) -> bool:
|
) -> None:
|
||||||
|
|
||||||
msg: Order|None = self.client._sent_orders.pop(uuid, None)
|
msg: Order = self.client._sent_orders.pop(uuid, None)
|
||||||
if msg is None:
|
|
||||||
log.warning(
|
|
||||||
f'Received cancel for unsubmitted order {pformat(msg)}'
|
|
||||||
)
|
|
||||||
return False
|
|
||||||
|
|
||||||
# remove GUI line, show cursor.
|
if msg is not None:
|
||||||
self.lines.remove_line(uuid=uuid)
|
self.lines.remove_line(uuid=uuid)
|
||||||
self.chart.linked.cursor.show_xhair()
|
self.chart.linked.cursor.show_xhair()
|
||||||
|
|
||||||
# remove msg dialog (history)
|
dialog = self.dialogs.pop(uuid, None)
|
||||||
dialog: Dialog|None = self.dialogs.pop(uuid, None)
|
|
||||||
if dialog:
|
if dialog:
|
||||||
dialog.last_status_close()
|
dialog.last_status_close()
|
||||||
|
else:
|
||||||
return True
|
log.warning(
|
||||||
|
f'Received cancel for unsubmitted order {pformat(msg)}'
|
||||||
|
)
|
||||||
|
|
||||||
def cancel_orders_under_cursor(self) -> list[str]:
|
def cancel_orders_under_cursor(self) -> list[str]:
|
||||||
return self.cancel_orders(
|
return self.cancel_orders(
|
||||||
|
@ -720,9 +681,9 @@ class OrderMode:
|
||||||
) -> Dialog | None:
|
) -> Dialog | None:
|
||||||
# NOTE: the `.order` attr **must** be set with the
|
# NOTE: the `.order` attr **must** be set with the
|
||||||
# equivalent order msg in order to be loaded.
|
# equivalent order msg in order to be loaded.
|
||||||
order: Order = msg.req
|
order = msg.req
|
||||||
oid = str(msg.oid)
|
oid = str(msg.oid)
|
||||||
symbol: str = order.symbol
|
symbol = order.symbol
|
||||||
|
|
||||||
# TODO: MEGA UGGG ZONEEEE!
|
# TODO: MEGA UGGG ZONEEEE!
|
||||||
src = msg.src
|
src = msg.src
|
||||||
|
@ -741,22 +702,13 @@ class OrderMode:
|
||||||
order.oid = str(order.oid)
|
order.oid = str(order.oid)
|
||||||
order.brokers = [brokername]
|
order.brokers = [brokername]
|
||||||
|
|
||||||
# ?TODO? change this over to `MktPair`, but it's gonna be
|
# TODO: change this over to `MktPair`, but it's
|
||||||
# tough since we don't have any such data really in our
|
# gonna be tough since we don't have any such data
|
||||||
# clearing msg schema..
|
# really in our clearing msg schema..
|
||||||
# BUT WAIT! WHY do we even want/need this!?
|
order.symbol = Symbol.from_fqme(
|
||||||
#
|
fqsn=fqme,
|
||||||
# order.symbol = self.curr_mkt
|
info={},
|
||||||
#
|
)
|
||||||
# XXX, the old approach.. which i don't quire member why..
|
|
||||||
# -[ ] verify we for sure don't require this any more!
|
|
||||||
# |_https://github.com/pikers/piker/issues/517
|
|
||||||
#
|
|
||||||
# order.symbol = Symbol.from_fqme(
|
|
||||||
# fqsn=fqme,
|
|
||||||
# info={},
|
|
||||||
# )
|
|
||||||
|
|
||||||
maybe_dialog: Dialog | None = self.submit_order(
|
maybe_dialog: Dialog | None = self.submit_order(
|
||||||
send_msg=False,
|
send_msg=False,
|
||||||
order=order,
|
order=order,
|
||||||
|
@ -814,7 +766,6 @@ async def open_order_mode(
|
||||||
brokerd_accounts,
|
brokerd_accounts,
|
||||||
ems_dialog_msgs,
|
ems_dialog_msgs,
|
||||||
),
|
),
|
||||||
tractor.trionics.collapse_eg(),
|
|
||||||
trio.open_nursery() as tn,
|
trio.open_nursery() as tn,
|
||||||
|
|
||||||
):
|
):
|
||||||
|
@ -1079,23 +1030,13 @@ async def process_trade_msg(
|
||||||
if name in (
|
if name in (
|
||||||
'position',
|
'position',
|
||||||
):
|
):
|
||||||
mkt: MktPair = mode.chart.linked.mkt
|
sym: MktPair = mode.chart.linked.mkt
|
||||||
pp_msg_symbol = msg['symbol'].lower()
|
pp_msg_symbol = msg['symbol'].lower()
|
||||||
pp_msg_bsmktid = msg['bs_mktid']
|
fqme = sym.fqme
|
||||||
fqme = mkt.fqme
|
broker = sym.broker
|
||||||
broker = mkt.broker
|
|
||||||
if (
|
if (
|
||||||
# match on any backed-specific(-unique)-ID first!
|
|
||||||
(
|
|
||||||
pp_msg_bsmktid
|
|
||||||
and
|
|
||||||
mkt.bs_mktid == pp_msg_bsmktid
|
|
||||||
)
|
|
||||||
or
|
|
||||||
# OW try against what's provided as an FQME..
|
|
||||||
pp_msg_symbol == fqme
|
pp_msg_symbol == fqme
|
||||||
or
|
or pp_msg_symbol == fqme.removesuffix(f'.{broker}')
|
||||||
pp_msg_symbol == fqme.removesuffix(f'.{broker}')
|
|
||||||
):
|
):
|
||||||
log.info(
|
log.info(
|
||||||
f'Loading position for `{fqme}`:\n'
|
f'Loading position for `{fqme}`:\n'
|
||||||
|
@ -1118,7 +1059,7 @@ async def process_trade_msg(
|
||||||
return
|
return
|
||||||
|
|
||||||
msg = Status(**msg)
|
msg = Status(**msg)
|
||||||
# resp: str = msg.resp
|
resp = msg.resp
|
||||||
oid = msg.oid
|
oid = msg.oid
|
||||||
dialog: Dialog = mode.dialogs.get(oid)
|
dialog: Dialog = mode.dialogs.get(oid)
|
||||||
|
|
||||||
|
@ -1160,7 +1101,7 @@ async def process_trade_msg(
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
):
|
):
|
||||||
msg.req: Order = order
|
msg.req = order
|
||||||
dialog: (
|
dialog: (
|
||||||
Dialog
|
Dialog
|
||||||
# NOTE: on an invalid order submission (eg.
|
# NOTE: on an invalid order submission (eg.
|
||||||
|
@ -1182,33 +1123,20 @@ async def process_trade_msg(
|
||||||
mode.on_submit(oid)
|
mode.on_submit(oid)
|
||||||
|
|
||||||
case Status(resp='error'):
|
case Status(resp='error'):
|
||||||
# TODO: parse into broker-side msg, or should we
|
|
||||||
# expect it to just be **that** msg verbatim (since
|
|
||||||
# we'd presumably have only 1 `Error` msg-struct)
|
|
||||||
broker_msg: dict = msg.brokerd_msg
|
|
||||||
|
|
||||||
# XXX NOTE, this presumes the rxed "error" is
|
|
||||||
# order-dialog-cancel-causing, THUS backends much ONLY
|
|
||||||
# relay errors of this "severity"!!
|
|
||||||
log.error(
|
|
||||||
f'Order errored ??\n'
|
|
||||||
f'oid: {oid!r}\n'
|
|
||||||
f'\n'
|
|
||||||
f'{pformat(broker_msg)}\n'
|
|
||||||
f'\n'
|
|
||||||
f'=> CANCELLING ORDER DIALOG <=\n'
|
|
||||||
|
|
||||||
# from tractor.devx.pformat import ppfmt
|
|
||||||
# !TODO LOL, wtf the msg is causing
|
|
||||||
# a recursion bug!
|
|
||||||
# -[ ] get this shit on msgspec stat!
|
|
||||||
# f'{ppfmt(broker_msg)}'
|
|
||||||
)
|
|
||||||
# do all the things for a cancel:
|
# do all the things for a cancel:
|
||||||
# - drop order-msg dialog from client table
|
# - drop order-msg dialog from client table
|
||||||
# - delete level line from view
|
# - delete level line from view
|
||||||
mode.on_cancel(oid)
|
mode.on_cancel(oid)
|
||||||
|
|
||||||
|
# TODO: parse into broker-side msg, or should we
|
||||||
|
# expect it to just be **that** msg verbatim (since
|
||||||
|
# we'd presumably have only 1 `Error` msg-struct)
|
||||||
|
broker_msg: dict = msg.brokerd_msg
|
||||||
|
log.error(
|
||||||
|
f'Order {oid}->{resp} with:\n{pformat(broker_msg)}'
|
||||||
|
)
|
||||||
|
|
||||||
case Status(resp='canceled'):
|
case Status(resp='canceled'):
|
||||||
# delete level line from view
|
# delete level line from view
|
||||||
mode.on_cancel(oid)
|
mode.on_cancel(oid)
|
||||||
|
@ -1223,10 +1151,10 @@ async def process_trade_msg(
|
||||||
# TODO: UX for a "pending" clear/live order
|
# TODO: UX for a "pending" clear/live order
|
||||||
log.info(f'Dark order triggered for {fmtmsg}')
|
log.info(f'Dark order triggered for {fmtmsg}')
|
||||||
|
|
||||||
# TODO: do the struct-msg version, blah blah..
|
|
||||||
# req=Order(exec_mode='live', action='alert') as req,
|
|
||||||
case Status(
|
case Status(
|
||||||
resp='triggered',
|
resp='triggered',
|
||||||
|
# TODO: do the struct-msg version, blah blah..
|
||||||
|
# req=Order(exec_mode='live', action='alert') as req,
|
||||||
req={
|
req={
|
||||||
'exec_mode': 'live',
|
'exec_mode': 'live',
|
||||||
'action': 'alert',
|
'action': 'alert',
|
||||||
|
@ -1238,7 +1166,7 @@ async def process_trade_msg(
|
||||||
tm = time.time()
|
tm = time.time()
|
||||||
mode.on_fill(
|
mode.on_fill(
|
||||||
oid,
|
oid,
|
||||||
price=float(req.price),
|
price=req.price,
|
||||||
time_s=tm,
|
time_s=tm,
|
||||||
)
|
)
|
||||||
mode.lines.remove_line(uuid=oid)
|
mode.lines.remove_line(uuid=oid)
|
||||||
|
@ -1293,7 +1221,7 @@ async def process_trade_msg(
|
||||||
tm = details['broker_time']
|
tm = details['broker_time']
|
||||||
mode.on_fill(
|
mode.on_fill(
|
||||||
oid,
|
oid,
|
||||||
price=float(details['price']),
|
price=details['price'],
|
||||||
time_s=tm,
|
time_s=tm,
|
||||||
pointing='up' if action == 'buy' else 'down',
|
pointing='up' if action == 'buy' else 'down',
|
||||||
)
|
)
|
||||||
|
|
|
@@ -23,7 +23,7 @@ name = "piker"
 version = "0.1.0a0dev0"
 description = "trading gear for hackers"
 authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }]
-requires-python = ">=3.12"
+requires-python = ">=3.12, <3.13"
 license = "AGPL-3.0-or-later"
 readme = "README.rst"
 keywords = [

@@ -39,8 +39,8 @@ classifiers = [
 "Operating System :: POSIX :: Linux",
 "Programming Language :: Python :: Implementation :: CPython",
 "Programming Language :: Python :: 3 :: Only",
+"Programming Language :: Python :: 3.11",
 "Programming Language :: Python :: 3.12",
-"Programming Language :: Python :: 3.13",
 "Intended Audience :: Financial and Insurance Industry",
 "Intended Audience :: Science/Research",
 "Intended Audience :: Developers",

@@ -49,13 +49,13 @@ classifiers = [
 dependencies = [
 "async-generator >=1.10, <2.0.0",
 "attrs >=23.1.0, <24.0.0",
-"bidict >=0.23.1",
+"bidict >=0.22.1, <0.23.0",
 "colorama >=0.4.6, <0.5.0",
 "colorlog >=6.7.0, <7.0.0",
 "ib-insync >=0.9.86, <0.10.0",
-"numpy>=2.0",
-"polars >=0.20.6",
-"polars-fuzzy-match>=0.1.5",
+"numba >=0.59.0, <0.60.0",
+"numpy >=1.25, <2.0",
+"polars >=0.18.13, <0.19.0",
 "pygments >=2.16.1, <3.0.0",
 "rich >=13.5.2, <14.0.0",
 "tomli >=2.0.1, <3.0.0",

@@ -65,18 +65,16 @@ dependencies = [
 "typer >=0.9.0, <1.0.0",
 "rapidfuzz >=3.5.2, <4.0.0",
 "pdbp >=1.5.0, <2.0.0",
-"trio >=0.27",
+"trio >=0.24, <0.25",
 "pendulum >=3.0.0, <4.0.0",
 "httpx >=0.27.0, <0.28.0",
 "cryptofeed >=2.4.0, <3.0.0",
-"pyarrow>=18.0.0",
+"pyarrow >=17.0.0, <18.0.0",
 "websockets ==12.0",
-"msgspec>=0.19.0,<0.20",
+"msgspec",
 "tractor",
+"asyncvnc",
 "tomlkit",
-"trio-typing>=0.10.0",
-"numba>=0.61.0",
-"pyvnc",
 ]

 [project.optional-dependencies]

@@ -107,32 +105,15 @@ uis = [
 # console ehancements and eventually remote debugging extras/helpers.
 # use `uv --dev` to enable
 dev = [
-"pytest",
+"pytest >=6.0.0, <7.0.0",
 "elasticsearch >=8.9.0, <9.0.0",
+"xonsh >=0.14.2, <0.15.0",
 "prompt-toolkit ==3.0.40",
 "cython >=3.0.0, <4.0.0",
 "greenback >=1.1.1, <2.0.0",
 "ruff>=0.9.6",
-"pyperclip>=1.9.0",
-"i3ipc>=2.2.1",
-
-# ?from git, see below.
-"xonsh",
 ]

-[tool.pytest.ini_options]
-# https://docs.pytest.org/en/stable/reference/reference.html#configuration-options
-testpaths = [
-    "tests",
-]
-# https://docs.pytest.org/en/stable/reference/reference.html#confval-console_output_style
-console_output_style = 'progress'
-
-# https://docs.pytest.org/en/stable/how-to/plugins.html#disabling-plugins-from-autoloading
-# https://docs.pytest.org/en/stable/how-to/plugins.html#deactivating-unregistering-a-plugin-by-name
-addopts = '-p no:xonsh'


 [project.scripts]
 piker = "piker.cli:cli"
 pikerd = "piker.cli:pikerd"

@@ -144,24 +125,9 @@ include = ["piker"]
 [tool.hatch.build.targets.wheel]
 include = ["piker"]


-# TODO? move to a `uv.toml`?
-[tool.uv]
-python-preference = 'system'
-python-downloads = 'manual'


 [tool.uv.sources]
 pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
+asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" }
 tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" }
-pyvnc = { git = "https://github.com/regulad/pyvnc.git" }
-
-# TODO, long term we should be synced to upstream `main` branch!
-# tractor = { git = "https://github.com/goodboy/tractor.git", branch ="piker_pin" }
-tractor = { git = "https://pikers.dev/goodboy/tractor", branch = "piker_pin" }
-
-# goodboy's dev-env
-# XXX for @goodboy's hackin dev env, usually there's something new in
-# the runtime being seriously tested here Bp
-# tractor = { path = "../tractor/", editable = true }
-# xonsh = { path = "../xonsh", editable = true }
+msgspec = { git = "https://github.com/jcrist/msgspec.git" }
+tractor = { path = "../tractor", editable = true }
@@ -62,9 +62,8 @@ ignore-init-module-imports = false
 fixable = ["ALL"]
 unfixable = []

-# TODO? uhh why no work!?
 # Allow unused variables when underscore-prefixed.
-# dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

 [format]
 # Use single quotes in `ruff format`.
@ -1,22 +1,4 @@
|
||||||
# piker: trading gear for hackers
|
"""
|
||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
|
|
||||||
'''
|
|
||||||
A per-display, DPI (scaling) info dumper.
|
|
||||||
|
|
||||||
Resource list for mucking with DPIs on multiple screens:
|
Resource list for mucking with DPIs on multiple screens:
|
||||||
|
|
||||||
- https://stackoverflow.com/questions/42141354/convert-pixel-size-to-point-size-for-fonts-on-multiple-platforms
|
- https://stackoverflow.com/questions/42141354/convert-pixel-size-to-point-size-for-fonts-on-multiple-platforms
|
||||||
|
@ -30,86 +12,89 @@ Resource list for mucking with DPIs on multiple screens:
|
||||||
- https://stackoverflow.com/questions/16561879/what-is-the-difference-between-logicaldpix-and-physicaldpix-in-qt
|
- https://stackoverflow.com/questions/16561879/what-is-the-difference-between-logicaldpix-and-physicaldpix-in-qt
|
||||||
- https://doc.qt.io/qt-5/qguiapplication.html#screenAt
|
- https://doc.qt.io/qt-5/qguiapplication.html#screenAt
|
||||||
|
|
||||||
'''
|
"""
|
||||||
|
|
||||||
from pyqtgraph import QtGui
|
from pyqtgraph import QtGui
|
||||||
from PyQt6 import (
|
from PyQt5.QtCore import (
|
||||||
QtCore,
|
Qt, QCoreApplication
|
||||||
QtWidgets,
|
|
||||||
)
|
|
||||||
from PyQt6.QtCore import (
|
|
||||||
Qt,
|
|
||||||
QCoreApplication,
|
|
||||||
QSize,
|
|
||||||
QRect,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Proper high DPI scaling is available in Qt >= 5.6.0. This attibute
|
# Proper high DPI scaling is available in Qt >= 5.6.0. This attibute
|
||||||
# must be set before creating the application
|
# must be set before creating the application
|
||||||
if hasattr(Qt, 'AA_EnableHighDpiScaling'):
|
if hasattr(Qt, 'AA_EnableHighDpiScaling'):
|
||||||
QCoreApplication.setAttribute(
|
QCoreApplication.setAttribute(Qt.AA_EnableHighDpiScaling, True)
|
||||||
Qt.AA_EnableHighDpiScaling,
|
|
||||||
True,
|
|
||||||
)
|
|
||||||
|
|
||||||
if hasattr(Qt, 'AA_UseHighDpiPixmaps'):
|
if hasattr(Qt, 'AA_UseHighDpiPixmaps'):
|
||||||
QCoreApplication.setAttribute(
|
QCoreApplication.setAttribute(Qt.AA_UseHighDpiPixmaps, True)
|
||||||
Qt.AA_UseHighDpiPixmaps,
|
|
||||||
True,
|
|
||||||
)
|
|
||||||
|
|
||||||
app = QtWidgets.QApplication([])
|
|
||||||
window = QtWidgets.QMainWindow()
|
app = QtGui.QApplication([])
|
||||||
main_widget = QtWidgets.QWidget()
|
window = QtGui.QMainWindow()
|
||||||
|
main_widget = QtGui.QWidget()
|
||||||
window.setCentralWidget(main_widget)
 window.setCentralWidget(main_widget)
 window.show()
 
-pxr: float = main_widget.devicePixelRatioF()
+pxr = main_widget.devicePixelRatioF()
 
-# explicitly get main widget and primary displays
-current_screen: QtGui.QScreen = app.screenAt(
-    main_widget.geometry().center()
-)
-primary_screen: QtGui.QScreen = app.primaryScreen()
-
-screen: QtGui.QScreen
-for screen in app.screens():
-    name: str = screen.name()
-    model: str = screen.model().rstrip()
-    size: QSize = screen.size()
-    geo: QRect = screen.availableGeometry()
-    phydpi: float = screen.physicalDotsPerInch()
-    logdpi: float = screen.logicalDotsPerInch()
-    is_primary: bool = screen is primary_screen
-    is_current: bool = screen is current_screen
-
-    print(
-        f'------ screen name: {name} ------\n'
-        f'|_primary: {is_primary}\n'
-        f' _current: {is_current}\n'
-        f' _model: {model}\n'
-        f' _screen size: {size}\n'
-        f' _screen geometry: {geo}\n'
-        f' _devicePixelRationF(): {pxr}\n'
-        f' _physical dpi: {phydpi}\n'
-        f' _logical dpi: {logdpi}\n'
-    )
-
-# app-wide font info
+# screen_num = app.desktop().screenNumber()
+# screen = app.screens()[screen_num]
+screen = app.screenAt(main_widget.geometry().center())
+name = screen.name()
+size = screen.size()
+geo = screen.availableGeometry()
+phydpi = screen.physicalDotsPerInch()
+logdpi = screen.logicalDotsPerInch()
+
+print(
+    # f'screen number: {screen_num}\n',
+    f'screen name: {name}\n'
+    f'screen size: {size}\n'
+    f'screen geometry: {geo}\n\n'
+    f'devicePixelRationF(): {pxr}\n'
+    f'physical dpi: {phydpi}\n'
+    f'logical dpi: {logdpi}\n'
+)
+
+print('-'*50)
+
+screen = app.primaryScreen()
+
+name = screen.name()
+size = screen.size()
+geo = screen.availableGeometry()
+phydpi = screen.physicalDotsPerInch()
+logdpi = screen.logicalDotsPerInch()
+
+print(
+    # f'screen number: {screen_num}\n',
+    f'screen name: {name}\n'
+    f'screen size: {size}\n'
+    f'screen geometry: {geo}\n\n'
+    f'devicePixelRationF(): {pxr}\n'
+    f'physical dpi: {phydpi}\n'
+    f'logical dpi: {logdpi}\n'
+)
+
+# app-wide font
 font = QtGui.QFont("Hack")
 # use pixel size to be cross-resolution compatible?
 font.setPixelSize(6)
 
 fm = QtGui.QFontMetrics(font)
-fontdpi: float = fm.fontDpi()
-font_h: int = fm.height()
+fontdpi = fm.fontDpi()
+font_h = fm.height()
 
-string: str = '10000'
-str_br: QtCore.QRect = fm.boundingRect(string)
-str_w: int = str_br.width()
+string = '10000'
+str_br = fm.boundingRect(string)
+str_w = str_br.width()
 
 print(
-    f'------ global font settings ------\n'
+    # f'screen number: {screen_num}\n',
     f'font dpi: {fontdpi}\n'
     f'font height: {font_h}\n'
     f'string bounding rect: {str_br}\n'
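
For a stand-alone repro of the screen/DPI readout probed above, a minimal sketch (assuming a PyQt6 install; the repo's pinned Qt binding may differ)::

    # dpi_probe.py - enumerate attached displays and dump their DPI info
    import sys
    from PyQt6 import QtWidgets

    app = QtWidgets.QApplication(sys.argv)
    primary = app.primaryScreen()
    for screen in app.screens():
        print(
            f'{screen.name()} (primary={screen is primary}): '
            f'size={screen.size()}, '
            f'physical-dpi={screen.physicalDotsPerInch()}, '
            f'logical-dpi={screen.logicalDotsPerInch()}'
        )
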
@@ -15,12 +15,6 @@ from piker.service import (
 from piker.log import get_console_log
 
 
-# include `tractor`'s built-in fixtures!
-pytest_plugins: tuple[str] = (
-    "tractor._testing.pytest",
-)
-
-
 def pytest_addoption(parser):
     parser.addoption("--ll", action="store", dest='loglevel',
                      default=None, help="logging level to set when testing")
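
The dropped `conftest.py` lines use pytest's standard `pytest_plugins` hook: each dotted path is imported at collection time and its fixtures become available suite-wide. The same mechanism in miniature (the plugin path mirrors the removed line and is assumed to ship with `tractor`)::

    # conftest.py (project root)
    # pytest imports each listed module and exposes its fixtures
    # to every test in the suite.
    pytest_plugins: tuple[str, ...] = (
        'tractor._testing.pytest',
    )
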
@@ -12,14 +12,12 @@ from piker import config
 from piker.accounting import (
     Account,
     calc,
-    open_account,
-    load_account,
-    load_account_from_ledger,
-    open_trade_ledger,
     Position,
     TransactionLedger,
+    open_trade_ledger,
+    load_account,
+    load_account_from_ledger,
 )
-import tractor
 
 
 def test_root_conf_networking_section(
@@ -55,17 +53,12 @@ def test_account_file_default_empty(
 )
 def test_paper_ledger_position_calcs(
     fq_acnt: tuple[str, str],
-    debug_mode: bool,
 ):
     broker: str
     acnt_name: str
     broker, acnt_name = fq_acnt
 
-    accounts_path: Path = (
-        config.repodir()
-        / 'tests'
-        / '_inputs'  # tests-local-subdir
-    )
+    accounts_path: Path = config.repodir() / 'tests' / '_inputs'
 
     ldr: TransactionLedger
     with (
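
Both spellings of `accounts_path` above resolve identically since `pathlib.Path.__truediv__` joins segments left-to-right; a quick check with a stand-in for `config.repodir()`::

    from pathlib import Path

    repo = Path('/tmp/piker')  # stand-in for config.repodir()
    multiline = (
        repo
        / 'tests'
        / '_inputs'
    )
    assert multiline == repo / 'tests' / '_inputs'
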
@@ -84,7 +77,6 @@ def test_paper_ledger_position_calcs(
         ledger=ldr,
 
         _fp=accounts_path,
-        debug_mode=debug_mode,
 
     ) as (dfs, ledger),
 
@@ -110,87 +102,3 @@ def test_paper_ledger_position_calcs(
         df = dfs[xrp]
         assert df['cumsize'][-1] == 0
         assert pos.cumsize == 0
-
-
-@pytest.mark.parametrize(
-    'fq_acnt',
-    [
-        ('ib', 'algopaper'),
-    ],
-)
-def test_ib_account_with_duplicated_mktids(
-    fq_acnt: tuple[str, str],
-    debug_mode: bool,
-):
-    # ?TODO, once we start symcache-incremental-update-support?
-    # from piker.data import (
-    #     open_symcache,
-    # )
-    #
-    # async def main():
-    #     async with (
-    #         # TODO: do this as part of `open_account()`!?
-    #         open_symcache(
-    #             'ib',
-    #             only_from_memcache=True,
-    #         ) as symcache,
-    #     ):
-
-    from piker.brokers.ib.ledger import (
-        tx_sort,
-
-        # ?TODO, once we want to pull lowlevel txns and process them?
-        # norm_trade_records,
-        # update_ledger_from_api_trades,
-    )
-
-    broker: str
-    acnt_id: str = 'algopaper'
-    broker, acnt_id = fq_acnt
-    accounts_def = config.load_accounts([broker])
-    assert accounts_def[f'{broker}.{acnt_id}']
-
-    ledger: TransactionLedger
-    acnt: Account
-    with (
-        tractor.devx.maybe_open_crash_handler(pdb=debug_mode),
-
-        open_trade_ledger(
-            'ib',
-            acnt_id,
-            tx_sort=tx_sort,
-
-            # TODO, eventually incrementally updated for IB..
-            # symcache=symcache,
-            symcache=None,
-            allow_from_sync_code=True,
-
-        ) as ledger,
-
-        open_account(
-            'ib',
-            acnt_id,
-            write_on_exit=True,
-        ) as acnt,
-    ):
-        # per input params
-        symcache = ledger.symcache
-        assert not (
-            symcache.pairs
-            or
-            symcache.pairs
-            or
-            symcache.mktmaps
-        )
-        # re-compute all positions that have changed state.
-        # TODO: likely we should change the API to return the
-        # position updates from `.update_from_ledger()`?
-        active, closed = acnt.dump_active()
-
-        # breakpoint()
-
-        # TODO, (see above imports as well) incremental update from
-        # (updated) ledger?
-        # -[ ] pull some code from `.ib.broker` content.
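
The removed test leans on Python 3.10+ parenthesized `with` blocks to stack its crash-handler, ledger and account managers; the same shape in miniature (the managers here are toy stand-ins)::

    from contextlib import contextmanager

    @contextmanager
    def open_thing(name: str):
        # toy manager standing in for open_trade_ledger()/open_account()
        print(f'enter {name}')
        yield name
        print(f'exit {name}')

    # entered left-to-right, exited in reverse order
    with (
        open_thing('ledger') as ledger,
        open_thing('account') as acnt,
    ):
        print(ledger, acnt)
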
@@ -42,7 +42,7 @@ from piker.accounting import (
     unpack_fqme,
 )
 from piker.accounting import (
-    open_account,
+    open_pps,
     Position,
 )
 
@@ -136,7 +136,7 @@ def load_and_check_pos(
 
 ) -> None:
 
-    with open_account(ppmsg.broker, ppmsg.account) as table:
+    with open_pps(ppmsg.broker, ppmsg.account) as table:
 
         if ppmsg.size == 0:
             assert ppmsg.symbol not in table.pps
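
The two hunks above track the accounting-API rename between the branches: one side exports `open_account`, the other `open_pps`. A hypothetical shim (not part of either branch) for code that must import against both::

    # compat import - purely illustrative
    try:
        from piker.accounting import open_account
    except ImportError:
        # fall back to the older name
        from piker.accounting import open_pps as open_account
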
@@ -179,7 +179,7 @@ def test_ems_err_on_bad_broker(
         # NOTE: emsd should error on the actor's enabled modules
         # import phase, when looking for a backend named `doggy`.
         except tractor.RemoteActorError as re:
-            assert re.type is ModuleNotFoundError
+            assert re.type == ModuleNotFoundError
 
     run_and_tollerate_cancels(load_bad_fqme)
 
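
Note the `is` vs `==` flip on the boxed error type: for plain classes both checks agree (class objects compare by identity unless a metaclass overrides `__eq__`), so this is a readability change, with `is` stating the identity test explicitly::

    exc_type: type = ModuleNotFoundError  # stand-in for a remote error's .type

    # both hold for ordinary classes; `is` just makes intent explicit
    assert exc_type == ModuleNotFoundError
    assert exc_type is ModuleNotFoundError
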
@@ -142,12 +142,7 @@ async def test_concurrent_tokens_refresh(us_symbols, loglevel):
     # async with tractor.open_nursery() as n:
     #     await n.run_in_actor('other', intermittently_refresh_tokens)
 
-    async with (
-        tractor.trionics.collapse_eg(),
-        trio.open_nursery(
-            # strict_exception_groups=False,
-        ) as n
-    ):
+    async with trio.open_nursery() as n:
 
         quoter = await qt.stock_quoter(client, us_symbols)
 
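
Background for the nursery churn in this hunk and the next: recent `trio` releases default to strict exception-group semantics, so even a single failing child task surfaces as a `BaseExceptionGroup`, and the deprecated `strict_exception_groups=False` flag merely papered over that; the removed `tractor.trionics.collapse_eg()` wrapper appears to collapse single-member groups instead. A minimal sketch of handling the strict behaviour (needs Python 3.11+ for `except*`)::

    import trio

    async def boom() -> None:
        raise ValueError('child task failed')

    async def main() -> None:
        try:
            async with trio.open_nursery() as n:
                n.start_soon(boom)
        except* ValueError as eg:
            # strict semantics: the error arrives wrapped in a group
            print(f'caught: {eg.exceptions}')

    trio.run(main)
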
@@ -388,9 +383,7 @@ async def test_quote_streaming(tmx_symbols, loglevel, stream_what):
     else:
         symbols = [tmx_symbols]
 
-    async with trio.open_nursery(
-        strict_exception_groups=False,
-    ) as n:
+    async with trio.open_nursery() as n:
         for syms, func in zip(symbols, stream_what):
             n.start_soon(func, feed, syms)
 