commit e51ba404fc

@@ -43,16 +43,21 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v3

-      - name: Build DB container
-        run: docker build -t piker:elastic dockering/elastic
+      # elastic only
+      # - name: Build DB container
+      #   run: docker build -t piker:elastic dockering/elastic

       - name: Setup python
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: '3.10'

+      # elastic only
+      # - name: Install dependencies
+      #   run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager

       - name: Install dependencies
-        run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
+        run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager

       - name: Test suite
         run: pytest tests -rs
@@ -1,19 +1,32 @@
 [questrade]
-refresh_token = ""
-access_token = ""
-api_server = "https://api06.iq.questrade.com/"
+refresh_token = ''
+access_token = ''
+api_server = 'https://api06.iq.questrade.com/'
 expires_in = 1800
-token_type = "Bearer"
+token_type = 'Bearer'
 expires_at = 1616095326.355846


+[deribit]
+key_id = ''
+key_secret = ''
+
+
 [kraken]
-key_descr = "api_0"
-api_key = ""
-secret = ""
+key_descr = ''
+api_key = ''
+secret = ''


+[kucoin]
+key_id = ''
+key_secret = ''
+key_passphrase = ''
+
+
 [ib]
 hosts = [
-  "127.0.0.1",
+  '127.0.0.1',
 ]
 # XXX: the order in which ports will be scanned
 # (by the `brokerd` daemon-actor)

@@ -30,8 +43,8 @@ ports = [
 # is not supported so you have to manually download
 # and XML report and put it in a location that can be
 # accessed by the ``brokerd.ib`` backend code for parsing.
-flex_token = '666666666666666666666666'
-flex_trades_query_id = '666666'  # live account
+flex_token = ''
+flex_trades_query_id = ''  # live account

 # when clients are being scanned this determines
 # which clients are preferred to be used for data

@@ -47,11 +60,6 @@ prefer_data_account = [
 # the order in which accounts will be selectable
 # in the order mode UI (if found via clients during
 # API-app scanning)when a new symbol is loaded.
-paper = "XX0000000"
-margin = "X0000000"
-ira = "X0000000"
-
-
-[deribit]
-key_id = 'XXXXXXXX'
-key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'
+paper = 'XX0000000'
+margin = 'X0000000'
+ira = 'X0000000'
@@ -0,0 +1,4 @@
+[network]
+tsdb.backend = 'marketstore'
+tsdb.host = 'localhost'
+tsdb.grpc_port = 5995
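For reference, the new `[network]` table can be read back with a stdlib TOML parser; a minimal sketch, assuming a local `conf.toml` (piker's own `config` module resolves the real per-user path) and `tomllib` on Python 3.11+ (the `tomli` package, used elsewhere in this commit, exposes the same API on 3.10):

    import tomllib  # or on py3.10: import tomli as tomllib

    with open('conf.toml', 'rb') as f:  # tomllib requires binary mode
        conf = tomllib.load(f)

    # TOML dotted keys nest: tsdb.backend -> conf['network']['tsdb']['backend']
    tsdb = conf['network']['tsdb']
    assert tsdb['backend'] == 'marketstore'
    print(tsdb['host'], tsdb['grpc_port'])  # localhost 5995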
@@ -2,8 +2,21 @@
 # https://github.com/waytrade/ib-gateway-docker/blob/master/docker-compose.yml
 version: "3.5"


 services:

   ib_gw_paper:

+    # apparently java is a mega cukc:
+    # https://stackoverflow.com/a/56895801
+    # https://bugs.openjdk.org/browse/JDK-8150460
+    ulimits:
+      # nproc: 65535
+      nproc: 6000
+      nofile:
+        soft: 2000
+        hard: 3000
+
     # other image tags available:
     # https://github.com/waytrade/ib-gateway-docker#supported-tags
     # image: waytrade/ib-gateway:981.3j
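The limits the compose file now sets (`nproc: 6000`, `nofile` soft 2000 / hard 3000) can be sanity-checked from inside the running container; a hedged sketch using the stdlib `resource` module (Linux-only constants):

    import resource

    # open-file descriptor limits; compose sets soft=2000 hard=3000
    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    print('nofile:', soft, hard)

    # max user processes/threads (what the JVM links cite); compose sets 6000
    nsoft, nhard = resource.getrlimit(resource.RLIMIT_NPROC)
    print('nproc:', nsoft, nhard)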
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -14,14 +14,20 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
+'''
 Cacheing apis and toolz.

-"""
+'''

 from collections import OrderedDict
 from contextlib import (
-    asynccontextmanager,
+    asynccontextmanager as acm,
+)
+from typing import (
+    Awaitable,
+    Callable,
+    ParamSpec,
+    TypeVar,
 )

 from tractor.trionics import maybe_open_context

@@ -32,19 +38,54 @@ from .log import get_logger

 log = get_logger(__name__)

+T = TypeVar("T")
+P = ParamSpec("P")

-def async_lifo_cache(maxsize=128):
-    """Async ``cache`` with a LIFO policy.
+# TODO: move this to `tractor.trionics`..
+# - egs. to replicate for tests: https://github.com/aio-libs/async-lru#usage
+# - their suite as well:
+#   https://github.com/aio-libs/async-lru/tree/master/tests
+# - asked trio_util about it too:
+#   https://github.com/groove-x/trio-util/issues/21
+def async_lifo_cache(
+    maxsize=128,

+    # NOTE: typing style was learned from:
+    # https://stackoverflow.com/a/71132186
+) -> Callable[
+    Callable[P, Awaitable[T]],
+    Callable[
+        Callable[P, Awaitable[T]],
+        Callable[P, Awaitable[T]],
+    ],
+]:
+    '''
+    Async ``cache`` with a LIFO policy.

     Implemented my own since no one else seems to have
     a standard. I'll wait for the smarter people to come
     up with one, but until then...
-    """
+
+    NOTE: when decorating, due to this simple/naive implementation, you
+    MUST call the decorator like,
+
+    .. code:: python
+
+        @async_lifo_cache()
+        async def cache_target():
+
+    '''
     cache = OrderedDict()

-    def decorator(fn):
-        async def wrapper(*args):
+    def decorator(
+        fn: Callable[P, Awaitable[T]],
+    ) -> Callable[P, Awaitable[T]]:
+
+        async def decorated(
+            *args: P.args,
+            **kwargs: P.kwargs,
+        ) -> T:
             key = args
             try:
                 return cache[key]

@@ -53,16 +94,20 @@ def async_lifo_cache(maxsize=128):
                     # discard last added new entry
                     cache.popitem()

-                # do it
-                cache[key] = await fn(*args)
+                # call underlying
+                cache[key] = await fn(
+                    *args,
+                    **kwargs,
+                )
                 return cache[key]

-        return wrapper
+        return decorated

     return decorator


-@asynccontextmanager
+# TODO: move this to `.brokers.utils`..
+@acm
 async def open_cached_client(
     brokername: str,
 ) -> 'Client':  # noqa
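As the new docstring stresses, `async_lifo_cache` must be *called* when decorating, even with default args. A small usage sketch (assuming the module lands at `piker._cacheables`; the quote fetcher and its symbol are made up for illustration):

    import trio
    from piker._cacheables import async_lifo_cache

    @async_lifo_cache()  # NOTE: must call, even for defaults!
    async def fetch_quote(symbol: str) -> dict:
        await trio.sleep(0.1)  # stand-in for a slow network lookup
        return {'symbol': symbol, 'price': 100.0}

    async def main() -> None:
        q1 = await fetch_quote('mnq.cme.ib')  # miss -> runs the body
        q2 = await fetch_quote('mnq.cme.ib')  # hit -> cached object
        # NOTE: only *positional* args form the cache key (`key = args`);
        # kwargs are forwarded to the target but don't affect lookup.
        assert q1 is q2

    trio.run(main)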
@@ -0,0 +1,115 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+"Accounting for degens": count dem numberz that tracks how much you got
+for tendiez.
+
+'''
+from ..log import get_logger
+
+from ._ledger import (
+    iter_by_dt,
+    Transaction,
+    TransactionLedger,
+    open_trade_ledger,
+)
+from ._pos import (
+    load_pps_from_ledger,
+    open_pps,
+    Position,
+    PpTable,
+)
+from ._mktinfo import (
+    Asset,
+    dec_digits,
+    digits_to_dec,
+    MktPair,
+    Symbol,
+    unpack_fqme,
+)
+from ._allocate import (
+    mk_allocator,
+    Allocator,
+)
+
+log = get_logger(__name__)
+
+__all__ = [
+    'Allocator',
+    'Asset',
+    'MktPair',
+    'Position',
+    'PpTable',
+    'Symbol',
+    'Transaction',
+    'TransactionLedger',
+    'dec_digits',
+    'digits_to_dec',
+    'iter_by_dt',
+    'load_pps_from_ledger',
+    'mk_allocator',
+    'open_pps',
+    'open_trade_ledger',
+    'unpack_fqme',
+]
+
+
+def get_likely_pair(
+    src: str,
+    dst: str,
+    bs_mktid: str,
+
+) -> str | None:
+    '''
+    Attempt to get the likely trading pair matching a given destination
+    asset `dst: str`.
+
+    '''
+    try:
+        src_name_start = bs_mktid.rindex(src)
+    except (
+        ValueError,   # substr not found
+    ):
+        # TODO: handle nested positions..(i.e.
+        # positions where the src fiat was used to
+        # buy some other dst which was furhter used
+        # to buy another dst..)
+        # log.warning(
+        #     f'No src fiat {src} found in {bs_mktid}?'
+        # )
+        return
+
+    likely_dst = bs_mktid[:src_name_start]
+    if likely_dst == dst:
+        return bs_mktid
+
+
+if __name__ == '__main__':
+    import sys
+    from pprint import pformat
+
+    args = sys.argv
+    assert len(args) > 1, 'Specifiy account(s) from `brokers.toml`'
+    args = args[1:]
+    for acctid in args:
+        broker, name = acctid.split('.')
+        trans, updated_pps = load_pps_from_ledger(broker, name)
+        print(
+            f'Processing transactions into pps for {broker}:{acctid}\n'
+            f'{pformat(trans)}\n\n'
+            f'{pformat(updated_pps)}'
+        )
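A quick sketch of what `get_likely_pair()` computes for a kraken-style concatenated market id (the ids here are illustrative; assumes the package import path `piker.accounting` implied by the new `__init__`):

    from piker.accounting import get_likely_pair

    # `bs_mktid` ends with the src (fiat) name: strip it and compare the
    # remaining prefix against the expected dst asset name.
    assert get_likely_pair(src='usd', dst='xmr', bs_mktid='xmrusd') == 'xmrusd'

    # src not found as a substring -> ValueError branch -> None
    assert get_likely_pair(src='eur', dst='xmr', bs_mktid='xmrusd') is None

    # src found but the prefix isn't the dst asked about -> None
    assert get_likely_pair(src='usd', dst='btc', bs_mktid='xmrusd') is None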
@@ -23,9 +23,9 @@ from typing import Optional

 from bidict import bidict

-from ..data._source import Symbol
+from ._pos import Position
+from . import MktPair
 from ..data.types import Struct
-from ..pp import Position


 _size_units = bidict({

@@ -42,7 +42,7 @@ SizeUnit = Enum(

 class Allocator(Struct):

-    symbol: Symbol
+    mkt: MktPair

     # TODO: if we ever want ot support non-uniform entry-slot-proportion
     # "sizes"

@@ -114,8 +114,8 @@ class Allocator(Struct):
         depending on position / order entry config.

         '''
-        sym = self.symbol
-        ld = sym.lot_size_digits
+        mkt: MktPair = self.mkt
+        ld: int = mkt.size_tick_digits

         size_unit = self.size_unit
         live_size = live_pp.size

@@ -125,13 +125,13 @@ class Allocator(Struct):
         u_per_slot, currency_per_slot = self.step_sizes()

         if size_unit == 'units':
-            slot_size = u_per_slot
-            l_sub_pp = self.units_limit - abs_live_size
+            slot_size: float = u_per_slot
+            l_sub_pp: float = self.units_limit - abs_live_size

         elif size_unit == 'currency':
-            live_cost_basis = abs_live_size * live_pp.ppu
-            slot_size = currency_per_slot / price
-            l_sub_pp = (self.currency_limit - live_cost_basis) / price
+            live_cost_basis: float = abs_live_size * live_pp.ppu
+            slot_size: float = currency_per_slot / price
+            l_sub_pp: float = (self.currency_limit - live_cost_basis) / price

         else:
             raise ValueError(

@@ -141,8 +141,14 @@ class Allocator(Struct):
         # an entry (adding-to or starting a pp)
         if (
             live_size == 0
-            or (action == 'buy' and live_size > 0)
-            or action == 'sell' and live_size < 0
+            or (
+                action == 'buy'
+                and live_size > 0
+            )
+            or (
+                action == 'sell'
+                and live_size < 0
+            )
         ):
             order_size = min(
                 slot_size,

@@ -178,7 +184,7 @@ class Allocator(Struct):
             order_size = max(slotted_pp, slot_size)

             if (
-                abs_live_size < slot_size or
+                abs_live_size < slot_size

                 # NOTE: front/back "loading" heurstic:
                 # if the remaining pp is in between 0-1.5x a slot's

@@ -187,14 +193,17 @@ class Allocator(Struct):
                 # **without** going past a net-zero pp. if the pp is
                 # > 1.5x a slot size, then front load: exit a slot's and
                 # expect net-zero to be acquired on the final exit.
-                slot_size < pp_size < round((1.5*slot_size), ndigits=ld) or
+                or slot_size < pp_size < round((1.5*slot_size), ndigits=ld)
+                or (

                 # underlying requires discrete (int) units (eg. stocks)
                 # and thus our slot size (based on our limit) would
                 # exit a fractional unit's worth so, presuming we aren't
                 # supporting a fractional-units-style broker, we need
                 # exit the final unit.
-                ld == 0 and abs_live_size == 1
+                    ld == 0
+                    and abs_live_size == 1
+                )
             ):
                 order_size = abs_live_size

@@ -203,13 +212,14 @@ class Allocator(Struct):
         # compute a fractional slots size to display
         slots_used = self.slots_used(
             Position(
-                symbol=sym,
+                mkt=mkt,
                 size=order_size,
                 ppu=price,
-                bsuid=sym,
+                bs_mktid=mkt.bs_mktid,
             )
         )

+        # TODO: render an actual ``Executable`` type here?
         return {
             'size': abs(round(order_size, ndigits=ld)),
             'size_digits': ld,

@@ -249,7 +259,7 @@ class Allocator(Struct):

 def mk_allocator(

-    symbol: Symbol,
+    mkt: MktPair,
     startup_pp: Position,

     # default allocation settings

@@ -276,6 +286,6 @@ def mk_allocator(
     defaults.update(user_def)

     return Allocator(
-        symbol=symbol,
+        mkt=mkt,
         **defaults,
     )
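The reshaped entry-gate condition above is formatting only: since `and` binds tighter than `or` in Python, the old one-liner and the new parenthesized form are equivalent. Distilled into a standalone (illustrative) predicate:

    def is_entry(action: str, live_size: float) -> bool:
        # an order is an "entry" when it starts a pp or adds to one
        # in the same long/short direction
        return (
            live_size == 0
            or (action == 'buy' and live_size > 0)
            or (action == 'sell' and live_size < 0)
        )

    assert is_entry('buy', 0)        # starting a new pp
    assert is_entry('buy', 10)       # adding to a long
    assert is_entry('sell', -5)      # adding to a short
    assert not is_entry('sell', 10)  # exiting a long -> exit-sizing path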
@@ -0,0 +1,295 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Trade and transaction ledger processing.
+
+'''
+from __future__ import annotations
+from collections import UserDict
+from contextlib import contextmanager as cm
+from pathlib import Path
+from typing import (
+    Any,
+    Callable,
+    Iterator,
+    Union,
+    Generator
+)
+
+from pendulum import (
+    datetime,
+    DateTime,
+    from_timestamp,
+    parse,
+)
+import tomli_w  # for fast ledger writing
+
+from .. import config
+from ..data.types import Struct
+from ..log import get_logger
+from ._mktinfo import (
+    Symbol,  # legacy
+    MktPair,
+    Asset,
+)
+
+log = get_logger(__name__)
+
+
+class Transaction(Struct, frozen=True):
+
+    # TODO: unify this with the `MktPair`,
+    # once we have that as a required field,
+    # we don't really need the fqme any more..
+    fqme: str
+
+    tid: Union[str, int]  # unique transaction id
+    size: float
+    price: float
+    cost: float  # commisions or other additional costs
+    dt: datetime
+
+    # TODO: we can drop this right since we
+    # can instead expect the backend to provide this
+    # via the `MktPair`?
+    expiry: datetime | None = None
+
+    # TODO: drop the Symbol type, construct using
+    # t.sys (the transaction system)
+
+    # the underlying "transaction system", normally one of a ``MktPair``
+    # (a description of a tradable double auction) or a ledger-recorded
+    # ("ledger" in any sense as long as you can record transfers) of any
+    # sort) ``Asset``.
+    sym: MktPair | Asset | Symbol | None = None
+
+    @property
+    def sys(self) -> Symbol:
+        return self.sym
+
+    # (optional) key-id defined by the broker-service backend which
+    # ensures the instrument-symbol market key for this record is unique
+    # in the "their backend/system" sense; i.e. this uid for the market
+    # as defined (internally) in some namespace defined by the broker
+    # service.
+    bs_mktid: str | int | None = None
+
+    def to_dict(self) -> dict:
+        dct = super().to_dict()
+
+        # TODO: switch to sys!
+        dct.pop('sym')
+
+        # ensure we use a pendulum formatted
+        # ISO style str here!@
+        dct['dt'] = str(self.dt)
+        return dct
+
+
+class TransactionLedger(UserDict):
+    '''
+    Very simple ``dict`` wrapper + ``pathlib.Path`` handle to
+    a TOML formatted transaction file for enabling file writes
+    dynamically whilst still looking exactly like a ``dict`` from the
+    outside.
+
+    '''
+    def __init__(
+        self,
+        ledger_dict: dict,
+        file_path: Path,
+        tx_sort: Callable,
+
+    ) -> None:
+        self.file_path = file_path
+        self.tx_sort = tx_sort
+        super().__init__(ledger_dict)
+
+    def update_from_t(
+        self,
+        t: Transaction,
+    ) -> None:
+        self.data[t.tid] = t.to_dict()
+
+    def iter_trans(
+        self,
+        mkt_by_fqme: dict[str, MktPair],
+        broker: str = 'paper',
+
+    ) -> Generator[
+        tuple[str, Transaction],
+        None,
+        None,
+    ]:
+        '''
+        Deliver trades records in ``(key: str, t: Transaction)``
+        form via generator.
+
+        '''
+        if broker != 'paper':
+            raise NotImplementedError('Per broker support not dun yet!')
+
+        # TODO: lookup some standard normalizer
+        # func in the backend?
+        # from ..brokers import get_brokermod
+        # mod = get_brokermod(broker)
+        # trans_dict = mod.norm_trade_records(self.data)
+
+        # NOTE: instead i propose the normalizer is
+        # a one shot routine (that can be lru cached)
+        # and instead call it for each entry incrementally:
+        # normer = mod.norm_trade_record(txdict)
+
+        # TODO: use tx_sort here yah?
+        for tid, txdict in self.data.items():
+            # special field handling for datetimes
+            # to ensure pendulum is used!
+            fqme = txdict.get('fqme') or txdict['fqsn']
+            dt = parse(txdict['dt'])
+            expiry = txdict.get('expiry')
+
+            mkt = mkt_by_fqme.get(fqme)
+            if not mkt:
+                # we can't build a trans if we don't have
+                # the ``.sys: MktPair`` info, so skip.
+                continue
+
+            tx = Transaction(
+                fqme=fqme,
+                tid=txdict['tid'],
+                dt=dt,
+                price=txdict['price'],
+                size=txdict['size'],
+                cost=txdict.get('cost', 0),
+                bs_mktid=txdict['bs_mktid'],
+
+                # TODO: change to .sys!
+                sym=mkt,
+                expiry=parse(expiry) if expiry else None,
+            )
+            yield tid, tx
+
+    def to_trans(
+        self,
+        **kwargs,
+
+    ) -> dict[str, Transaction]:
+        '''
+        Return entire output from ``.iter_trans()`` in a ``dict``.
+
+        '''
+        return dict(self.iter_trans(**kwargs))
+
+    def write_config(
+        self,
+
+    ) -> None:
+        '''
+        Render the self.data ledger dict to it's TOML file form.
+
+        '''
+        cpy = self.data.copy()
+        towrite: dict[str, Any] = {}
+        for tid, trans in cpy.items():
+
+            # drop key for non-expiring assets
+            txdict = towrite[tid] = self.data[tid]
+            if (
+                'expiry' in txdict
+                and txdict['expiry'] is None
+            ):
+                txdict.pop('expiry')
+
+            # re-write old acro-key
+            fqme = txdict.get('fqsn')
+            if fqme:
+                txdict['fqme'] = fqme
+
+        with self.file_path.open(mode='wb') as fp:
+            tomli_w.dump(towrite, fp)
+
+
+def iter_by_dt(
+    records: dict[str, Any],
+
+    # NOTE: parsers are looked up in the insert order
+    # so if you know that the record stats show some field
+    # is more common then others, stick it at the top B)
+    parsers: dict[tuple[str], Callable] = {
+        'dt': None,  # parity case
+        'datetime': parse,  # datetime-str
+        'time': from_timestamp,  # float epoch
+    },
+    key: Callable | None = None,
+
+) -> Iterator[tuple[str, dict]]:
+    '''
+    Iterate entries of a ``records: dict`` table sorted by entry recorded
+    datetime presumably set at the ``'dt'`` field in each entry.
+
+    '''
+    def dyn_parse_to_dt(
+        pair: tuple[str, dict],
+    ) -> DateTime:
+        _, txdict = pair
+        k, v, parser = next(
+            (k, txdict[k], parsers[k]) for k in parsers if k in txdict
+        )
+
+        return parser(v) if parser else v
+
+    for tid, data in sorted(
+        records.items(),
+        key=key or dyn_parse_to_dt,
+    ):
+        yield tid, data
+
+
+@cm
+def open_trade_ledger(
+    broker: str,
+    account: str,
+
+    # default is to sort by detected datetime-ish field
+    tx_sort: Callable = iter_by_dt,
+
+) -> Generator[dict, None, None]:
+    '''
+    Indempotently create and read in a trade log file from the
+    ``<configuration_dir>/ledgers/`` directory.
+
+    Files are named per broker account of the form
+    ``<brokername>_<accountname>.toml``. The ``accountname`` here is the
+    name as defined in the user's ``brokers.toml`` config.
+
+    '''
+    ledger_dict, fpath = config.load_ledger(broker, account)
+    cpy = ledger_dict.copy()
+    ledger = TransactionLedger(
+        ledger_dict=cpy,
+        file_path=fpath,
+        tx_sort=tx_sort,
+    )
+    try:
+        yield ledger
+    finally:
+        if ledger.data != ledger_dict:
+
+            # TODO: show diff output?
+            # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
+            log.info(f'Updating ledger for {fpath}:\n')
+            ledger.write_config()
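A sketch of `iter_by_dt()`'s per-entry datetime-field detection over a toy records table (entries fabricated; assumes the module path `piker.accounting._ledger` implied by the relative imports):

    from piker.accounting._ledger import iter_by_dt

    records = {
        # ISO string -> matched by the 'datetime' parser (pendulum.parse)
        'tx-b': {'datetime': '2023-04-20T10:00:00+00:00', 'size': 1},
        # float epoch -> matched by the 'time' parser (from_timestamp)
        'tx-a': {'time': 1680000000.0, 'size': 2},  # ~2023-03-28 UTC
    }

    # entries come out in ascending datetime order regardless of which
    # field each record happens to carry: tx-a first, then tx-b.
    assert [tid for tid, _ in iter_by_dt(records)] == ['tx-a', 'tx-b']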
@@ -0,0 +1,668 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Market (pair) meta-info layer: sane addressing semantics and meta-data
+for cross-provider marketplaces.
+
+We intoduce the concept of,
+
+- a FQMA: fully qualified market address,
+- a sane schema for FQMAs including derivatives,
+- a msg-serializeable description of markets for
+  easy sharing with other pikers B)
+
+'''
+from __future__ import annotations
+from decimal import (
+    Decimal,
+    ROUND_HALF_EVEN,
+)
+from typing import (
+    Any,
+    Literal,
+)
+
+from ..data.types import Struct
+
+
+_underlyings: list[str] = [
+    'stock',
+    'bond',
+    'crypto',
+    'fiat',
+    'commodity',
+]
+
+
+_derivs: list[str] = [
+    'swap',
+    'future',
+    'continuous_future',
+    'option',
+    'futures_option',
+
+    # if we can't figure it out, presume the worst XD
+    'unknown',
+]
+
+# NOTE: a tag for other subsystems to try
+# and do default settings for certain things:
+# - allocator does unit vs. dolla size limiting.
+AssetTypeName: Literal[
+    _underlyings
+    +
+    _derivs
+]
+
+# egs. stock, futer, option, bond etc.
+
+
+def dec_digits(
+    value: float | str | Decimal,
+
+) -> int:
+    '''
+    Return the number of precision digits read from a decimal or float
+    value.
+
+    '''
+    if value == 0:
+        return 0
+
+    return int(
+        -Decimal(str(value)).as_tuple().exponent
+    )
+
+
+float_digits = dec_digits
+
+
+def digits_to_dec(
+    ndigits: int,
+) -> Decimal:
+    '''
+    Return the minimum float value for an input integer value.
+
+    eg. 3 -> 0.001
+
+    '''
+    if ndigits == 0:
+        return Decimal('0')
+
+    return Decimal('0.' + '0'*(ndigits-1) + '1')
+
+
+class Asset(Struct, frozen=True):
+    '''
+    Container type describing any transactable asset and its
+    contract-like and/or underlying technology meta-info.
+
+    '''
+    name: str
+    atype: str  # AssetTypeName
+
+    # minimum transaction size / precision.
+    # eg. for buttcoin this is a "satoshi".
+    tx_tick: Decimal
+
+    # NOTE: additional info optionally packed in by the backend, but
+    # should not be explicitly required in our generic API.
+    info: dict = {}  # make it frozen?
+
+    # TODO?
+    # _to_dict_skip = {'info'}
+
+    def __str__(self) -> str:
+        return self.name
+
+    def quantize(
+        self,
+        size: float,
+
+    ) -> Decimal:
+        '''
+        Truncate input ``size: float`` using ``Decimal``
+        quantized form of the digit precision defined
+        by ``self.lot_tick_size``.
+
+        '''
+        digits = float_digits(self.tx_tick)
+        return Decimal(size).quantize(
+            Decimal(f'1.{"0".ljust(digits, "0")}'),
+            rounding=ROUND_HALF_EVEN
+        )
+
+    @classmethod
+    def guess_from_mkt_ep_key(
+        cls,
+        mkt_ep_key: str,
+        atype: str | None = None,
+
+    ) -> Asset:
+        '''
+        A hacky guess method for presuming a (target) asset's properties
+        based on either the actualy market endpoint key, or config settings
+        from the user.
+
+        '''
+        atype = atype or 'unknown'
+
+        # attempt to strip off any source asset
+        # via presumed syntax of:
+        # - <dst>/<src>
+        # - <dst>.<src>
+        # - etc.
+        for char in ['/', '.']:
+            dst, _, src = mkt_ep_key.partition(char)
+            if src:
+                if not atype:
+                    atype = 'fiat'
+                break
+
+        return Asset(
+            name=dst,
+            atype=atype,
+            tx_tick=Decimal('0.01'),
+        )
+
+
+def maybe_cons_tokens(
+    tokens: list[Any],
+    delim_char: str = '.',
+) -> str:
+    '''
+    Construct `str` output from a maybe-concatenation of input
+    sequence of elements in ``tokens``.
+
+    '''
+    return delim_char.join(filter(bool, tokens)).lower()
+
+
+class MktPair(Struct, frozen=True):
+    '''
+    Market description for a pair of assets which are tradeable:
+    a market which enables transactions of the form,
+        buy: source asset -> destination asset
+        sell: destination asset -> source asset
+
+    The main intention of this type is for a **simple** cross-asset
+    venue/broker normalized descrption type from which all
+    market-auctions can be mapped from FQME identifiers.
+
+    TODO: our eventual target fqme format/schema is:
+    <dst>/<src>.<expiry>.<con_info_1>.<con_info_2>. -> .<venue>.<broker>
+          ^ -- optional tokens ------------------------------- ^
+
+    '''
+    dst: str | Asset
+    # "destination asset" (name) used to buy *to*
+    # (or used to sell *from*)
+
+    price_tick: Decimal  # minimum price increment
+    size_tick: Decimal  # minimum size (aka vlm) increment
+    # the tick size is the number describing the smallest step in value
+    # available in this market between the source and destination
+    # assets.
+    # https://en.wikipedia.org/wiki/Tick_size
+    # https://en.wikipedia.org/wiki/Commodity_tick
+    # https://en.wikipedia.org/wiki/Percentage_in_point
+
+    # unique "broker id" since every market endpoint provider
+    # has their own nomenclature and schema for market maps.
+    bs_mktid: str
+    broker: str  # the middle man giving access
+
+    # NOTE: to start this field is optional but should eventually be
+    # required; the reason is for backward compat since more positioning
+    # calculations were not originally stored with a src asset..
+
+    src: str | Asset = ''
+    # "source asset" (name) used to buy *from*
+    # (or used to sell *to*).
+
+    venue: str = ''  # market venue provider name
+    expiry: str = ''  # for derivs, expiry datetime parseable str
+
+    # destination asset's financial type/classification name
+    # NOTE: this is required for the order size allocator system,
+    # since we use different default settings based on the type
+    # of the destination asset, eg. futes use a units limits vs.
+    # equities a $limit.
+    # dst_type: AssetTypeName | None = None
+
+    # source asset's financial type/classification name
+    # TODO: is a src type required for trading?
+    # there's no reason to need any more then the one-way alloc-limiter
+    # config right?
+    # src_type: AssetTypeName
+
+    # for derivs, info describing contract, egs.
+    # strike price, call or put, swap type, exercise model, etc.
+    contract_info: list[str] | None = None
+
+    _atype: str = ''
+
+    # NOTE: when cast to `str` return fqme
+    def __str__(self) -> str:
+        return self.fqme
+
+    @classmethod
+    def from_msg(
+        cls,
+        msg: dict[str, Any],
+
+    ) -> MktPair:
+        '''
+        Constructor for a received msg-dict normally received over IPC.
+
+        '''
+        dst_asset_msg = msg.pop('dst')
+        src_asset_msg = msg.pop('src')
+
+        if isinstance(dst_asset_msg, str):
+            src: str = str(src_asset_msg)
+            assert isinstance(src, str)
+            return cls.from_fqme(
+                dst_asset_msg,
+                src=src,
+                **msg,
+            )
+
+        else:
+            # NOTE: we call `.copy()` here to ensure
+            # type casting!
+            dst = Asset(**dst_asset_msg).copy()
+            if not isinstance(src_asset_msg, str):
+                src = Asset(**src_asset_msg).copy()
+            else:
+                src = str(src_asset_msg)
+
+        return cls(
+            dst=dst,
+            src=src,
+            **msg,
+            # XXX NOTE: ``msgspec`` can encode `Decimal`
+            # but it doesn't decide to it by default since
+            # we aren't spec-cing these msgs as structs, SO
+            # we have to ensure we do a struct type case (which `.copy()`
+            # does) to ensure we get the right type!
+        ).copy()
+
+    @property
+    def resolved(self) -> bool:
+        return isinstance(self.dst, Asset)
+
+    @classmethod
+    def from_fqme(
+        cls,
+        fqme: str,
+
+        price_tick: float | str,
+        size_tick: float | str,
+        bs_mktid: str,
+
+        broker: str | None = None,
+        **kwargs,
+
+    ) -> MktPair:
+
+        _fqme: str = fqme
+        if (
+            broker
+            and broker not in fqme
+        ):
+            _fqme = f'{fqme}.{broker}'
+
+        broker, mkt_ep_key, venue, suffix = unpack_fqme(_fqme)
+        dst: Asset = Asset.guess_from_mkt_ep_key(
+            mkt_ep_key,
+            atype=kwargs.get('_atype'),
+        )
+
+        # XXX: loading from a fqme string will
+        # leave this pair as "un resolved" meaning
+        # we don't yet have `.dst` set as an `Asset`
+        # which we expect to be filled in by some
+        # backend client with access to that data-info.
+        return cls(
+            # XXX: not resolved to ``Asset`` :(
+            dst=dst,
+
+            broker=broker,
+            venue=venue,
+            # XXX NOTE: we presume this token
+            # if the expiry for now!
+            expiry=suffix,
+
+            price_tick=price_tick,
+            size_tick=size_tick,
+            bs_mktid=bs_mktid,
+
+            **kwargs,
+
+        ).copy()
+
+    @property
+    def key(self) -> str:
+        '''
+        The "endpoint key" for this market.
+
+        '''
+        return self.pair
+
+    def pair(
+        self,
+        delim_char: str | None = None,
+    ) -> str:
+        '''
+        The "endpoint asset pair key" for this market.
+        Eg. mnq/usd or btc/usdt or xmr/btc
+
+        In most other tina platforms this is referred to as the
+        "symbol".
+
+        '''
+        return maybe_cons_tokens(
+            [str(self.dst),
+             str(self.src)],
+            # TODO: make the default '/'
+            delim_char=delim_char or '',
+        )
+
+    @property
+    def suffix(self) -> str:
+        '''
+        The "contract suffix" for this market.
+
+        Eg. mnq/usd.20230616.cme.ib
+                    ^ ----- ^
+        or tsla/usd.20230324.200c.cboe.ib
+                    ^ ---------- ^
+
+        In most other tina platforms they only show you these details in
+        some kinda "meta data" format, we have FQMEs so we do this up
+        front and explicit.
+
+        '''
+        field_strs = [self.expiry]
+        con_info = self.contract_info
+        if con_info is not None:
+            field_strs.extend(con_info)
+
+        return maybe_cons_tokens(field_strs)
+
+    def get_fqme(
+        self,
+
+        # NOTE: allow dropping the source asset from the
+        # market endpoint's pair key. Eg. to change
+        # mnq/usd.<> -> mnq.<> which is useful when
+        # searching (legacy) stock exchanges.
+        without_src: bool = False,
+        delim_char: str | None = None,
+
+    ) -> str:
+        '''
+        Return the fully qualified market endpoint-address for the
+        pair of transacting assets.
+
+        fqme = "fully qualified market endpoint"
+
+        And yes, you pronounce it colloquially as read..
+
+        Basically the idea here is for all client code (consumers of piker's
+        APIs which query the data/broker-provider agnostic layer(s)) should be
+        able to tell which backend / venue / derivative each data feed/flow is
+        from by an explicit string-key of the current form:
+
+            <market-instrument-name>
+                .<venue>
+                .<expiry>
+                .<derivative-suffix-info>
+                .<brokerbackendname>
+
+        eg. for an explicit daq mini futes contract: mnq.cme.20230317.ib
+
+        TODO: I have thoughts that we should actually change this to be
+        more like an "attr lookup" (like how the web should have done
+        urls, but marketting peeps ruined it etc. etc.)
+
+            <broker>.<venue>.<instrumentname>.<suffixwithmetadata>
+
+        TODO:
+        See community discussion on naming and nomenclature, order
+        of addressing hierarchy, general schema, internal representation:
+
+        https://github.com/pikers/piker/issues/467
+
+        '''
+        key: str = (
+            self.pair(delim_char=delim_char)
+            if not without_src
+            else str(self.dst)
+        )
+
+        return maybe_cons_tokens([
+            key,  # final "pair name" (eg. qqq[/usd], btcusdt)
+            self.venue,
+            self.suffix,  # includes expiry and other con info
+            self.broker,
+        ])
+
+    # NOTE: the main idea behind an fqme is to map a "market address"
+    # to some endpoint from a transaction provider (eg. a broker) such
+    # that we build a table of `fqme: str -> bs_mktid: Any` where any "piker
+    # market address" maps 1-to-1 to some broker trading endpoint.
+    # @cached_property
+    fqme = property(get_fqme)
+
+    def get_bs_fqme(
+        self,
+        **kwargs,
+    ) -> str:
+        '''
+        FQME sin broker part XD
+
+        '''
+        sin_broker, *_ = self.get_fqme(**kwargs).rpartition('.')
+        return sin_broker
+
+    bs_fqme = property(get_bs_fqme)
+
+    @property
+    def fqsn(self) -> str:
+        return self.fqme
+
+    def quantize(
+        self,
+        size: float,
+
+        quantity_type: Literal['price', 'size'] = 'size',
+
+    ) -> Decimal:
+        '''
+        Truncate input ``size: float`` using ``Decimal``
+        and ``.size_tick``'s # of digits.
+
+        '''
+        match quantity_type:
+            case 'price':
+                digits = float_digits(self.price_tick)
+            case 'size':
+                digits = float_digits(self.size_tick)
+
+        return Decimal(size).quantize(
+            Decimal(f'1.{"0".ljust(digits, "0")}'),
+            rounding=ROUND_HALF_EVEN
+        )
+
+    # TODO: BACKWARD COMPAT, TO REMOVE?
+    @property
+    def type_key(self) -> str:
+        if isinstance(self.dst, Asset):
+            return str(self.dst.atype)
+
+        return self._atype
+
+    @property
+    def price_tick_digits(self) -> int:
+        return float_digits(self.price_tick)
+
+    @property
+    def size_tick_digits(self) -> int:
+        return float_digits(self.size_tick)
+
+
+def unpack_fqme(
+    fqme: str,
+
+    broker: str | None = None
+
+) -> tuple[str, ...]:
+    '''
+    Unpack a fully-qualified-symbol-name to ``tuple``.
+
+    '''
+    venue = ''
+    suffix = ''
+
+    # TODO: probably reverse the order of all this XD
+    tokens = fqme.split('.')
+
+    match tokens:
+        case [mkt_ep, broker]:
+            # probably crypto
+            return (
+                broker,
+                mkt_ep,
+                '',
+                '',
+            )
+
+        # TODO: swap venue and suffix/deriv-info here?
+        case [mkt_ep, venue, suffix, broker]:
+            pass
+
+        # handle `bs_mktid` + `broker` input case
+        case [
+            mkt_ep, venue, suffix
+        ] if (
+            broker
+            and suffix != broker
+        ):
+            pass
+
+        case [mkt_ep, venue, broker]:
+            suffix = ''
+
+        case _:
+            raise ValueError(f'Invalid fqme: {fqme}')
+
+    return (
+        broker,
+        mkt_ep,
+        venue,
+        # '.'.join([mkt_ep, venue]),
+        suffix,
+    )
+
+
+class Symbol(Struct):
+    '''
+    I guess this is some kinda container thing for dealing with
+    all the different meta-data formats from brokers?
+
+    '''
+    key: str
+
+    broker: str = ''
+    venue: str = ''
+
+    # precision descriptors for price and vlm
+    tick_size: Decimal = Decimal('0.01')
+    lot_tick_size: Decimal = Decimal('0.0')
+
+    suffix: str = ''
+    broker_info: dict[str, dict[str, Any]] = {}
+
+    @classmethod
+    def from_fqme(
+        cls,
+        fqsn: str,
+        info: dict[str, Any],
+
+    ) -> Symbol:
+        broker, mktep, venue, suffix = unpack_fqme(fqsn)
+        tick_size = info.get('price_tick_size', 0.01)
+        lot_size = info.get('lot_tick_size', 0.0)
+
+        return Symbol(
+            broker=broker,
+            key=mktep,
+            tick_size=tick_size,
+            lot_tick_size=lot_size,
+            venue=venue,
+            suffix=suffix,
+            broker_info={broker: info},
+        )
+
+    @property
+    def type_key(self) -> str:
+        return list(self.broker_info.values())[0]['asset_type']
+
+    @property
+    def tick_size_digits(self) -> int:
+        return float_digits(self.tick_size)
+
+    @property
+    def lot_size_digits(self) -> int:
+        return float_digits(self.lot_tick_size)
+
+    @property
+    def price_tick(self) -> Decimal:
+        return Decimal(str(self.tick_size))
+
+    @property
+    def size_tick(self) -> Decimal:
+        return Decimal(str(self.lot_tick_size))
+
+    @property
+    def broker(self) -> str:
+        return list(self.broker_info.keys())[0]
+
+    @property
+    def fqme(self) -> str:
+        return maybe_cons_tokens([
+            self.key,  # final "pair name" (eg. qqq[/usd], btcusdt)
+            self.venue,
+            self.suffix,  # includes expiry and other con info
+            self.broker,
+        ])
+
+    def quantize(
+        self,
+        size: float,
+    ) -> Decimal:
+        digits = float_digits(self.lot_tick_size)
+        return Decimal(size).quantize(
+            Decimal(f'1.{"0".ljust(digits, "0")}'),
+            rounding=ROUND_HALF_EVEN
+        )
+
+    # NOTE: when cast to `str` return fqme
+    def __str__(self) -> str:
+        return self.fqme
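Tying the `_mktinfo` pieces together, a hedged round-trip sketch (the tick values and the backend market id are made up; note `from_fqme()` yields a pair whose `.dst` is only a *guessed* `Asset` until a backend fills in real info):

    from decimal import Decimal
    from piker.accounting._mktinfo import (
        MktPair,
        dec_digits,
        digits_to_dec,
        unpack_fqme,
    )

    # precision helpers: digit count <-> minimum tick value
    assert dec_digits('0.001') == 3
    assert digits_to_dec(3) == Decimal('0.001')

    # fqme -> token tuple (the 4-token derivative form)
    assert unpack_fqme('mnq.cme.20230317.ib') == (
        'ib', 'mnq', 'cme', '20230317',
    )

    # and back: construct a pair from the same fqme
    mkt = MktPair.from_fqme(
        'mnq.cme.20230317.ib',
        price_tick='0.25',  # str cast to Decimal by the internal .copy()
        size_tick='1',
        bs_mktid='hypothetical-conid',  # backend-specific id, made up
    )
    assert mkt.broker == 'ib' and mkt.venue == 'cme'
    assert mkt.expiry == '20230317'
    assert str(mkt) == 'mnq.cme.20230317.ib'  # __str__ renders the fqme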
@ -12,158 +12,104 @@
|
||||||
# GNU Affero General Public License for more details.
|
# GNU Affero General Public License for more details.
|
||||||
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
Personal/Private position parsing, calculating, summarizing in a way
|
Personal/Private position parsing, calculating, summarizing in a way
|
||||||
that doesn't try to cuk most humans who prefer to not lose their moneys..
|
that doesn't try to cuk most humans who prefer to not lose their moneys..
|
||||||
|
|
||||||
(looking at you `ib` and dirt-bird friends)
|
(looking at you `ib` and dirt-bird friends)
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from __future__ import annotations
|
from __future__ import annotations
|
||||||
from contextlib import contextmanager as cm
|
from contextlib import contextmanager as cm
|
||||||
from pprint import pformat
|
from decimal import Decimal
|
||||||
import os
|
|
||||||
from os import path
|
|
||||||
from math import copysign
|
from math import copysign
|
||||||
import re
|
from pprint import pformat
|
||||||
import time
|
from pathlib import Path
|
||||||
from typing import (
|
from typing import (
|
||||||
Any,
|
Any,
|
||||||
Iterator,
|
Iterator,
|
||||||
Optional,
|
|
||||||
Union,
|
Union,
|
||||||
Generator
|
Generator
|
||||||
)
|
)
|
||||||
|
|
||||||
import pendulum
|
import pendulum
|
||||||
from pendulum import datetime, now
|
from pendulum import datetime, now
|
||||||
import tomli
|
import tomlkit
|
||||||
import toml
|
|
||||||
|
|
||||||
from . import config
|
from ._ledger import (
|
||||||
from .brokers import get_brokermod
|
Transaction,
|
||||||
from .clearing._messages import BrokerdPosition, Status
|
iter_by_dt,
|
||||||
from .data._source import Symbol, unpack_fqsn
|
open_trade_ledger,
|
||||||
from .log import get_logger
|
)
|
||||||
from .data.types import Struct
|
from ._mktinfo import (
|
||||||
|
MktPair,
|
||||||
|
Asset,
|
||||||
|
unpack_fqme,
|
||||||
|
)
|
||||||
|
from .. import config
|
||||||
|
from ..brokers import get_brokermod
|
||||||
|
from ..clearing._messages import (
|
||||||
|
BrokerdPosition,
|
||||||
|
Status,
|
||||||
|
)
|
||||||
|
from ..data.types import Struct
|
||||||
|
from ..log import get_logger
|
||||||
|
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@cm
|
|
||||||
def open_trade_ledger(
|
|
||||||
broker: str,
|
|
||||||
account: str,
|
|
||||||
|
|
||||||
) -> Generator[dict, None, None]:
|
|
||||||
'''
|
|
||||||
Indempotently create and read in a trade log file from the
|
|
||||||
``<configuration_dir>/ledgers/`` directory.
|
|
||||||
|
|
||||||
Files are named per broker account of the form
|
|
||||||
``<brokername>_<accountname>.toml``. The ``accountname`` here is the
|
|
||||||
name as defined in the user's ``brokers.toml`` config.
|
|
||||||
|
|
||||||
'''
|
|
||||||
ldir = path.join(config._config_dir, 'ledgers')
|
|
||||||
if not path.isdir(ldir):
|
|
||||||
os.makedirs(ldir)
|
|
||||||
|
|
||||||
fname = f'trades_{broker}_{account}.toml'
|
|
||||||
tradesfile = path.join(ldir, fname)
|
|
||||||
|
|
||||||
if not path.isfile(tradesfile):
|
|
||||||
log.info(
|
|
||||||
f'Creating new local trades ledger: {tradesfile}'
|
|
||||||
)
|
|
||||||
with open(tradesfile, 'w') as cf:
|
|
||||||
pass # touch
|
|
||||||
with open(tradesfile, 'rb') as cf:
|
|
||||||
start = time.time()
|
|
||||||
ledger = tomli.load(cf)
|
|
||||||
log.info(f'Ledger load took {time.time() - start}s')
|
|
||||||
cpy = ledger.copy()
|
|
||||||
|
|
||||||
try:
|
|
||||||
yield cpy
|
|
||||||
finally:
|
|
||||||
if cpy != ledger:
|
|
||||||
|
|
||||||
# TODO: show diff output?
|
|
||||||
# https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
|
|
||||||
log.info(f'Updating ledger for {tradesfile}:\n')
|
|
||||||
ledger.update(cpy)
|
|
||||||
|
|
||||||
# we write on close the mutated ledger data
|
|
||||||
with open(tradesfile, 'w') as cf:
|
|
||||||
toml.dump(ledger, cf)
|
|
||||||
|
|
||||||
|
|
||||||
class Transaction(Struct, frozen=True):
|
|
||||||
# TODO: should this be ``.to`` (see below)?
|
|
||||||
fqsn: str
|
|
||||||
|
|
||||||
sym: Symbol
|
|
||||||
tid: Union[str, int] # unique transaction id
|
|
||||||
size: float
|
|
||||||
price: float
|
|
||||||
cost: float # commisions or other additional costs
|
|
||||||
dt: datetime
|
|
||||||
expiry: datetime | None = None
|
|
||||||
|
|
||||||
# optional key normally derived from the broker
|
|
||||||
# backend which ensures the instrument-symbol this record
|
|
||||||
# is for is truly unique.
|
|
||||||
bsuid: Union[str, int] | None = None
|
|
||||||
|
|
||||||
# optional fqsn for the source "asset"/money symbol?
|
|
||||||
# from: Optional[str] = None
|
|
||||||
|
|
||||||
|
|
||||||
def iter_by_dt(
|
|
||||||
clears: dict[str, Any],
|
|
||||||
) -> Iterator[tuple[str, dict]]:
|
|
||||||
'''
|
|
||||||
Iterate entries of a ``clears: dict`` table sorted by entry recorded
|
|
||||||
datetime presumably set at the ``'dt'`` field in each entry.
|
|
||||||
|
|
||||||
'''
|
|
||||||
for tid, data in sorted(
|
|
||||||
list(clears.items()),
|
|
||||||
key=lambda item: item[1]['dt'],
|
|
||||||
):
|
|
||||||
yield tid, data
|
|
||||||
|
|
||||||
|
|
||||||
 class Position(Struct):
     '''
-    Basic pp (personal/piker position) model with attached clearing
-    transaction history.
+    An asset "position" model with attached clearing transaction history.
+
+    A financial "position" in `piker` terms is a summary of accounting
+    metrics computed from a transaction ledger; generally it describes
+    some accumulative "size" and "average price" from the summarized
+    underlying transaction set.
+
+    In piker we focus on the `.ppu` (price per unit) and the `.bep`
+    (break even price) including all transaction entries and exits since
+    the last "net-zero" size of the destination asset's holding.
+
+    This interface serves as an object API for computing and tracking
+    positions as well as supports serialization for storage in the local
+    file system (in TOML) and to interchange as a msg over IPC.
 
     '''
-    symbol: Symbol
+    mkt: MktPair
 
     # can be +ve or -ve for long/short
     size: float
 
-    # "breakeven price" above or below which pnl moves above and below
-    # zero for the entirety of the current "trade state".
+    # "price-per-unit price" above or below which pnl moves above and
+    # below zero for the entirety of the current "trade state". The ppu
+    # is only modified on "increases of" the absolute size of a position
+    # in one of a long/short "direction" (i.e. abs(.size_i) > 0 after
+    # the next transaction given .size was > 0 before that tx, and vice
+    # versa for -ve sized positions).
     ppu: float
 
-    # unique backend symbol id
-    bsuid: str
+    # TODO: break-even-price support!
+    # bep: float
 
-    split_ratio: Optional[int] = None
+    # unique "backend system market id"
+    bs_mktid: str
+
+    split_ratio: int | None = None
 
     # ordered record of known constituent trade messages
     clears: dict[
         Union[str, int, Status],  # trade id
         dict[str, Any],  # transaction history summaries
     ] = {}
-    first_clear_dt: Optional[datetime] = None
+    first_clear_dt: datetime | None = None
 
-    expiry: Optional[datetime] = None
+    expiry: datetime | None = None
+
+    def __repr__(self) -> str:
+        return pformat(self.to_dict())
 
     def to_dict(self) -> dict:
         return {
@@ -192,37 +138,40 @@ class Position(Struct):
         # listing venue here even when the backend isn't providing
         # it via the trades ledger..
         # drop symbol obj in serialized form
-        s = d.pop('symbol')
-        fqsn = s.front_fqsn()
+        mkt: MktPair = d.pop('mkt')
+        assert isinstance(mkt, MktPair)
 
-        broker, key, suffix = unpack_fqsn(fqsn)
-        sym_info = s.broker_info[broker]
+        fqme = mkt.fqme
+        broker, mktep, venue, suffix = unpack_fqme(fqme)
 
-        d['asset_type'] = sym_info['asset_type']
-        d['price_tick_size'] = (
-            sym_info.get('price_tick_size')
-            or
-            s.tick_size
-        )
-        d['lot_tick_size'] = (
-            sym_info.get('lot_tick_size')
-            or
-            s.lot_tick_size
-        )
+        # an asset resolved mkt where we have ``Asset`` info about
+        # each tradeable asset in the market.
+        if mkt.resolved:
+            dst: Asset = mkt.dst
+            d['asset_type'] = dst.atype
+
+        d['price_tick'] = mkt.price_tick
+        d['size_tick'] = mkt.size_tick
 
         if self.expiry is None:
             d.pop('expiry', None)
         elif expiry:
             d['expiry'] = str(expiry)
 
-        toml_clears_list = []
+        clears_table: tomlkit.Array = tomlkit.array()
+        clears_table.multiline(
+            multiline=True,
+            indent='',
+        )
 
         # reverse sort so latest clears are at top of section?
         for tid, data in iter_by_dt(clears):
-            inline_table = toml.TomlDecoder().get_empty_inline_table()
+
+            inline_table = tomlkit.inline_table()
 
             # serialize datetime to parsable `str`
-            inline_table['dt'] = str(data['dt'])
+            dtstr = inline_table['dt'] = data['dt'].isoformat('T')
+            assert 'Datetime' not in dtstr
 
             # insert optional clear fields in column order
             for k in ['ppu', 'accum_size']:
@@ -235,11 +184,11 @@ class Position(Struct):
                 inline_table[k] = data[k]
 
             inline_table['tid'] = tid
-            toml_clears_list.append(inline_table)
+            clears_table.append(inline_table)
 
-        d['clears'] = toml_clears_list
+        d['clears'] = clears_table
 
-        return fqsn, d
+        return fqme, d
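The switch from the hand-rolled ``toml`` encoder to ``tomlkit`` above
hinges on inline-table arrays; a minimal sketch using stock ``tomlkit``
follows (the sample clear entries are made up, and note the hunk's
extra ``indent=''`` argument may depend on a patched ``tomlkit``):

    import tomlkit

    # hypothetical clear entries, mirroring the shape used above
    clears = {
        'tid-1': {'dt': '2023-04-01T09:30:00', 'size': 10, 'price': 1.5},
    }

    arr = tomlkit.array()
    arr.multiline(True)  # render one inline table per line
    for tid, data in clears.items():
        tbl = tomlkit.inline_table()
        for k, v in data.items():
            tbl[k] = v
        tbl['tid'] = tid
        arr.append(tbl)

    doc = tomlkit.document()
    doc['clears'] = arr
    print(tomlkit.dumps(doc))
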
     def ensure_state(self) -> None:
         '''
@@ -249,7 +198,9 @@ class Position(Struct):
 
         '''
         clears = list(self.clears.values())
-        self.first_clear_dt = min(list(entry['dt'] for entry in clears))
+        self.first_clear_dt = min(
+            list(entry['dt'] for entry in clears)
+        )
         last_clear = clears[-1]
 
         csize = self.calc_size()
@@ -294,22 +245,19 @@ class Position(Struct):
     ) -> None:
 
         # XXX: better place to do this?
-        symbol = self.symbol
+        mkt = self.mkt
+        size_tick_digits = mkt.size_tick_digits
+        price_tick_digits = mkt.price_tick_digits
 
-        lot_size_digits = symbol.lot_size_digits
-        ppu, size = (
-            round(
-                msg['avg_price'],
-                ndigits=symbol.tick_size_digits
-            ),
-            round(
-                msg['size'],
-                ndigits=lot_size_digits
-            ),
-        )
-
-        self.ppu = ppu
-        self.size = size
+        self.ppu = round(
+            # TODO: change this to ppu?
+            msg['avg_price'],
+            ndigits=price_tick_digits,
+        )
+        self.size = round(
+            msg['size'],
+            ndigits=size_tick_digits,
+        )
 
     @property
     def dsize(self) -> float:
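The ``*_tick_digits`` props used above boil down to counting the
decimal places of a tick size; a hedged sketch with a hypothetical
helper (not the actual ``MktPair`` implementation):

    from decimal import Decimal

    def tick_digits(tick: Decimal) -> int:
        # decimal places implied by a tick size,
        # e.g. Decimal('0.001') -> 3
        return -tick.as_tuple().exponent

    assert tick_digits(Decimal('0.001')) == 3
    print(round(3.14159, ndigits=tick_digits(Decimal('0.01'))))  # 3.14
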
@@ -337,10 +285,16 @@ class Position(Struct):
         datetime-stamped order.
 
         '''
-        return iter_by_dt(self.clears)
+        # sort on the already existing datetime that should have
+        # been generated for the entry's table
+        return iter_by_dt(
+            self.clears,
+            key=lambda entry: entry[1]['dt']
+        )
 
     def calc_ppu(
         self,
 
         # include transaction cost in breakeven price
         # and presume the worst case of the same cost
         # to exit this transaction (even though in reality
@@ -471,20 +425,28 @@ class Position(Struct):
         asset using the clears/trade event table; zero if expired.
 
         '''
-        size: float = 0
+        size: float = 0.
 
         # time-expired pps (normally derivatives) are "closed"
         # and have a zero size.
         if self.expired():
-            return 0
+            return 0.
 
         for tid, entry in self.clears.items():
             size += entry['size']
+            # XXX: do we need it every step?
+            # no right since rounding is an LT?
+            # size = self.mkt.quantize(
+            #     size + entry['size'],
+            #     quantity_type='size',
+            # )
 
         if self.split_ratio is not None:
             size = round(size * self.split_ratio)
 
-        return float(self.symbol.quantize_size(size))
+        return float(
+            self.mkt.quantize(size),
+        )
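A standalone sketch of the size calc above, minus the
``MktPair.quantize()`` rounding step (the dict shape is made up):

    def calc_size(
        clears: dict[str, dict],
        split_ratio: int | None = None,
    ) -> float:
        # sum signed clear sizes then apply any share-split adjustment,
        # mirroring the ``calc_size()`` hunk above
        size = sum(entry['size'] for entry in clears.values())
        if split_ratio is not None:
            size = round(size * split_ratio)
        return float(size)

    print(calc_size({'t1': {'size': 10}, 't2': {'size': -4}}))  # 6.0
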
     def minimize_clears(
         self,
@@ -506,7 +468,9 @@ class Position(Struct):
         # scan for the last "net zero" position by iterating
         # transactions until the next net-zero size, rinse, repeat.
         for tid, clear in self.clears.items():
-            size += clear['size']
+            size = float(
+                self.mkt.quantize(size + clear['size'])
+            )
             clears_since_zero.append((tid, clear))
 
             if size == 0:
@@ -543,8 +507,8 @@ class Position(Struct):
 
         return clear
 
-    def sugest_split(self) -> float:
-        ...
+    # def sugest_split(self) -> float:
+    #     ...
 
 
 class PpTable(Struct):
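The net-zero scan in ``minimize_clears()`` can be sketched standalone
as (quantization elided, entries assumed time-ordered):

    def clears_since_last_zero(
        clears: dict[str, dict],
    ) -> list[tuple[str, dict]]:
        # walk the clears, restarting the accumulated list every time
        # the running size returns to zero; what's left is the minimal
        # set the hunk above keeps around
        since_zero: list[tuple[str, dict]] = []
        size = 0.0
        for tid, clear in clears.items():
            size += clear['size']
            since_zero.append((tid, clear))
            if size == 0:
                since_zero.clear()
        return since_zero
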
@@ -552,7 +516,8 @@ class PpTable(Struct):
     brokername: str
     acctid: str
     pps: dict[str, Position]
-    conf: Optional[dict] = {}
+    conf_path: Path
+    conf: dict | None = {}
 
     def update_from_trans(
         self,
@@ -564,24 +529,38 @@ class PpTable(Struct):
         pps = self.pps
         updated: dict[str, Position] = {}
 
-        # lifo update all pps from records
-        for tid, t in trans.items():
-
-            pp = pps.setdefault(
-                t.bsuid,
-
+        # lifo update all pps from records, ensuring
+        # we compute the PPU and size sorted in time!
+        for t in sorted(
+            trans.values(),
+            key=lambda t: t.dt,
+            reverse=True,
+        ):
+            fqme = t.fqme
+            bs_mktid = t.bs_mktid
+
+            # template the mkt-info presuming a legacy market ticks
+            # if no info exists in the transactions..
+            mkt: MktPair = t.sys
+            pp = pps.get(bs_mktid)
+            if not pp:
                 # if no existing pp, allocate fresh one.
-                Position(
-                    Symbol.from_fqsn(
-                        t.fqsn,
-                        info={},
-                    ) if not t.sym else t.sym,
+                pp = pps[bs_mktid] = Position(
+                    mkt=mkt,
                     size=0.0,
                     ppu=0.0,
-                    bsuid=t.bsuid,
+                    bs_mktid=bs_mktid,
                     expiry=t.expiry,
                 )
-            )
+            else:
+                # NOTE: if for some reason a "less resolved" mkt pair
+                # info has been set (based on the `.fqme` being
+                # a shorter string), instead use the one from the
+                # transaction since it likely has (more) full
+                # information from the provider.
+                if len(pp.mkt.fqme) < len(fqme):
+                    pp.mkt = mkt
 
             clears = pp.clears
             if clears:
                 first_clear_dt = pp.first_clear_dt
@@ -590,7 +569,10 @@ class PpTable(Struct):
                 # included in the current pps state.
                 if (
                     t.tid in clears
-                    or first_clear_dt and t.dt < first_clear_dt
+                    or (
+                        first_clear_dt
+                        and t.dt < first_clear_dt
+                    )
                 ):
                     # NOTE: likely you'll see repeats of the same
                     # ``Transaction`` passed in here if/when you are restarting
@@ -601,12 +583,14 @@ class PpTable(Struct):
 
             # update clearing table
             pp.add_clear(t)
-            updated[t.bsuid] = pp
+            updated[t.bs_mktid] = pp
 
         # minimize clears tables and update sizing.
-        for bsuid, pp in updated.items():
+        for bs_mktid, pp in updated.items():
             pp.ensure_state()
 
+        # deliver only the position entries that were actually updated
+        # (modified the state) from the input transaction set.
         return updated
 
     def dump_active(
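The dedup guard above can be read as a tiny predicate; a sketch over
plain dicts (field names mirror the hunk, the records are
hypothetical):

    def should_skip(
        t: dict,
        clears: dict[str, dict],
        first_clear_dt,
    ) -> bool:
        # drop a transaction when its id is already recorded or it
        # predates the oldest tracked clear, since then it can't
        # change the live position state
        return (
            t['tid'] in clears
            or (
                first_clear_dt is not None
                and t['dt'] < first_clear_dt
            )
        )
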
@@ -630,14 +614,8 @@ class PpTable(Struct):
         open_pp_objs: dict[str, Position] = {}
 
         pp_objs = self.pps
-        for bsuid in list(pp_objs):
-            pp = pp_objs[bsuid]
-
-            # XXX: debug hook for size mismatches
-            # qqqbsuid = 320227571
-            # if bsuid == qqqbsuid:
-            #     breakpoint()
+        for bs_mktid in list(pp_objs):
+            pp = pp_objs[bs_mktid]
 
             pp.ensure_state()
 
             if (
@@ -656,37 +634,42 @@ class PpTable(Struct):
             # ignored; the closed positions won't be written to the
             # ``pps.toml`` since ``pp_active_entries`` above is what's
             # written.
-                closed_pp_objs[bsuid] = pp
+                closed_pp_objs[bs_mktid] = pp
 
             else:
-                open_pp_objs[bsuid] = pp
+                open_pp_objs[bs_mktid] = pp
 
         return open_pp_objs, closed_pp_objs
 
     def to_toml(
         self,
+        active: dict[str, Position] | None = None,
 
     ) -> dict[str, Any]:
 
-        active, closed = self.dump_active()
+        if active is None:
+            active, _ = self.dump_active()
 
-        # ONLY dict-serialize all active positions; those that are closed
-        # we don't store in the ``pps.toml``.
+        # ONLY dict-serialize all active positions; those that are
+        # closed we don't store in the ``pps.toml``.
         to_toml_dict = {}
 
-        for bsuid, pos in active.items():
-
-            # keep the minimal amount of clears that make up this
-            # position since the last net-zero state.
+        pos: Position
+        for bs_mktid, pos in active.items():
+            # NOTE: we only store the minimal amount of clears that make up this
+            # position since the last net-zero state.
             pos.minimize_clears()
             pos.ensure_state()
 
             # serialize to pre-toml form
-            fqsn, asdict = pos.to_pretoml()
-            log.info(f'Updating active pp: {fqsn}')
+            fqme, asdict = pos.to_pretoml()
+
+            assert 'Datetime' not in asdict['clears'][0]['dt']
+            log.info(f'Updating active pp: {fqme}')
 
             # XXX: ugh, it's cuz we push the section under
             # the broker name.. maybe we need to rethink this?
-            brokerless_key = fqsn.removeprefix(f'{self.brokername}.')
+            brokerless_key = fqme.removeprefix(f'{self.brokername}.')
             to_toml_dict[brokerless_key] = asdict
 
         return to_toml_dict
@@ -699,33 +682,55 @@ class PpTable(Struct):
         # TODO: show diff output?
         # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
         # active, closed_pp_objs = table.dump_active()
-        pp_entries = self.to_toml()
+
+        active, closed = self.dump_active()
+        pp_entries = self.to_toml(active=active)
         if pp_entries:
-            log.info(f'Updating ``pps.toml`` for {path}:\n')
-            log.info(f'Current positions:\n{pp_entries}')
-            self.conf[self.brokername][self.acctid] = pp_entries
+            log.info(
+                f'Updating positions in ``{self.conf_path}``:\n'
+                f'\n{pformat(pp_entries)}'
+            )
 
-        elif (
-            self.brokername in self.conf and
-            self.acctid in self.conf[self.brokername]
-        ):
-            del self.conf[self.brokername][self.acctid]
-            if len(self.conf[self.brokername]) == 0:
-                del self.conf[self.brokername]
+            if self.brokername in self.conf:
+                log.warning(
+                    f'Rewriting {self.conf_path} keys to drop <broker.acct>!'
+                )
+                # legacy key schema including <brokername.account>, so
+                # rewrite all entries to drop those tables since we now
+                # put that in the filename!
+                accounts = self.conf.pop(self.brokername)
+                assert len(accounts) == 1
+                entries = accounts.pop(self.acctid)
+                self.conf.update(entries)
 
-        # TODO: why tf haven't they already done this for inline
-        # tables smh..
-        enc = PpsEncoder(preserve=True)
-        # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
-        enc.dump_funcs[
-            toml.decoder.InlineTableDict
-        ] = enc.dump_inline_table
+            self.conf.update(pp_entries)
+
+            # drop any entries that are computed as net-zero
+            # we don't care about storing in the pps file.
+            if closed:
+                bs_mktid: str
+                for bs_mktid, pos in closed.items():
+                    fqme: str = pos.mkt.fqme
+                    if fqme in self.conf:
+                        self.conf.pop(fqme)
+                    else:
+                        # TODO: we really need a diff set of
+                        # loglevels/colors per subsys.
+                        log.warning(
+                            f'Recent position for {fqme} was closed!'
+                        )
+
+        # if there are no active position entries according
+        # to the toml dump output above, then clear the config
+        # file of all entries.
+        elif self.conf:
+            for entry in list(self.conf):
+                del self.conf[entry]
 
         config.write(
-            self.conf,
-            'pps',
-            encoder=enc,
-            fail_empty=False
+            config=self.conf,
+            path=self.conf_path,
+            fail_empty=False,
         )
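A worked sketch of the legacy ``<brokername>.<acctid>`` key migration
above (values are made up):

    # old schema nested entries under a [brokername.acctid] table;
    # the new per-account file stores fqme keys at the top level
    conf = {
        'ib': {'algopaper': {'mnq.cme.ib': {'size': 1}}},
    }
    brokername, acctid = 'ib', 'algopaper'

    if brokername in conf:
        accounts = conf.pop(brokername)
        entries = accounts.pop(acctid)
        conf.update(entries)

    assert conf == {'mnq.cme.ib': {'size': 1}}
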
@@ -735,7 +740,7 @@ def load_pps_from_ledger(
     acctname: str,
 
     # post normalization filter on ledger entries to be processed
-    filter_by: Optional[list[dict]] = None,
+    filter_by: list[dict] | None = None,
 
 ) -> tuple[
     dict[str, Transaction],
@@ -745,7 +750,7 @@ def load_pps_from_ledger(
     Open a ledger file by broker name and account and read in and
     process any trade records into our normalized ``Transaction`` form
     and then update the equivalent ``PpTable`` and deliver the two
-    bsuid-mapped dict-sets of the transactions and pps.
+    bs_mktid-mapped dict-sets of the transactions and pps.
 
     '''
     with (
@@ -761,9 +766,9 @@ def load_pps_from_ledger(
 
     if filter_by:
         records = {}
-        bsuids = set(filter_by)
+        bs_mktids = set(filter_by)
         for tid, r in src_records.items():
-            if r.bsuid in bsuids:
+            if r.bs_mktid in bs_mktids:
                 records[tid] = r
     else:
         records = src_records
@@ -773,151 +778,33 @@ def load_pps_from_ledger(
     return records, updated
 
|
|
||||||
# TODO: instead see if we can hack tomli and tomli-w to do the same:
|
|
||||||
# - https://github.com/hukkin/tomli
|
|
||||||
# - https://github.com/hukkin/tomli-w
|
|
||||||
class PpsEncoder(toml.TomlEncoder):
|
|
||||||
'''
|
|
||||||
Special "styled" encoder that makes a ``pps.toml`` redable and
|
|
||||||
compact by putting `.clears` tables inline and everything else
|
|
||||||
flat-ish.
|
|
||||||
|
|
||||||
'''
|
|
||||||
separator = ','
|
|
||||||
|
|
||||||
def dump_list(self, v):
|
|
||||||
'''
|
|
||||||
Dump an inline list with a newline after every element and
|
|
||||||
with consideration for denoted inline table types.
|
|
||||||
|
|
||||||
'''
|
|
||||||
retval = "[\n"
|
|
||||||
for u in v:
|
|
||||||
if isinstance(u, toml.decoder.InlineTableDict):
|
|
||||||
out = self.dump_inline_table(u)
|
|
||||||
else:
|
|
||||||
out = str(self.dump_value(u))
|
|
||||||
|
|
||||||
retval += " " + out + "," + "\n"
|
|
||||||
retval += "]"
|
|
||||||
return retval
|
|
||||||
|
|
||||||
def dump_inline_table(self, section):
|
|
||||||
"""Preserve inline table in its compact syntax instead of expanding
|
|
||||||
into subsection.
|
|
||||||
https://github.com/toml-lang/toml#user-content-inline-table
|
|
||||||
"""
|
|
||||||
val_list = []
|
|
||||||
for k, v in section.items():
|
|
||||||
# if isinstance(v, toml.decoder.InlineTableDict):
|
|
||||||
if isinstance(v, dict):
|
|
||||||
val = self.dump_inline_table(v)
|
|
||||||
else:
|
|
||||||
val = str(self.dump_value(v))
|
|
||||||
|
|
||||||
val_list.append(k + " = " + val)
|
|
||||||
|
|
||||||
retval = "{ " + ", ".join(val_list) + " }"
|
|
||||||
return retval
|
|
||||||
|
|
||||||
def dump_sections(self, o, sup):
|
|
||||||
retstr = ""
|
|
||||||
if sup != "" and sup[-1] != ".":
|
|
||||||
sup += '.'
|
|
||||||
retdict = self._dict()
|
|
||||||
arraystr = ""
|
|
||||||
for section in o:
|
|
||||||
qsection = str(section)
|
|
||||||
value = o[section]
|
|
||||||
|
|
||||||
if not re.match(r'^[A-Za-z0-9_-]+$', section):
|
|
||||||
qsection = toml.encoder._dump_str(section)
|
|
||||||
|
|
||||||
# arrayoftables = False
|
|
||||||
if (
|
|
||||||
self.preserve
|
|
||||||
and isinstance(value, toml.decoder.InlineTableDict)
|
|
||||||
):
|
|
||||||
retstr += (
|
|
||||||
qsection
|
|
||||||
+
|
|
||||||
" = "
|
|
||||||
+
|
|
||||||
self.dump_inline_table(o[section])
|
|
||||||
+
|
|
||||||
'\n' # only on the final terminating left brace
|
|
||||||
)
|
|
||||||
|
|
||||||
# XXX: this code i'm pretty sure is just blatantly bad
|
|
||||||
# and/or wrong..
|
|
||||||
# if isinstance(o[section], list):
|
|
||||||
# for a in o[section]:
|
|
||||||
# if isinstance(a, dict):
|
|
||||||
# arrayoftables = True
|
|
||||||
# if arrayoftables:
|
|
||||||
# for a in o[section]:
|
|
||||||
# arraytabstr = "\n"
|
|
||||||
# arraystr += "[[" + sup + qsection + "]]\n"
|
|
||||||
# s, d = self.dump_sections(a, sup + qsection)
|
|
||||||
# if s:
|
|
||||||
# if s[0] == "[":
|
|
||||||
# arraytabstr += s
|
|
||||||
# else:
|
|
||||||
# arraystr += s
|
|
||||||
# while d:
|
|
||||||
# newd = self._dict()
|
|
||||||
# for dsec in d:
|
|
||||||
# s1, d1 = self.dump_sections(d[dsec], sup +
|
|
||||||
# qsection + "." +
|
|
||||||
# dsec)
|
|
||||||
# if s1:
|
|
||||||
# arraytabstr += ("[" + sup + qsection +
|
|
||||||
# "." + dsec + "]\n")
|
|
||||||
# arraytabstr += s1
|
|
||||||
# for s1 in d1:
|
|
||||||
# newd[dsec + "." + s1] = d1[s1]
|
|
||||||
# d = newd
|
|
||||||
# arraystr += arraytabstr
|
|
||||||
|
|
||||||
elif isinstance(value, dict):
|
|
||||||
retdict[qsection] = o[section]
|
|
||||||
|
|
||||||
elif o[section] is not None:
|
|
||||||
retstr += (
|
|
||||||
qsection
|
|
||||||
+
|
|
||||||
" = "
|
|
||||||
+
|
|
||||||
str(self.dump_value(o[section]))
|
|
||||||
)
|
|
||||||
|
|
||||||
# if not isinstance(value, dict):
|
|
||||||
if not isinstance(value, toml.decoder.InlineTableDict):
|
|
||||||
# inline tables should not contain newlines:
|
|
||||||
# https://toml.io/en/v1.0.0#inline-table
|
|
||||||
retstr += '\n'
|
|
||||||
|
|
||||||
else:
|
|
||||||
raise ValueError(value)
|
|
||||||
|
|
||||||
retstr += arraystr
|
|
||||||
return (retstr, retdict)
|
|
||||||
|
|
||||||
|
|
||||||
 @cm
 def open_pps(
     brokername: str,
     acctid: str,
     write_on_exit: bool = False,
 
 ) -> Generator[PpTable, None, None]:
     '''
     Read out broker-specific position entries from
     incremental update file: ``pps.toml``.
 
     '''
-    conf, path = config.load('pps')
-    brokersection = conf.setdefault(brokername, {})
-    pps = brokersection.setdefault(acctid, {})
+    conf: dict
+    conf_path: Path
+    conf, conf_path = config.load_account(brokername, acctid)
+
+    if brokername in conf:
+        log.warning(
+            f'Rewriting {conf_path} keys to drop <broker.acct>!'
+        )
+        # legacy key schema including <brokername.account>, so
+        # rewrite all entries to drop those tables since we now
+        # put that in the filename!
+        accounts = conf.pop(brokername)
+        for acctid in accounts.copy():
+            entries = accounts.pop(acctid)
+            conf.update(entries)
 
     # TODO: ideally we can pass in an existing
     # pps state to this right? such that we
@@ -934,61 +821,72 @@ def open_pps(
         brokername,
         acctid,
         pp_objs,
+        conf_path,
         conf=conf,
     )
 
     # unmarshal/load ``pps.toml`` config entries into object form
     # and update `PpTable` obj entries.
-    for fqsn, entry in pps.items():
-        bsuid = entry['bsuid']
-        symbol = Symbol.from_fqsn(
-            fqsn,
-
-            # NOTE & TODO: right now we fill in the defaults from
-            # `.data._source.Symbol` but eventually these should always
-            # either be already written to the pos table or provided at
-            # write time to ensure always having these values somewhere
-            # and thus allowing us to get our pos sizing precision
-            # correct!
-            info={
-                'asset_type': entry.get('asset_type', '<unknown>'),
-                'price_tick_size': entry.get('price_tick_size', 0.01),
-                'lot_tick_size': entry.get('lot_tick_size', 0.0),
-            }
-        )
+    for fqme, entry in conf.items():
+
+        # atype = entry.get('asset_type', '<unknown>')
+
+        # unique broker market id
+        bs_mktid = str(
+            entry.get('bsuid')
+            or entry.get('bs_mktid')
+        )
+        price_tick = Decimal(str(
+            entry.get('price_tick_size')
+            or entry.get('price_tick')
+            or '0.01'
+        ))
+        size_tick = Decimal(str(
+            entry.get('lot_tick_size')
+            or entry.get('size_tick')
+            or '0.0'
+        ))
+
+        # load the pair using the fqme which
+        # will make the pair "unresolved" until
+        # the backend broker actually loads
+        # the market and position info.
+        mkt = MktPair.from_fqme(
+            fqme,
+            price_tick=price_tick,
+            size_tick=size_tick,
+            bs_mktid=bs_mktid
+        )
+
+        # TODO: RE: general "events" instead of just "clears":
+        # - make this an `events` field and support more event types
+        #   such as 'split', 'name_change', 'mkt_info', etc..
+        # - should we make a ``Struct`` for clear/event entries? convert
+        #   "clear events table" from the toml config (list of a dicts)
+        #   and load it into object form for use in position processing
+        #   of new clear events.
 
         # convert clears sub-tables (only in this form
         # for toml re-presentation) back into a master table.
-        clears_list = entry['clears']
-
-        # index clears entries in "object" form by tid in a top
-        # level dict instead of a list (as is presented in our
-        # ``pps.toml``).
-        clears = pp_objs.setdefault(bsuid, {})
-
-        # TODO: should we make a ``Struct`` for clear/event entries?
-        # convert "clear events table" from the toml config (list of
-        # a dicts) and load it into object form for use in position
-        # processing of new clear events.
+        toml_clears_list: list[dict[str, Any]] = entry['clears']
         trans: list[Transaction] = []
+        for clears_table in toml_clears_list:
 
-        for clears_table in clears_list:
-            tid = clears_table.pop('tid')
+            tid = clears_table.get('tid')
             dtstr = clears_table['dt']
             dt = pendulum.parse(dtstr)
             clears_table['dt'] = dt
 
             trans.append(Transaction(
-                fqsn=bsuid,
-                sym=symbol,
-                bsuid=bsuid,
+                fqme=bs_mktid,
+                sym=mkt,
+                bs_mktid=bs_mktid,
                 tid=tid,
                 size=clears_table['size'],
                 price=clears_table['price'],
                 cost=clears_table['cost'],
                 dt=dt,
             ))
-            clears[tid] = clears_table
 
         size = entry['size']
 
@@ -1004,13 +902,13 @@ def open_pps(
         if expiry:
             expiry = pendulum.parse(expiry)
 
-        pp = pp_objs[bsuid] = Position(
-            symbol,
+        pp = pp_objs[bs_mktid] = Position(
+            mkt,
             size=size,
             ppu=ppu,
             split_ratio=split_ratio,
             expiry=expiry,
-            bsuid=entry['bsuid'],
+            bs_mktid=bs_mktid,
        )
 
         # XXX: super critical, we need to be sure to include
@@ -1029,19 +927,3 @@ def open_pps(
     finally:
         if write_on_exit:
             table.write_config()
-
-
-if __name__ == '__main__':
-    import sys
-
-    args = sys.argv
-    assert len(args) > 1, 'Specify account(s) from `brokers.toml`'
-    args = args[1:]
-    for acctid in args:
-        broker, name = acctid.split('.')
-        trans, updated_pps = load_pps_from_ledger(broker, name)
-        print(
-            f'Processing transactions into pps for {broker}:{acctid}\n'
-            f'{pformat(trans)}\n\n'
-            f'{pformat(updated_pps)}'
-        )
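Hedged usage sketch of the ``open_pps()`` cm above; the broker/account
names are hypothetical:

    with open_pps('ib', 'paper', write_on_exit=True) as table:
        for bs_mktid, pos in table.pps.items():
            print(bs_mktid, pos.size, pos.ppu)
    # on exit any mutated state is flushed back to the per-account toml
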
@@ -0,0 +1,234 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+CLI front end for trades ledger and position tracking management.
+
+'''
+from typing import (
+    Any,
+)
+
+from rich.console import Console
+from rich.markdown import Markdown
+import tractor
+import trio
+import typer
+
+from ..log import get_logger
+from ..service import (
+    open_piker_runtime,
+)
+from ..clearing._messages import BrokerdPosition
+from ..calc import humanize
+
+
+ledger = typer.Typer()
+
+
+def broker_init(
+    brokername: str,
+    loglevel: str | None = None,
+
+    **start_actor_kwargs,
+
+) -> dict:
+    '''
+    Given an input broker name, load all named arguments
+    which can be passed to a daemon + context spawn for
+    the relevant `brokerd` service endpoint.
+
+    '''
+    from ..brokers import get_brokermod
+    brokermod = get_brokermod(brokername)
+    modpath = brokermod.__name__
+
+    start_actor_kwargs['name'] = f'brokerd.{brokername}'
+    start_actor_kwargs.update(
+        getattr(
+            brokermod,
+            '_spawn_kwargs',
+            {},
+        )
+    )
+
+    # lookup actor-enabled modules declared by the backend offering the
+    # `brokerd` endpoint(s).
+    enabled = start_actor_kwargs['enable_modules'] = [modpath]
+    for submodname in getattr(
+        brokermod,
+        '__enable_modules__',
+        [],
+    ):
+        subpath = f'{modpath}.{submodname}'
+        enabled.append(subpath)
+
+    # TODO XXX: DO WE NEED THIS?
+    # enabled.append('piker.data.feed')
+
+    # non-blocking setup of brokerd service nursery
+    from ..brokers._daemon import _setup_persistent_brokerd
+
+    return (
+        start_actor_kwargs,  # to `ActorNursery.start_actor()`
+        _setup_persistent_brokerd,  # service task ep
+        getattr(  # trades endpoint
+            brokermod,
+            'trades_dialogue',
+            None,
+        ),
+    )
+
+
+@ledger.command()
+def sync(
+    fully_qualified_account_name: str,
+    pdb: bool = False,
+
+    loglevel: str = typer.Option(
+        'error',
+        "-l",
+    ),
+):
+    log = get_logger(loglevel)
+    console = Console()
+
+    try:
+        brokername, account = fully_qualified_account_name.split('.')
+    except ValueError:
+        md = Markdown(
+            f'=> `{fully_qualified_account_name}` <=\n\n'
+            'is not a valid '
+            '__fully qualified account name?__\n\n'
+            'Your account name needs to be of the form '
+            '`<brokername>.<account_name>`\n'
+        )
+        console.print(md)
+        return
+
+    start_kwargs, _, trades_ep = broker_init(
+        brokername,
+        loglevel=loglevel,
+    )
+
+    async def main():
+
+        async with (
+            open_piker_runtime(
+                name='ledger_cli',
+                loglevel=loglevel,
+                debug_mode=pdb,
+
+            ) as (actor, sockaddr),
+
+            tractor.open_nursery() as an,
+        ):
+            log.info(
+                f'Piker runtime up as {actor.uid}@{sockaddr}'
+            )
+
+            portal = await an.start_actor(
+                loglevel=loglevel,
+                debug_mode=pdb,
+                **start_kwargs,
+            )
+
+            if (
+                brokername == 'paper'
+                or trades_ep is None
+            ):
+                from ..clearing import _paper_engine as paper
+                open_trades_endpoint = paper.open_paperboi(
+                    fqme=None,  # tell paper to not start clearing loop
+                    broker=brokername,
+                    loglevel=loglevel,
+                )
+            else:
+                # open live brokerd trades endpoint
+                open_trades_endpoint = portal.open_context(
+                    trades_ep,
+                    loglevel=loglevel,
+                )
+
+            positions: dict[str, Any]
+            accounts: list[str]
+            async with (
+                open_trades_endpoint as (
+                    brokerd_ctx,
+                    (positions, accounts),
+                ),
+            ):
+                assert len(accounts) == 1
+                summary: str = (
+                    '[dim underline]Piker Position Summary[/] '
+                    f'[dim blue underline]{brokername}[/]'
+                    '[dim].[/]'
+                    f'[blue underline]{account}[/]'
+                    f'[dim underline] -> total pps: [/]'
+                    f'[green]{len(positions)}[/]\n'
+                )
+                for ppdict in positions:
+                    ppmsg = BrokerdPosition(**ppdict)
+                    size = ppmsg.size
+                    if size:
+                        ppu: float = round(
+                            ppmsg.avg_price,
+                            ndigits=2,
+                        )
+                        cost_basis: str = humanize(size * ppu)
+                        h_size: str = humanize(size)
+
+                        if size < 0:
+                            pcolor = 'red'
+                        else:
+                            pcolor = 'green'
+
+                        # semantic-highlight of fqme
+                        fqme = ppmsg.symbol
+                        tokens = fqme.split('.')
+                        styled_fqme = f'[blue underline]{tokens[0]}[/]'
+                        for tok in tokens[1:]:
+                            styled_fqme += '[dim].[/]'
+                            styled_fqme += f'[dim blue underline]{tok}[/]'
+
+                        # TODO: instead display in a ``rich.Table``?
+                        summary += (
+                            styled_fqme +
+                            '[dim]: [/]'
+                            f'[{pcolor}]{h_size}[/]'
+                            '[dim blue]u @[/]'
+                            f'[{pcolor}]{ppu}[/]'
+                            '[dim blue] = [/]'
+                            f'[{pcolor}]$ {cost_basis}\n[/]'
+                        )
+
+                console.print(summary)
+
+                # exit via ctx cancellation.
+                await brokerd_ctx.cancel(timeout=1)
+                # TODO: once ported to newer tractor branch we should
+                # be able to do a loop like this:
+                # while brokerd_ctx.cancel_called_remote is None:
+                #     await trio.sleep(0.01)
+                #     await brokerd_ctx.cancel()
+
+            await portal.cancel_actor()
+
+    trio.run(main)
+
+
+if __name__ == "__main__":
+    ledger()  # this is called from ``>> ledger <accountname>``
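Since ``ledger`` is a plain ``typer.Typer()`` app, it can be exercised
without a shell via typer's test runner; a minimal sketch:

    from typer.testing import CliRunner

    runner = CliRunner()
    # render the app's help text; the ``ledger`` object is the Typer
    # instance defined in the new module above
    result = runner.invoke(ledger, ['--help'])
    print(result.output)
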
@@ -25,6 +25,7 @@ __brokers__ = [
     'ib',
     'kraken',
     'kucoin'
+
     # broken but used to work
     # 'questrade',
     # 'robinhood',
@@ -0,0 +1,169 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Broker-daemon-actor "endpoint-hooks": the service task entry points for
+``brokerd``.
+
+'''
+from contextlib import (
+    asynccontextmanager as acm,
+)
+
+import tractor
+import trio
+
+from . import _util
+from . import get_brokermod
+
+# `brokerd` enabled modules
+# TODO: move this def to the `.data` subpkg..
+# NOTE: keeping this list as small as possible is part of our caps-sec
+# model and should be treated with utmost care!
+_data_mods = [
+    'piker.brokers.core',
+    'piker.brokers.data',
+    'piker.brokers._daemon',
+    'piker.data',
+    'piker.data.feed',
+    'piker.data._sampling'
+]
+
+
+# TODO: we should rename the daemon to datad prolly once we split up
+# broker vs. data tasks into separate actors?
+@tractor.context
+async def _setup_persistent_brokerd(
+    ctx: tractor.Context,
+    brokername: str,
+    loglevel: str | None = None,
+
+) -> None:
+    '''
+    Allocate an actor-wide service nursery in ``brokerd``
+    such that feeds can be run in the background persistently by
+    the broker backend as needed.
+
+    '''
+    log = _util.get_console_log(
+        loglevel or tractor.current_actor().loglevel,
+        name=f'{_util.subsys}.{brokername}',
+    )
+    # set global for this actor to this new process-wide instance B)
+    _util.log = log
+
+    from piker.data.feed import (
+        _bus,
+        get_feed_bus,
+    )
+    global _bus
+    assert not _bus
+
+    async with trio.open_nursery() as service_nursery:
+        # assign a nursery to the feeds bus for spawning
+        # background tasks from clients
+        get_feed_bus(brokername, service_nursery)
+
+        # unblock caller
+        await ctx.started()
+
+        # we pin this task to keep the feeds manager active until the
+        # parent actor decides to tear it down
+        await trio.sleep_forever()
+
+
+async def spawn_brokerd(
+
+    brokername: str,
+    loglevel: str | None = None,
+
+    **tractor_kwargs,
+
+) -> bool:
+
+    from piker.service import Services
+    from piker.service._util import log  # use service mngr log
+
+    log.info(f'Spawning {brokername} broker daemon')
+
+    brokermod = get_brokermod(brokername)
+    dname = f'brokerd.{brokername}'
+
+    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
+    tractor_kwargs.update(extra_tractor_kwargs)
+
+    # ask `pikerd` to spawn a new sub-actor and manage it under its
+    # actor nursery
+    modpath = brokermod.__name__
+    broker_enable = [modpath]
+    for submodname in getattr(
+        brokermod,
+        '__enable_modules__',
+        [],
+    ):
+        subpath = f'{modpath}.{submodname}'
+        broker_enable.append(subpath)
+
+    portal = await Services.actor_n.start_actor(
+        dname,
+        enable_modules=_data_mods + broker_enable,
+        loglevel=loglevel,
+        debug_mode=Services.debug_mode,
+        **tractor_kwargs
+    )
+
+    # non-blocking setup of brokerd service nursery
+    await Services.start_service_task(
+        dname,
+        portal,
+
+        # signature of target root-task endpoint
+        _setup_persistent_brokerd,
+        brokername=brokername,
+        loglevel=loglevel,
+    )
+    return True
+
+
+@acm
+async def maybe_spawn_brokerd(
+
+    brokername: str,
+    loglevel: str | None = None,
+
+    **pikerd_kwargs,
+
+) -> tractor.Portal:
+    '''
+    Helper to spawn a brokerd service *from* a client
+    who wishes to use the sub-actor-daemon.
+
+    '''
+    from piker.service import maybe_spawn_daemon
+
+    async with maybe_spawn_daemon(
+
+        f'brokerd.{brokername}',
+        service_task_target=spawn_brokerd,
+        spawn_args={
+            'brokername': brokername,
+        },
+        loglevel=loglevel,
+
+        **pikerd_kwargs,
+
+    ) as portal:
+        yield portal
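Hedged usage sketch of ``maybe_spawn_brokerd()`` from an existing trio
task (the broker name is just an example):

    async def use_brokerd():
        async with maybe_spawn_brokerd(
            'binance',
            loglevel='info',
        ) as portal:
            # the portal can now be used to open IPC contexts/streams
            # against the (possibly freshly spawned) `brokerd` sub-actor
            print(portal.channel.uid)
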
@@ -15,13 +15,29 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 """
-Handy utils.
+Handy cross-broker utils.
 
 """
+from functools import partial
+
 import json
 import asks
 import logging
 
-from ..log import colorize_json
+from ..log import (
+    get_logger,
+    get_console_log,
+    colorize_json,
+)
+
+subsys: str = 'piker.brokers'
+
+# NOTE: level should be reset by any actor that is spawned
+log = get_logger(subsys)
+
+get_console_log = partial(
+    get_console_log,
+    name=subsys,
+)
 
 
 class BrokerError(Exception):
@@ -69,7 +85,6 @@ class DataThrottle(BrokerError):
     # TODO: add in throttle metrics/feedback
 
 
-
 def resproc(
     resp: asks.response_objects.Response,
     log: logging.Logger,
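The ``partial`` re-export above just pre-binds the subsystem name so
every broker module gets a consistently named logger; a self-contained
sketch with a stand-in factory (the body here is hypothetical, not
piker's actual ``get_console_log``):

    from functools import partial
    import logging

    def get_console_log(level: str | None = None, name: str | None = None):
        # stand-in: configure and return a named logger
        log = logging.getLogger(name)
        if level:
            log.setLevel(level.upper())
        return log

    get_brokers_log = partial(get_console_log, name='piker.brokers')
    log = get_brokers_log('info')
    print(log.name)  # 'piker.brokers'
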
@@ -1,5 +1,8 @@
 # piker: trading gear for hackers
-# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
+# Copyright (C)
+#   Guillermo Rodriguez
+#   Tyler Goodlet
+#   (in stewardship for pikers)
 
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -18,15 +21,19 @@
 Binance backend
 
 """
-from contextlib import asynccontextmanager as acm
+from contextlib import (
+    asynccontextmanager as acm,
+    aclosing,
+)
 from datetime import datetime
+from decimal import Decimal
+import itertools
 from typing import (
     Any, Union, Optional,
     AsyncGenerator, Callable,
 )
 import time
 
-from trio_util import trio_async_generator
 import trio
 from trio_typing import TaskStatus
 import pendulum
@@ -34,26 +41,30 @@ import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np
 import tractor
-import wsproto
 
+from .._cacheables import async_lifo_cache
+from ..accounting._mktinfo import (
+    Asset,
+    MktPair,
+    digits_to_dec,
+)
 from .._cacheables import open_cached_client
 from ._util import (
     resproc,
     SymbolNotFound,
     DataUnavailable,
 )
-from ..log import (
-    get_logger,
+from ._util import (
+    log,
     get_console_log,
 )
 from ..data.types import Struct
+from ..data.validate import FeedInit
 from ..data._web_bs import (
     open_autorecon_ws,
     NoBsWs,
 )
 
-log = get_logger(__name__)
-
 
 _url = 'https://api.binance.com'
@@ -88,6 +99,9 @@ _show_wap_in_history = False
 
 # https://binance-docs.github.io/apidocs/spot/en/#exchange-information
 
+
+# TODO: make this frozen again by pre-processing the
+# filters list to a dict at init time?
 class Pair(Struct, frozen=True):
     symbol: str
     status: str
@@ -114,9 +128,24 @@ class Pair(Struct, frozen=True):
     defaultSelfTradePreventionMode: str
     allowedSelfTradePreventionModes: list[str]
 
-    filters: list[dict[str, Union[str, int, float]]]
+    filters: dict[
+        str,
+        Union[str, int, float]
+    ]
     permissions: list[str]
 
+    @property
+    def price_tick(self) -> Decimal:
+        # XXX: lul, after manually inspecting the response format we
+        # just directly pick out the info we need
+        step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0')
+        return Decimal(step_size)
+
+    @property
+    def size_tick(self) -> Decimal:
+        step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0')
+        return Decimal(step_size)
+
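The two ``Pair`` props above reduce to parsing binance's
``exchangeInfo`` filters; a runnable sketch with a trimmed, made-up
payload:

    from decimal import Decimal

    # hypothetical exchangeInfo filter payload, trimmed to the two
    # entries the props read
    filters = {
        'PRICE_FILTER': {'tickSize': '0.01000000'},
        'LOT_SIZE': {'stepSize': '0.00100000'},
    }

    price_tick = Decimal(filters['PRICE_FILTER']['tickSize'].rstrip('0'))
    size_tick = Decimal(filters['LOT_SIZE']['stepSize'].rstrip('0'))
    print(price_tick, size_tick)  # 0.01 0.001
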
 class OHLC(Struct):
     '''
@@ -147,6 +176,18 @@ class OHLC(Struct):
     bar_wap: float = 0.0
 
 
+class L1(Struct):
+    # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams
+
+    update_id: int
+    sym: str
+
+    bid: float
+    bsize: float
+    ask: float
+    asize: float
+
+
 # convert datetime obj timestamp to unixtime in milliseconds
 def binance_timestamp(
     when: datetime
@@ -159,7 +200,7 @@ class Client:
     def __init__(self) -> None:
         self._sesh = asks.Session(connections=4)
         self._sesh.base_location = _url
-        self._pairs: dict[str, Any] = {}
+        self._pairs: dict[str, Pair] = {}
 
     async def _api(
         self,
@@ -173,49 +214,59 @@ class Client:
         )
         return resproc(resp, log)
 
-    async def symbol_info(
+    async def exch_info(
+
         self,
-        sym: Optional[str] = None,
+        sym: str | None = None,
 
-    ) -> dict[str, Any]:
-        '''Get symbol info for the exchange.
+    ) -> dict[str, Pair] | Pair:
+        '''
+        Fresh exchange-pairs info query for symbol ``sym: str``:
+        https://binance-docs.github.io/apidocs/spot/en/#exchange-information
 
         '''
-        # TODO: we can load from our self._pairs cache
-        # on repeat calls...
+        cached_pair = self._pairs.get(sym)
+        if cached_pair:
+            return cached_pair
 
-        # will retrieve all symbols by default
+        # retrieve all symbols by default
         params = {}
 
         if sym is not None:
             sym = sym.lower()
             params = {'symbol': sym}
 
-        resp = await self._api(
-            'exchangeInfo',
-            params=params,
-        )
+        resp = await self._api('exchangeInfo', params=params)
 
         entries = resp['symbols']
         if not entries:
-            raise SymbolNotFound(f'{sym} not found')
+            raise SymbolNotFound(f'{sym} not found:\n{resp}')
 
-        syms = {item['symbol']: item for item in entries}
+        # pre-process .filters field into a table
+        pairs = {}
+        for item in entries:
+            symbol = item['symbol']
+            filters = {}
+            filters_ls: list = item.pop('filters')
+            for entry in filters_ls:
+                ftype = entry['filterType']
+                filters[ftype] = entry
+
+            pairs[symbol] = Pair(
+                filters=filters,
+                **item,
+            )
+
+        # pairs = {
+        #     item['symbol']: Pair(**item) for item in entries
+        # }
+        self._pairs.update(pairs)
 
         if sym is not None:
-            return syms[sym]
+            return pairs[sym]
         else:
-            return syms
-
-    async def cache_symbols(
-        self,
-    ) -> dict:
-        if not self._pairs:
-            self._pairs = await self.symbol_info()
-
             return self._pairs
 
+    symbol_info = exch_info
+
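A standalone (synchronous) sketch of the cache-first lookup pattern in
``exch_info()``; ``fetch_all_pairs()`` is a hypothetical stand-in for
the HTTP call:

    _pairs: dict[str, dict] = {}

    def fetch_all_pairs() -> dict[str, dict]:
        # stand-in for the ``exchangeInfo`` request
        return {'BTCUSDT': {'symbol': 'BTCUSDT'}}

    def exch_info(sym: str | None = None) -> dict:
        # return a cached pair when present, else fetch-all and memoize
        cached = _pairs.get(sym)
        if cached:
            return cached
        pairs = fetch_all_pairs()
        _pairs.update(pairs)
        return pairs[sym] if sym is not None else pairs
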
     async def search_symbols(
         self,
         pattern: str,
@@ -224,7 +275,7 @@ class Client:
         if self._pairs is not None:
             data = self._pairs
         else:
-            data = await self.symbol_info()
+            data = await self.exch_info()
 
         matches = fuzzy.extractBests(
             pattern,
@@ -299,7 +350,8 @@ class Client:
 @acm
 async def get_client() -> Client:
     client = Client()
-    await client.cache_symbols()
+    log.info('Caching exchange infos..')
+    await client.exch_info()
     yield client
 
 
@ -318,64 +370,90 @@ class AggTrade(Struct):
|
||||||
M: bool # Ignore
|
M: bool # Ignore
|
||||||
|
|
||||||
|
|
||||||
@trio_async_generator
|
|
||||||
async def stream_messages(
|
async def stream_messages(
|
||||||
ws: NoBsWs,
|
ws: NoBsWs,
|
||||||
) -> AsyncGenerator[NoBsWs, dict]:
|
) -> AsyncGenerator[NoBsWs, dict]:
|
||||||
|
|
||||||
timeouts = 0
|
# TODO: match syntax here!
|
||||||
while True:
|
msg: dict[str, Any]
|
||||||
|
async for msg in ws:
|
||||||
with trio.move_on_after(3) as cs:
|
match msg:
|
||||||
msg = await ws.recv_msg()
|
|
||||||
|
|
||||||
if cs.cancelled_caught:
|
|
||||||
|
|
||||||
timeouts += 1
|
|
||||||
if timeouts > 2:
|
|
||||||
log.error("binance feed seems down and slow af? rebooting...")
|
|
||||||
await ws._connect()
|
|
||||||
|
|
||||||
continue
|
|
||||||
|
|
||||||
# for l1 streams binance doesn't add an event type field so
|
# for l1 streams binance doesn't add an event type field so
|
||||||
# identify those messages by matching keys
|
# identify those messages by matching keys
|
||||||
# https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams
|
# https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams
|
||||||
|
case {
|
||||||
|
# NOTE: this is never an old value it seems, so
|
||||||
|
# they are always sending real L1 spread updates.
|
||||||
|
'u': upid, # update id
|
||||||
|
's': sym,
|
||||||
|
'b': bid,
|
||||||
|
'B': bsize,
|
||||||
|
'a': ask,
|
||||||
|
'A': asize,
|
||||||
|
}:
|
||||||
|
# TODO: it would be super nice to have a `L1` piker type
|
||||||
|
# which "renders" incremental tick updates from a packed
|
||||||
|
# msg-struct:
|
||||||
|
# - backend msgs after packed into the type such that we
|
||||||
|
# can reduce IPC usage but without each backend having
|
||||||
|
# to do that incremental update logic manually B)
|
||||||
|
# - would it maybe be more efficient to use this instead?
|
||||||
|
# https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream
|
||||||
|
l1 = L1(
|
||||||
|
update_id=upid,
|
||||||
|
sym=sym,
|
||||||
|
bid=bid,
|
||||||
|
bsize=bsize,
|
||||||
|
ask=ask,
|
||||||
|
asize=asize,
|
||||||
|
)
|
||||||
|
l1.typecast()
|
||||||
|
|
||||||
if msg.get('u'):
|
# repack into piker's tick-quote format
|
||||||
sym = msg['s']
|
|
||||||
bid = float(msg['b'])
|
|
||||||
bsize = float(msg['B'])
|
|
||||||
ask = float(msg['a'])
|
|
||||||
asize = float(msg['A'])
|
|
||||||
|
|
||||||
yield 'l1', {
|
yield 'l1', {
|
||||||
'symbol': sym,
|
'symbol': l1.sym,
|
||||||
'ticks': [
|
'ticks': [
|
||||||
{'type': 'bid', 'price': bid, 'size': bsize},
|
{
|
||||||
{'type': 'bsize', 'price': bid, 'size': bsize},
|
'type': 'bid',
|
||||||
{'type': 'ask', 'price': ask, 'size': asize},
|
'price': l1.bid,
|
||||||
{'type': 'asize', 'price': ask, 'size': asize}
|
'size': l1.bsize,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'bsize',
|
||||||
|
'price': l1.bid,
|
||||||
|
'size': l1.bsize,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'ask',
|
||||||
|
'price': l1.ask,
|
||||||
|
'size': l1.asize,
|
||||||
|
},
|
||||||
|
{
|
||||||
|
'type': 'asize',
|
||||||
|
'price': l1.ask,
|
||||||
|
'size': l1.asize,
|
||||||
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
|
|
||||||
elif msg.get('e') == 'aggTrade':
|
# https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams
|
||||||
|
case {
|
||||||
# NOTE: this is purely for a definition, ``msgspec.Struct``
|
'e': 'aggTrade',
|
||||||
# does not runtime-validate until you decode/encode.
|
}:
|
||||||
# see: https://jcristharif.com/msgspec/structs.html#type-validation
|
# NOTE: this is purely for a definition,
|
||||||
|
# ``msgspec.Struct`` does not runtime-validate until you
|
||||||
|
# decode/encode, see:
|
||||||
|
# https://jcristharif.com/msgspec/structs.html#type-validation
|
||||||
msg = AggTrade(**msg)
|
msg = AggTrade(**msg)
|
||||||
|
msg.typecast()
|
||||||
# TODO: type out and require this quote format
|
|
||||||
# from all backends!
|
|
||||||
yield 'trade', {
|
yield 'trade', {
|
||||||
'symbol': msg.s,
|
'symbol': msg.s,
|
||||||
'last': msg.p,
|
'last': msg.p,
|
||||||
'brokerd_ts': time.time(),
|
'brokerd_ts': time.time(),
|
||||||
'ticks': [{
|
'ticks': [{
|
||||||
'type': 'trade',
|
'type': 'trade',
|
||||||
'price': float(msg.p),
|
'price': msg.p,
|
||||||
'size': float(msg.q),
|
'size': msg.q,
|
||||||
'broker_ts': msg.T,
|
'broker_ts': msg.T,
|
||||||
}],
|
}],
|
||||||
}
|
}
|
||||||
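# A runnable aside on the `msgspec.Struct` caveat noted above: field
# types are *not* checked on direct construction, only on a
# decode/encode roundtrip. (Standalone sketch, not part of this
# changeset.)
import msgspec

class AggTradeDemo(msgspec.Struct):
    p: float  # price
    q: float  # quantity

# no error here even though the field types are wrong..
t = AggTradeDemo(p='not-a-float', q='nope')

# ..but decoding against the type does validate:
try:
    msgspec.json.decode(b'{"p": "oops", "q": 1}', type=AggTradeDemo)
except msgspec.ValidationError as err:
    print(f'rejected: {err}')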
@ -398,10 +476,12 @@ def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:

 @acm
 async def open_history_client(
-    symbol: str,
+    mkt: MktPair,

 ) -> tuple[Callable, int]:

+    symbol: str = mkt.bs_fqme
+
     # TODO implement history getter for the new storage layer.
     async with open_cached_client('binance') as client:

@ -439,6 +519,35 @@ async def open_history_client(
         yield get_ohlc, {'erlangs': 3, 'rate': 3}


+@async_lifo_cache()
+async def get_mkt_info(
+    fqme: str,
+
+) -> tuple[MktPair, Pair]:
+
+    async with open_cached_client('binance') as client:
+
+        pair: Pair = await client.exch_info(fqme.upper())
+        mkt = MktPair(
+            dst=Asset(
+                name=pair.baseAsset,
+                atype='crypto',
+                tx_tick=digits_to_dec(pair.baseAssetPrecision),
+            ),
+            src=Asset(
+                name=pair.quoteAsset,
+                atype='crypto',
+                tx_tick=digits_to_dec(pair.quoteAssetPrecision),
+            ),
+            price_tick=pair.price_tick,
+            size_tick=pair.size_tick,
+            bs_mktid=pair.symbol,
+            broker='binance',
+        )
+        both = mkt, pair
+        return both
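# For reference, `digits_to_dec()` (imported from `piker.accounting`)
# maps a decimal-digit precision count (eg. binance's
# `baseAssetPrecision` above) to the equivalent tick size; a rough
# stand-in sketch of that behavior (hypothetical helper name):
from decimal import Decimal

def digits_to_dec_sketch(ndigits: int) -> Decimal:
    # eg. 8 -> Decimal('1E-8'), the smallest tx-able unit
    return Decimal(f'1e-{ndigits}')

assert digits_to_dec_sketch(8) == Decimal('1e-8')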
 async def stream_quotes(

     send_chan: trio.abc.SendChannel,

@ -453,67 +562,43 @@ async def stream_quotes(
     # XXX: required to propagate ``tractor`` loglevel to piker logging
     get_console_log(loglevel or tractor.current_actor().loglevel)

-    sym_infos = {}
-    uid = 0
-
     async with (
-        open_cached_client('binance') as client,
         send_chan as send_chan,
     ):
+        init_msgs: list[FeedInit] = []
-        # keep client cached for real-time section
-        cache = await client.cache_symbols()
-
         for sym in symbols:
-            d = cache[sym.upper()]
-            syminfo = Pair(**d)  # validation
-
-            si = sym_infos[sym] = syminfo.to_dict()
-            filters = {}
-            for entry in syminfo.filters:
-                ftype = entry['filterType']
-                filters[ftype] = entry
-
-            # XXX: after manually inspecting the response format we
-            # just directly pick out the info we need
-            si['price_tick_size'] = float(
-                filters['PRICE_FILTER']['tickSize']
-            )
-            si['lot_tick_size'] = float(
-                filters['LOT_SIZE']['stepSize']
-            )
-            si['asset_type'] = 'crypto'
-
-        symbol = symbols[0]
-
-        init_msgs = {
-            # pass back token, and bool, signalling if we're the writer
-            # and that history has been written
-            symbol: {
-                'symbol_info': sym_infos[sym],
-                'shm_write_opts': {'sum_tick_vml': False},
-                'fqsn': sym,
-            },
-        }
+            mkt, pair = await get_mkt_info(sym)
+
+            # build out init msgs according to latest spec
+            init_msgs.append(
+                FeedInit(mkt_info=mkt)
+            )
+
+        iter_subids = itertools.count()

         @acm
-        async def subscribe(ws: wsproto.WSConnection):
+        async def subscribe(ws: NoBsWs):
             # setup subs

+            subid: int = next(iter_subids)
+
             # trade data (aka L1)
             # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker
-            l1_sub = make_sub(symbols, 'bookTicker', uid)
+            l1_sub = make_sub(symbols, 'bookTicker', subid)
             await ws.send_msg(l1_sub)

             # aggregate (each order clear by taker **not** by maker)
             # trades data:
             # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams
-            agg_trades_sub = make_sub(symbols, 'aggTrade', uid)
+            agg_trades_sub = make_sub(symbols, 'aggTrade', subid)
             await ws.send_msg(agg_trades_sub)

-            # ack from ws server
+            # might get ack from ws server, or maybe some
+            # other msg still in transit..
             res = await ws.recv_msg()
-            assert res['id'] == uid
+            subid: str | None = res.get('id')
+            if subid:
+                assert res['id'] == subid

             yield
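# Roughly what `make_sub()` (per the hunk header shown earlier)
# builds: the standard binance JSON-RPC style ws subscription msg,
# https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams
# (Sketch for reference, not the exact repo impl.)
def make_sub_sketch(
    pairs: list[str],
    sub_name: str,
    uid: int,
) -> dict:
    return {
        'method': 'SUBSCRIBE',
        'params': [f'{pair.lower()}@{sub_name}' for pair in pairs],
        'id': uid,
    }

# eg. make_sub_sketch(['BTCUSDT'], 'bookTicker', 0) ->
# {'method': 'SUBSCRIBE', 'params': ['btcusdt@bookTicker'], 'id': 0}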
@ -527,7 +612,7 @@ async def stream_quotes(
             await ws.send_msg({
                 "method": "UNSUBSCRIBE",
                 "params": subs,
-                "id": uid,
+                "id": subid,
             })

             # XXX: do we need to ack the unsub?

@ -543,7 +628,7 @@ async def stream_quotes(
             ) as ws,

             # avoid stream-gen closure from breaking trio..
-            stream_messages(ws) as msg_gen,
+            aclosing(stream_messages(ws)) as msg_gen,
         ):
             typ, quote = await anext(msg_gen)

@ -579,13 +664,13 @@ async def open_symbol_search(
     async with open_cached_client('binance') as client:

         # load all symbols locally for fast search
-        cache = await client.cache_symbols()
+        cache = await client.exch_info()
         await ctx.started()

         async with ctx.open_stream() as stream:

             async for pattern in stream:
-                # results = await client.symbol_info(sym=pattern.upper())
+                # results = await client.exch_info(sym=pattern.upper())

                 matches = fuzzy.extractBests(
                     pattern,

@ -593,7 +678,7 @@ async def open_symbol_search(
                     score_cutoff=50,
                 )
                 # repack in dict form
-                await stream.send(
-                    {item[0]['symbol']: item[0]
-                     for item in matches}
-                )
+                await stream.send({
+                    item[0].symbol: item[0]
+                    for item in matches
+                })
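# Note on the fuzzy matching above: when handed a mapping as the
# choices arg, `fuzzywuzzy`'s `extractBests()` yields
# (value, score, key) tuples, which is why the repack loop indexes
# `item[0]` for the `Pair` value. A standalone sketch:
from fuzzywuzzy import process as fuzzy

pairs = {
    'BTCUSDT': {'symbol': 'BTCUSDT'},
    'ETHUSDT': {'symbol': 'ETHUSDT'},
}
matches = fuzzy.extractBests(
    'btc',
    pairs,
    score_cutoff=50,
)
# eg. -> [({'symbol': 'BTCUSDT'}, <score>, 'BTCUSDT')]
print(matches)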
@ -28,7 +28,13 @@ import tractor

 from ..cli import cli
 from .. import watchlists as wl
-from ..log import get_console_log, colorize_json, get_logger
+from ..log import (
+    colorize_json,
+)
+from ._util import (
+    log,
+    get_console_log,
+)
 from ..service import (
     maybe_spawn_brokerd,
     maybe_open_pikerd,

@ -38,9 +44,7 @@ from ..brokers import (
     get_brokermod,
     data,
 )

-log = get_logger('cli')
-DEFAULT_BROKER = 'questrade'
+DEFAULT_BROKER = 'binance'

 _config_dir = click.get_app_dir('piker')
 _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
@ -26,15 +26,12 @@ from typing import List, Dict, Any, Optional

 import trio

-from ..log import get_logger
+from ._util import log
 from . import get_brokermod
 from ..service import maybe_spawn_brokerd
 from .._cacheables import open_cached_client


-log = get_logger(__name__)
-
-
 async def api(brokername: str, methname: str, **kwargs) -> dict:
     """Make (proxy through) a broker API call by name and return its result.
     """
@ -41,13 +41,13 @@ import tractor
 from tractor.experimental import msgpub
 from async_generator import asynccontextmanager

-from ..log import get_logger, get_console_log
+from ._util import (
+    log,
+    get_console_log,
+)
 from . import get_brokermod


-log = get_logger(__name__)
-
-
 async def wait_for_network(
     net_func: Callable,
     sleep: int = 1
@ -62,9 +62,10 @@ log = get_logger(__name__)

 @acm
 async def open_history_client(
-    instrument: str,
+    mkt: MktPair,
 ) -> tuple[Callable, int]:

+    instrument: str = mkt.bs_fqme
+
     # TODO implement history getter for the new storage layer.
     async with open_cached_client('deribit') as client:
@ -127,7 +127,7 @@ your ``pps.toml`` file will have position entries like,
 [ib.algopaper."mnq.globex.20221216"]
 size = -1.0
 ppu = 12423.630576923071
-bsuid = 515416577
+bs_mktid = 515416577
 expiry = "2022-12-16T00:00:00+00:00"
 clears = [
  { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
@ -35,7 +35,6 @@ from .feed import (
 )
 from .broker import (
     trades_dialogue,
-    norm_trade_records,
 )

 __all__ = [

@ -46,14 +45,23 @@ __all__ = [
     'stream_quotes',
 ]

-# tractor RPC enable arg
-__enable_modules__: list[str] = [
+_brokerd_mods: list[str] = [
     'api',
-    'feed',
     'broker',
 ]

+_datad_mods: list[str] = [
+    'feed',
+]
+
+
+# tractor RPC enable arg
+__enable_modules__: list[str] = (
+    _brokerd_mods
+    +
+    _datad_mods
+)
+
 # passed to ``tractor.ActorNursery.start_actor()``
 _spawn_kwargs = {
     'infect_asyncio': True,
@ -0,0 +1,191 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+"FLEX" report processing utils.
+
+"""
+from bidict import bidict
+import pendulum
+from pprint import pformat
+from typing import Any
+
+from .api import (
+    get_config,
+    log,
+)
+from piker.accounting import (
+    open_trade_ledger,
+)
+
+
+def parse_flex_dt(
+    record: str,
+) -> pendulum.datetime:
+    '''
+    Parse stupid flex record datetime stamps for the `dateTime` field..
+
+    '''
+    date, ts = record.split(';')
+    dt = pendulum.parse(date)
+    ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}'
+    tsdt = pendulum.parse(ts)
+    return dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second)
+
+
+def flex_records_to_ledger_entries(
+    accounts: bidict,
+    trade_entries: list[object],
+
+) -> dict:
+    '''
+    Convert flex report entry objects into ``dict`` form, pretty much
+    straight up without modification except add a `pydatetime` field
+    from the parsed timestamp.
+
+    '''
+    trades_by_account = {}
+    for t in trade_entries:
+        entry = t.__dict__
+
+        # XXX: LOL apparently ``toml`` has a bug
+        # where a section key error will show up in the write
+        # if you leave a table key as an `int`? So i guess
+        # cast to strs for all keys..
+
+        # oddly for some so-called "BookTrade" entries
+        # this field seems to be blank, no cuckin clue.
+        # trade['ibExecID']
+        tid = str(entry.get('ibExecID') or entry['tradeID'])
+        # date = str(entry['tradeDate'])
+
+        # XXX: is it going to cause problems if an account name
+        # gets lost? The user should be able to find it based
+        # on the actual exec history right?
+        acctid = accounts[str(entry['accountId'])]
+
+        # probably a flex record with a wonky non-std timestamp..
+        dt = entry['pydatetime'] = parse_flex_dt(entry['dateTime'])
+        entry['datetime'] = str(dt)
+
+        if not tid:
+            # this is likely some kind of internal adjustment
+            # transaction, likely one of the following:
+            # - an expiry event that will show a "book trade" indicating
+            #   some adjustment to cash balances: zeroing or itm settle.
+            # - a manual cash balance position adjustment likely done by
+            #   the user from the accounts window in TWS where they can
+            #   manually set the avg price and size:
+            #   https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST
+            log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}')
+            continue
+
+        trades_by_account.setdefault(
+            acctid, {}
+        )[tid] = entry
+
+    for acctid in trades_by_account:
+        trades_by_account[acctid] = dict(sorted(
+            trades_by_account[acctid].items(),
+            key=lambda entry: entry[1]['pydatetime'],
+        ))
+
+    return trades_by_account
+
+
+def load_flex_trades(
+    path: str | None = None,
+
+) -> dict[str, Any]:
+
+    from ib_insync import flexreport, util
+
+    conf = get_config()
+
+    if not path:
+        # load ``brokers.toml`` and try to get the flex
+        # token and query id that must be previously defined
+        # by the user.
+        token = conf.get('flex_token')
+        if not token:
+            raise ValueError(
+                'You must specify a ``flex_token`` field in your '
+                '`brokers.toml` in order to load your trade log, see our '
+                'instructions for how to set this up here:\n'
+                'PUT LINK HERE!'
+            )
+
+        qid = conf['flex_trades_query_id']
+
+        # TODO: hack this into our logging
+        # system like we do with the API client..
+        util.logToConsole()
+
+        # TODO: rewrite the query part of this with async..httpx?
+        report = flexreport.FlexReport(
+            token=token,
+            queryId=qid,
+        )
+
+    else:
+        # XXX: another project we could potentially look at,
+        # https://pypi.org/project/ibflex/
+        report = flexreport.FlexReport(path=path)
+
+    trade_entries = report.extract('Trade')
+    ln = len(trade_entries)
+    log.info(f'Loaded {ln} trades from flex query')
+
+    trades_by_account = flex_records_to_ledger_entries(
+        conf['accounts'].inverse,  # reverse map to user account names
+        trade_entries,
+    )
+
+    ledger_dict: dict | None = None
+
+    for acctid in trades_by_account:
+        trades_by_id = trades_by_account[acctid]
+
+        with open_trade_ledger('ib', acctid) as ledger_dict:
+            tid_delta = set(trades_by_id) - set(ledger_dict)
+            log.info(
+                'New trades detected\n'
+                f'{pformat(tid_delta)}'
+            )
+            if tid_delta:
+                sorted_delta = dict(sorted(
+                    {tid: trades_by_id[tid] for tid in tid_delta}.items(),
+                    key=lambda entry: entry[1].pop('pydatetime'),
+                ))
+                ledger_dict.update(sorted_delta)
+
+    return ledger_dict
+
+
+if __name__ == '__main__':
+    import sys
+    import os
+
+    args = sys.argv
+    if len(args) > 1:
+        args = args[1:]
+        for arg in args:
+            path = os.path.abspath(arg)
+            load_flex_trades(path=path)
+    else:
+        # expect brokers.toml to have an entry and
+        # pull from the web service.
+        load_flex_trades()
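# Usage note for `parse_flex_dt()` above: flex reports fuse the date
# and HHMMSS time with a ';', so a stamp like '20221216;131500'
# parses out as,
#
#     >>> parse_flex_dt('20221216;131500')
#     DateTime(2022, 12, 16, 13, 15, 0, tzinfo=Timezone('UTC'))
#
# ie. the date is parsed first and then the hour/min/sec are set from
# the reformatted 'HH:MM:SS' stamp.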
@ -19,14 +19,23 @@
 runnable script-programs.

 '''
-from typing import Literal
+from __future__ import annotations
+from functools import partial
+from typing import (
+    Literal,
+    TYPE_CHECKING,
+)
 import subprocess

 import tractor

-from piker.log import get_logger
+from .._util import log

-log = get_logger(__name__)
+if TYPE_CHECKING:
+    from .api import (
+        MethodProxy,
+        ib_Client
+    )


 _reset_tech: Literal[

@ -41,7 +50,8 @@ _reset_tech:

 async def data_reset_hack(
-    reset_type: str = 'data',
+    vnc_host: str,
+    reset_type: Literal['data', 'connection'],

 ) -> None:
     '''

@ -71,18 +81,40 @@ async def data_reset_hack(
     that need to be wrangled.

     '''
+    no_setup_msg: str = (
+        f'No data reset hack test setup for {vnc_host}!\n'
+        'See setup @\n'
+        'https://github.com/pikers/piker/tree/master/piker/brokers/ib'
+    )
     global _reset_tech

     match _reset_tech:
         case 'vnc':
             try:
-                await tractor.to_asyncio.run_task(vnc_click_hack)
+                await tractor.to_asyncio.run_task(
+                    partial(
+                        vnc_click_hack,
+                        host=vnc_host,
+                    )
+                )
             except OSError:
-                _reset_tech = 'i3ipc_xdotool'
+                if vnc_host != 'localhost':
+                    log.warning(no_setup_msg)
+                    return False
+
+                try:
+                    import i3ipc
+                except ModuleNotFoundError:
+                    log.warning(no_setup_msg)
+                    return False
+
                 try:
                     i3ipc_xdotool_manual_click_hack()
+                    _reset_tech = 'i3ipc_xdotool'
                     return True
                 except OSError:
+                    log.exception(no_setup_msg)
                     return False

         case 'i3ipc_xdotool':

@ -96,19 +128,32 @@ async def data_reset_hack(


 async def vnc_click_hack(
+    host: str = 'localhost',
     reset_type: str = 'data'
 ) -> None:
     '''
-    Reset the data or netowork connection for the VNC attached
+    Reset the data or network connection for the VNC attached
     ib gateway using magic combos.

     '''
-    key = {'data': 'f', 'connection': 'r'}[reset_type]
+    try:
         import asyncvnc
+    except ModuleNotFoundError:
+        log.warning(
+            "In order to leverage `piker`'s built-in data reset hacks, install "
+            "the `asyncvnc` project: https://github.com/barneygale/asyncvnc"
+        )
+        return
+
+    # two different hot keys which trigger diff types of reset
+    # requests B)
+    key = {
+        'data': 'f',
+        'connection': 'r'
+    }[reset_type]

     async with asyncvnc.connect(
-        'localhost',
+        host,
         port=3003,
         # password='ibcansmbz',
     ) as client:

@ -124,9 +169,11 @@ async def vnc_click_hack(


 def i3ipc_xdotool_manual_click_hack() -> None:
-    import i3ipc
-
     i3 = i3ipc.Connection()

+    # TODO: might be worth offering some kinda api for grabbing
+    # the window id from the pid?
+    # https://stackoverflow.com/a/2250879
     t = i3.get_tree()

     orig_win_id = t.find_focused().window
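# For reference, the vnc hack above reduces to sending a hot-key combo
# over the attached vnc session; a minimal sketch using `asyncvnc`'s
# documented keyboard API (key table per the `reset_type` mapping
# above):
import asyncio
import asyncvnc

async def send_reset_combo(
    host: str = 'localhost',
    key: str = 'f',  # 'f' -> data reset, 'r' -> network reconnect
) -> None:
    async with asyncvnc.connect(host, port=3003) as client:
        # press and release the full combo in one shot
        client.keyboard.press('Ctrl', 'Alt', key)

# asyncio.run(send_reset_combo())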
@ -14,21 +14,27 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
-``ib`` core API client machinery; mostly sane wrapping around
-``ib_insync``.
-
-"""
+'''
+Core API client machinery; mostly sane/useful wrapping around `ib_insync`..
+
+'''
 from __future__ import annotations
-from contextlib import asynccontextmanager as acm
+from contextlib import (
+    asynccontextmanager as acm,
+    contextmanager as cm,
+)
 from contextlib import AsyncExitStack
 from dataclasses import asdict, astuple
 from datetime import datetime
-from functools import partial
+from functools import (
+    partial,
+    # lru_cache,
+)
 import itertools
 from math import isnan
 from typing import (
     Any,
+    Callable,
     Optional,
     Union,
 )

@ -44,6 +50,7 @@ import trio
 import tractor
 from tractor import to_asyncio
 import pendulum
+from eventkit import Event
 import ib_insync as ibis
 from ib_insync.contract import (
     Contract,

@ -67,13 +74,13 @@ from ib_insync.client import Client as ib_Client
 import numpy as np

 from piker import config
-from piker.log import get_logger
+from piker.brokers._util import (
+    log,
+    get_logger,
+)
 from piker.data._source import base_ohlc_dtype


-log = get_logger(__name__)
-
-
 _time_units = {
     's': ' sec',
     'm': ' mins',
@ -130,11 +137,13 @@ class NonShittyWrapper(Wrapper):


 class NonShittyIB(ibis.IB):
-    """The beginning of overriding quite a few decisions in this lib.
+    '''
+    The beginning of overriding quite a few decisions in this lib.
+
     - Don't use datetimes
     - Don't use named tuples
-    """
+
+    '''
     def __init__(self):

         # override `ib_insync` internal loggers so we can see wtf

@ -172,6 +181,8 @@ _adhoc_cmdty_set = {
     'xagusd.cmdty',  # silver spot
 }

+# NOTE: if you aren't seeing one of these symbol's futures contracts
+# show up, it's likely the `.<venue>` part is wrong!
 _adhoc_futes_set = {

     # equities

@ -183,6 +194,7 @@ _adhoc_futes_set = {

     # crypto$
     'brr.cme',
+    'mbt.cme',  # micro
     'ethusdrr.cme',

     # agriculture

@ -197,7 +209,7 @@ _adhoc_futes_set = {
     'mgc.comex',  # micro

     # oil & gas
-    'cl.comex',
+    'cl.nymex',

     'ni.comex',  # silver futes
     'qi.comex',  # mini-silver futes
@ -311,6 +323,22 @@ _samplings: dict[int, tuple[str, str]] = {
 }


+@cm
+def remove_handler_on_err(
+    event: Event,
+    handler: Callable,
+) -> None:
+    try:
+        yield
+    except trio.BrokenResourceError:
+        # XXX: eventkit's ``Event.emit()`` for whatever redic
+        # reason will catch and ignore regular exceptions
+        # resulting in tracebacks spammed to console..
+        # Manually do the dereg ourselves.
+        log.exception(f'Disconnected from {event} updates')
+        event.disconnect(handler)
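# Usage sketch for `remove_handler_on_err()` above: wrap an eventkit
# handler's body so that a `trio.BrokenResourceError` (eg. the
# receiving mem-chan was torn down) de-registers the handler instead
# of being silently swallowed by `Event.emit()`:
#
#     def on_err_msg(msg: str) -> None:
#         with remove_handler_on_err(err_event, on_err_msg):
#             to_trio.send_nowait(('error', msg))
#
#     err_event.connect(on_err_msg)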
 class Client:
     '''
     IB wrapped for our broker backend API.

@ -330,7 +358,7 @@ class Client:
         self.ib.RaiseRequestErrors = True

         # contract cache
-        self._feeds: dict[str, trio.abc.SendChannel] = {}
+        self._cons: dict[str, Contract] = {}

         # NOTE: the ib.client here is "throttled" to 45 rps by default

@ -359,7 +387,7 @@ class Client:

     async def bars(
         self,
-        fqsn: str,
+        fqme: str,

         # EST in ISO 8601 format is required... below is EPOCH
         start_dt: Union[datetime, str] = "1970-01-01T00:00:00.000000-05:00",

@ -376,7 +404,7 @@ class Client:

     ) -> tuple[BarDataList, np.ndarray, pendulum.Duration]:
         '''
-        Retrieve OHLCV bars for a fqsn over a range to the present.
+        Retrieve OHLCV bars for a fqme over a range to the present.

         '''
         # See API docs here:

@ -386,8 +414,7 @@ class Client:
         bar_size, duration, dt_duration = _samplings[sample_period_s]

         global _enters
-        # log.info(f'REQUESTING BARS {_enters} @ end={end_dt}')
-        print(
+        log.info(
             f"REQUESTING {duration}'s worth {bar_size} BARS\n"
             f'{_enters} @ end={end_dt}"'
         )

@ -397,7 +424,7 @@ class Client:

         _enters += 1

-        contract = (await self.find_contracts(fqsn))[0]
+        contract = (await self.find_contracts(fqme))[0]
         bars_kwargs.update(getattr(contract, 'bars_kwargs', {}))

         bars = await self.ib.reqHistoricalDataAsync(

@ -473,7 +500,7 @@ class Client:
             # nested dataclass we probably don't need and that won't
             # IPC serialize..
             d.secIdList = ''
-            key, calc_price = con2fqsn(d.contract)
+            key, calc_price = con2fqme(d.contract)
             details[key] = d

         return details

@ -614,15 +641,22 @@ class Client:

         return con

+    # TODO: make this work with our `MethodProxy`..
+    # @lru_cache(maxsize=None)
     async def get_con(
         self,
         conid: int,
     ) -> Contract:
-        return await self.ib.qualifyContractsAsync(
-            ibis.Contract(conId=conid)
-        )
+        try:
+            return self._cons[conid]
+        except KeyError:
+            con: Contract = await self.ib.qualifyContractsAsync(
+                ibis.Contract(conId=conid)
+            )
+            self._cons[conid] = con
+            return con

-    def parse_patt2fqsn(
+    def parse_patt2fqme(
         self,
         pattern: str,

@ -641,11 +675,11 @@ class Client:

         currency = ''

-        # fqsn parsing stage
+        # fqme parsing stage
         # ------------------
         if '.ib' in pattern:
-            from ..data._source import unpack_fqsn
-            _, symbol, expiry = unpack_fqsn(pattern)
+            from piker.accounting import unpack_fqme
+            _, symbol, venue, expiry = unpack_fqme(pattern)

         else:
             symbol = pattern

@ -687,7 +721,7 @@ class Client:
     ) -> Contract:

         if pattern is not None:
-            symbol, currency, exch, expiry = self.parse_patt2fqsn(
+            symbol, currency, exch, expiry = self.parse_patt2fqme(
                 pattern,
             )
             sectype = ''

@ -722,7 +756,7 @@ class Client:
         )

         elif (
-            exch in ('IDEALPRO')
+            exch in {'IDEALPRO'}
             or sectype == 'CASH'
         ):
             # if '/' in symbol:

@ -806,14 +840,14 @@ class Client:

     async def get_head_time(
         self,
-        fqsn: str,
+        fqme: str,

     ) -> datetime:
         '''
         Return the first datetime stamp for ``contract``.

         '''
-        contract = (await self.find_contracts(fqsn))[0]
+        contract = (await self.find_contracts(fqme))[0]
         return await self.ib.reqHeadTimeStampAsync(
             contract,
             whatToShow='TRADES',

@ -825,29 +859,34 @@ class Client:
         self,
         symbol: str,

-    ) -> tuple[Contract, Ticker, ContractDetails]:
+    ) -> tuple[
+        Contract,
+        ContractDetails,
+    ]:
+        '''
+        Get summary (meta) data for a given symbol str including
+        ``Contract`` and its details and a (first snapshot of the)
+        ``Ticker``.
+
+        '''
         contract = (await self.find_contracts(symbol))[0]
+        details_fute = self.ib.reqContractDetailsAsync(contract)
+        details = (await details_fute)[0]
+        return contract, details
+
+    async def get_quote(
+        self,
+        contract: Contract,
+
+    ) -> Ticker:
+        '''
+        Return a single (snap) quote for symbol.
+
+        '''
         ticker: Ticker = self.ib.reqMktData(
             contract,
             snapshot=True,
         )
-        details_fute = self.ib.reqContractDetailsAsync(contract)
-        details = (await details_fute)[0]
-
-        return contract, ticker, details
-
-    async def get_quote(
-        self,
-        symbol: str,
-
-    ) -> tuple[Contract, Ticker, ContractDetails]:
-        '''
-        Return a single quote for symbol.
-
-        '''
-        contract, ticker, details = await self.get_sym_details(symbol)
-
         ready = ticker.updateEvent

         # ensure a last price gets filled in before we deliver quote

@ -864,21 +903,22 @@ class Client:
             else:
                 if not warnset:
                     log.warning(
-                        f'Quote for {symbol} timed out: market is closed?'
+                        f'Quote for {contract} timed out: market is closed?'
                     )
                     warnset = True

             else:
-                log.info(f'Got first quote for {symbol}')
+                log.info(f'Got first quote for {contract}')
                 break
         else:
             if not warnset:
                 log.warning(
-                    f'Symbol {symbol} is not returning a quote '
-                    'it may be outside trading hours?')
+                    f'Contract {contract} is not returning a quote '
+                    'it may be outside trading hours?'
+                )
                 warnset = True

-        return contract, ticker, details
+        return ticker

     # async to be consistent for the client proxy, and cuz why not.
     def submit_limit(

@ -1008,6 +1048,21 @@ class Client:

         self.ib.errorEvent.connect(push_err)

+        api_err = self.ib.client.apiError
+
+        def report_api_err(msg: str) -> None:
+            with remove_handler_on_err(
+                api_err,
+                report_api_err,
+            ):
+                to_trio.send_nowait((
+                    'error',
+                    msg,
+                ))
+                api_err.clear()  # drop msg history
+
+        api_err.connect(report_api_err)
+
     def positions(
         self,
         account: str = '',

@ -1019,13 +1074,13 @@ class Client:
         return self.ib.positions(account=account)


-def con2fqsn(
+def con2fqme(
     con: Contract,
     _cache: dict[int, (str, bool)] = {}

 ) -> tuple[str, bool]:
     '''
-    Convert contracts to fqsn-style strings to be used both in symbol-search
+    Convert contracts to fqme-style strings to be used both in symbol-search
     matching and as feed tokens passed to the front end data feed layer.

     Previously seen contracts are cached by id.

@ -1085,12 +1140,12 @@ def con2fqme(
     if expiry:
         suffix += f'.{expiry}'

-    fqsn_key = symbol.lower()
+    fqme_key = symbol.lower()
     if suffix:
-        fqsn_key = '.'.join((fqsn_key, suffix)).lower()
+        fqme_key = '.'.join((fqme_key, suffix)).lower()

-    _cache[con.conId] = fqsn_key, calc_price
-    return fqsn_key, calc_price
+    _cache[con.conId] = fqme_key, calc_price
+    return fqme_key, calc_price
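# Example fqme ("fully qualified market endpoint") keys generated by
# `con2fqme()` above, all lower-cased and '.'-joined per the suffix
# logic:
#
#     'mnq.globex.20221216'   # future w/ expiry suffix
#     'xagusd.cmdty'          # spot commodity
#     'brr.cme'               # adhoc fute (see the sets above)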
 # per-actor API ep caching

@ -1137,7 +1192,7 @@ async def load_aio_clients(
     # the API TCP in `ib_insync` connection can be flaky af so instead
     # retry a few times to get the client going..
     connect_retries: int = 3,
-    connect_timeout: float = 0.5,
+    connect_timeout: float = 1,
     disconnect_on_exit: bool = True,

 ) -> dict[str, Client]:

@ -1191,9 +1246,14 @@ async def load_aio_clients(
     for host, port in combos:

         sockaddr = (host, port)

+        maybe_client = _client_cache.get(sockaddr)
         if (
-            sockaddr in _client_cache
-            or sockaddr in _scan_ignore
+            sockaddr in _scan_ignore
+            or (
+                maybe_client
+                and maybe_client.ib.isConnected()
+            )
         ):
             continue

@ -1204,9 +1264,9 @@ async def load_aio_clients(
             await ib.connectAsync(
                 host,
                 port,
-                clientId=client_id,
+                clientId=client_id + i,

                 # this timeout is sensitive on windows and will
                 # fail without a good "timeout error" so be
                 # careful.
                 timeout=connect_timeout,

@ -1230,15 +1290,10 @@ async def load_aio_clients(
             OSError,
         ) as ce:
             _err = ce

-            if i > 8:
-                # cache logic to avoid rescanning if we already have all
-                # clients loaded.
-                _scan_ignore.add(sockaddr)
-                raise

             log.warning(
-                f'Failed to connect on {port} for {i} time, retrying...')
+                f'Failed to connect on {port} for {i} time with,\n'
+                f'{ib.client.apiError.value()}\n'
+                'retrying with a new client id..')

         # Pre-collect all accounts available for this
         # connection and map account names to this client

@ -1299,14 +1354,8 @@ async def load_clients_for_trio(
     a ``tractor.to_asyncio.open_channel_from()``.

     '''
-    global _accounts2clients
-
-    if _accounts2clients:
-        to_trio.send_nowait(_accounts2clients)
-        await asyncio.sleep(float('inf'))
-
-    else:
     async with load_aio_clients() as accts2clients:

         to_trio.send_nowait(accts2clients)

         # TODO: maybe a sync event to wait on instead?

@ -1400,6 +1449,14 @@ class MethodProxy:
     while not chan.closed():
         # send through method + ``kwargs: dict`` as pair
         msg = await chan.receive()

+        # TODO: implement reconnect functionality like
+        # in our `.data._web_bs.NoBsWs`
+        # try:
+        #     msg = await chan.receive()
+        # except ConnectionError:
+        #     self.reset()
+
         # print(f'NEXT MSG: {msg}')

         # TODO: py3.10 ``match:`` syntax B)

@ -1451,6 +1508,7 @@ async def open_aio_client_method_relay(

 ) -> None:

+    # sync with `open_client_proxy()` caller
     to_trio.send_nowait(client)

     # TODO: separate channel for error handling?

@ -1460,10 +1518,13 @@ async def open_aio_client_method_relay(
     # back results
     while not to_trio._closed:
         msg = await from_trio.get()
-        if msg is None:
+
+        match msg:
+            case None:  # termination sentinel
                 print('asyncio PROXY-RELAY SHUTDOWN')
                 break

-        meth_name, kwargs = msg
+            case (meth_name, kwargs):
                 meth = getattr(client, meth_name)

@ -1480,6 +1541,12 @@ async def open_aio_client_method_relay(
             ) as err:
                 to_trio.send_nowait({'exception': err})

+            case {'error': content}:
+                to_trio.send_nowait({'exception': content})
+
+            case _:
+                raise ValueError(f'Unhandled msg {msg}')
+

 @acm
 async def open_client_proxy(

@ -1509,7 +1576,8 @@ async def open_client_proxy(

     # mock all remote methods on ib ``Client``.
     for name, method in inspect.getmembers(
-        Client, predicate=inspect.isfunction
+        Client,
+        predicate=inspect.isfunction,
     ):
         if '_' == name[0]:
             continue
@ -13,6 +13,7 @@

 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 """
 Order and trades endpoints for use with ``piker``'s EMS.

@ -21,6 +22,7 @@ from __future__ import annotations
 from bisect import insort
 from contextlib import ExitStack
 from dataclasses import asdict
+from decimal import Decimal
 from functools import partial
 from pprint import pformat
 import time

@ -37,6 +39,7 @@ from trio_typing import TaskStatus
 import tractor
 from ib_insync.contract import (
     Contract,
+    Option,
 )
 from ib_insync.order import (
     Trade,

@ -51,14 +54,17 @@ from ib_insync.objects import Position as IbPosition
 import pendulum

 from piker import config
-from piker.pp import (
+from piker.accounting import (
+    dec_digits,
+    digits_to_dec,
     Position,
     Transaction,
     open_trade_ledger,
+    iter_by_dt,
     open_pps,
     PpTable,
 )
-from piker.log import get_console_log
+from .._util import get_console_log
 from piker.clearing._messages import (
     Order,
     Status,

@ -70,36 +76,39 @@ from piker.clearing._messages import (
     BrokerdFill,
     BrokerdError,
 )
-from piker.data._source import (
-    Symbol,
-    float_digits,
+from piker.accounting import (
+    MktPair,
 )
 from .api import (
     _accounts2clients,
-    con2fqsn,
+    con2fqme,
     log,
     get_config,
     open_client_proxies,
     Client,
     MethodProxy,
 )
+from ._flex_reports import parse_flex_dt


 def pack_position(
     pos: IbPosition

-) -> dict[str, Any]:
+) -> tuple[
+    str,
+    dict[str, Any]
+]:
+
     con = pos.contract
-    fqsn, calc_price = con2fqsn(con)
+    fqme, calc_price = con2fqme(con)

     # TODO: options contracts into a sane format..
     return (
-        con.conId,
+        str(con.conId),
         BrokerdPosition(
             broker='ib',
             account=pos.account,
-            symbol=fqsn,
+            symbol=fqme,
             currency=con.currency,
             size=float(pos.position),
             avg_price=float(pos.avgCost) / float(con.multiplier or 1.0),

@ -281,18 +290,21 @@ async def recv_trade_updates(
 async def update_ledger_from_api_trades(
     trade_entries: list[dict[str, Any]],
     client: Union[Client, MethodProxy],
+    accounts_def_inv: bidict[str, str],

 ) -> tuple[
     dict[str, Transaction],
     dict[str, dict],
 ]:

     # XXX; ERRGGG..
     # pack in the "primary/listing exchange" value from a
     # contract lookup since it seems this isn't available by
     # default from the `.fills()` method endpoint...
     for entry in trade_entries:
         condict = entry['contract']
+        # print(
+        #     f"{condict['symbol']}: GETTING CONTRACT INFO!\n"
+        # )
         conid = condict['conId']
         pexch = condict['primaryExchange']

@ -310,9 +322,8 @@ async def update_ledger_from_api_trades(
         # pack in the ``Contract.secType``
         entry['asset_type'] = condict['secType']

-    conf = get_config()
     entries = api_trades_to_ledger_entries(
-        conf['accounts'].inverse,
+        accounts_def_inv,
         trade_entries,
     )
     # normalize recent session's trades to the `Transaction` type

@ -334,15 +345,17 @@ async def update_and_audit_msgs(
 ) -> list[BrokerdPosition]:

     msgs: list[BrokerdPosition] = []
+    p: Position
     for p in pps:
-        bsuid = p.bsuid
+        bs_mktid = p.bs_mktid

         # retrieve equivalent ib reported position message
         # for comparison/audit versus the piker equivalent
         # breakeven pp calcs.
-        ibppmsg = cids2pps.get((acctid, bsuid))
+        ibppmsg = cids2pps.get((acctid, bs_mktid))

         if ibppmsg:
+            symbol = ibppmsg.symbol
             msg = BrokerdPosition(
                 broker='ib',

@ -353,13 +366,16 @@ async def update_and_audit_msgs(
                 # table..
                 account=ibppmsg.account,
                 # XXX: the `.ib` is stripped..?
-                symbol=ibppmsg.symbol,
+                symbol=symbol,
                 currency=ibppmsg.currency,
                 size=p.size,
                 avg_price=p.ppu,
             )
             msgs.append(msg)

+            ibfmtmsg = pformat(ibppmsg.to_dict())
+            pikerfmtmsg = pformat(msg.to_dict())
+
             if validate:
                 ibsize = ibppmsg.size
                 pikersize = msg.size

@ -379,26 +395,24 @@ async def update_and_audit_msgs(

                 # raise ValueError(
                 log.error(
-                    f'POSITION MISMATCH ib <-> piker ledger:\n'
-                    f'ib: {ibppmsg}\n'
-                    f'piker: {msg}\n'
-                    f'reverse_split_ratio: {reverse_split_ratio}\n'
-                    f'split_ratio: {split_ratio}\n\n'
-                    'FIGURE OUT WHY TF YOUR LEDGER IS OFF!?!?\n\n'
+                    f'Pos mismatch in ib vs. the piker ledger!\n'
+                    f'IB:\n{ibfmtmsg}\n\n'
+                    f'PIKER:\n{pikerfmtmsg}\n\n'
                     'If you are expecting a (reverse) split in this '
-                    'instrument you should probably put the following '
-                    f'in the `pps.toml` section:\n{entry}'
+                    'instrument you should probably put the following'
+                    'in the `pps.toml` section:\n'
+                    f'{entry}\n'
+                    # f'reverse_split_ratio: {reverse_split_ratio}\n'
+                    # f'split_ratio: {split_ratio}\n\n'
                 )
                 msg.size = ibsize

             if ibppmsg.avg_price != msg.avg_price:
-                # TODO: make this a "propoganda" log level?
+                # TODO: make this a "propaganda" log level?
                 log.warning(
-                    'The mega-cucks at IB want you to believe with their '
-                    f'"FIFO" positioning for {msg.symbol}:\n'
-                    f'"ib" mega-cucker avg price: {ibppmsg.avg_price}\n'
-                    f'piker, LIFO breakeven PnL price: {msg.avg_price}'
+                    f'IB "FIFO" avg price for {msg.symbol} is DIFF:\n'
+                    f'ib: {ibppmsg.avg_price}\n'
+                    f'piker: {msg.avg_price}'
                 )

         else:

@ -414,7 +428,7 @@ async def update_and_audit_msgs(
                 # right since `.broker` is already included?
                 account=f'ib.{acctid}',
                 # XXX: the `.ib` is stripped..?
-                symbol=p.symbol.front_fqsn(),
+                symbol=p.mkt.fqme,
                 # currency=ibppmsg.currency,
                 size=p.size,
                 avg_price=p.ppu,

@ -422,16 +436,89 @@ async def update_and_audit_msgs(
             if validate and p.size:
                 # raise ValueError(
                 log.error(
-                    f'UNEXPECTED POSITION says ib:\n'
-                    f'piker: {msg}\n'
-                    'YOU SHOULD FIGURE OUT WHY TF YOUR LEDGER IS OFF!?\n'
-                    'THEY LIQUIDATED YOU OR YOUR MISSING LEDGER RECORDS!?'
+                    f'UNEXPECTED POSITION says IB => {msg.symbol}\n'
+                    'Maybe they LIQUIDATED YOU or are missing ledger entries?\n'
                 )
             msgs.append(msg)

     return msgs


+async def aggr_open_orders(
+    order_msgs: list[Status],
+    client: Client,
+    proxy: MethodProxy,
+    accounts_def: bidict[str, str],
+
+) -> None:
+    '''
+    Collect all open orders from client and fill in `order_msgs: list`.
+
+    '''
+    trades: list[Trade] = client.ib.openTrades()
+    for trade in trades:
+        order = trade.order
+        quant = trade.order.totalQuantity
+        action = order.action.lower()
+        size = {
+            'sell': -1,
+            'buy': 1,
+        }[action] * quant
+        con = trade.contract
+
+        # TODO: in the case of the SMART venue (aka ib's
+        # router-clearing sys) we probably should handle
+        # showing such orders overtop of the fqme for the
+        # primary exchange, how to map this easily is going
+        # to be a bit tricky though?
+        deats = await proxy.con_deats(contracts=[con])
+        fqme = list(deats)[0]
+
+        reqid = order.orderId
+
+        # TODO: maybe embed a ``BrokerdOrder`` instead
+        # since then we can directly load it on the client
+        # side in the order mode loop?
+        msg = Status(
+            time_ns=time.time_ns(),
+            resp='open',
+            oid=str(reqid),
+            reqid=reqid,
+
+            # embedded order info
+            req=Order(
+                action=action,
+                exec_mode='live',
+                oid=str(reqid),
+                symbol=fqme,
+                account=accounts_def.inverse[order.account],
+                price=order.lmtPrice,
+                size=size,
+            ),
+            src='ib',
+        )
+        order_msgs.append(msg)
+
+    return order_msgs
+
+
+# proxy wrapper for starting trade event stream
+async def open_trade_event_stream(
+    client: Client,
+    task_status: TaskStatus[
+        trio.abc.ReceiveChannel
+    ] = trio.TASK_STATUS_IGNORED,
+):
+    # each api client has a unique event stream
+    async with tractor.to_asyncio.open_channel_from(
+        recv_trade_updates,
+        client=client,
+    ) as (first, trade_event_stream):
+
+        task_status.started(trade_event_stream)
+        await trio.sleep_forever()
@tractor.context
|
@tractor.context
|
||||||
async def trades_dialogue(
|
async def trades_dialogue(
|
||||||
|
|
||||||
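The new `aggr_open_orders()` helper's only non-obvious step is the BUY/SELL sign convention it applies before embedding the `Order`. That mapping in isolation (the `signed_size` name is just for this example):

    def signed_size(action: str, quantity: float) -> float:
        # 'sell' -> negative, 'buy' -> positive; raises KeyError on
        # anything unexpected, same as the dict-lookup in the diff
        return {'sell': -1, 'buy': 1}[action.lower()] * quantity

    assert signed_size('BUY', 3) == 3
    assert signed_size('sell', 2) == -2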
@@ -465,7 +552,10 @@ async def trades_dialogue(
     # we might also want to delegate a specific actor for
     # ledger writing / reading for speed?
     async with (
-        open_client_proxies() as (proxies, aioclients),
+        open_client_proxies() as (
+            proxies,
+            aioclients,
+        ),
     ):
         # Open a trade ledgers stack for appending trade records over
         # multiple accounts.
@@ -473,6 +563,9 @@ async def trades_dialogue(
         ledgers: dict[str, dict] = {}
         tables: dict[str, PpTable] = {}
         order_msgs: list[Status] = []
+        conf = get_config()
+        accounts_def_inv: bidict[str, str] = bidict(conf['accounts']).inverse

         with (
            ExitStack() as lstack,
        ):
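`accounts_def_inv` is the reverse view of the user's account-name table: `bidict.inverse` gives the number-to-name direction without maintaining a second dict. A small standalone demo (the account values here are made up):

    from bidict import bidict

    accounts_def = bidict({'ib.margin': 'DU1234567'})
    accounts_def_inv = accounts_def.inverse

    assert accounts_def['ib.margin'] == 'DU1234567'
    assert accounts_def_inv['DU1234567'] == 'ib.margin'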
@@ -489,149 +582,16 @@ async def trades_dialogue(
                 open_trade_ledger(
                     'ib',
                     acctid,
-                )
-            )
-            table = tables[acctid] = lstack.enter_context(
-                open_pps(
-                    'ib',
-                    acctid,
-                    write_on_exit=True,
-                )
-            )
-
-        for account, proxy in proxies.items():
-            client = aioclients[account]
-            trades: list[Trade] = client.ib.openTrades()
-            for trade in trades:
-                order = trade.order
-                quant = trade.order.totalQuantity
-                action = order.action.lower()
-                size = {
-                    'sell': -1,
-                    'buy': 1,
-                }[action] * quant
-                con = trade.contract
-
-                # TODO: in the case of the SMART venue (aka ib's
-                # router-clearing sys) we probably should handle
-                # showing such orders overtop of the fqsn for the
-                # primary exchange, how to map this easily is going
-                # to be a bit tricky though?
-                deats = await proxy.con_deats(contracts=[con])
-                fqsn = list(deats)[0]
-
-                reqid = order.orderId
-
-                # TODO: maybe embed a ``BrokerdOrder`` instead
-                # since then we can directly load it on the client
-                # side in the order mode loop?
-                msg = Status(
-                    time_ns=time.time_ns(),
-                    resp='open',
-                    oid=str(reqid),
-                    reqid=reqid,
-
-                    # embedded order info
-                    req=Order(
-                        action=action,
-                        exec_mode='live',
-                        oid=str(reqid),
-                        symbol=fqsn,
-                        account=accounts_def.inverse[order.account],
-                        price=order.lmtPrice,
-                        size=size,
+                    tx_sort=partial(
+                        iter_by_dt,
+                        parsers={
+                            'dateTime': parse_flex_dt,
+                            'datetime': pendulum.parse,
+                        },
                     ),
-                    src='ib',
                 )
-                order_msgs.append(msg)
-
-            # process pp value reported from ib's system. we only use these
-            # to cross-check sizing since average pricing on their end uses
-            # the so called (bs) "FIFO" style which more or less results in
-            # a price that's not useful for traders who want to not lose
-            # money.. xb
-            for pos in client.positions():
-
-                # collect all ib-pp reported positions so that we can be
-                # sure know which positions to update from the ledger if
-                # any are missing from the ``pps.toml``
-                bsuid, msg = pack_position(pos)
-
-                acctid = msg.account = accounts_def.inverse[msg.account]
-                acctid = acctid.strip('ib.')
-                cids2pps[(acctid, bsuid)] = msg
-                assert msg.account in accounts, (
-                    f'Position for unknown account: {msg.account}')
-
-                ledger = ledgers[acctid]
-                table = tables[acctid]
-
-                pp = table.pps.get(bsuid)
-                if (
-                    not pp
-                    or pp.size != msg.size
-                ):
-                    trans = norm_trade_records(ledger)
-                    table.update_from_trans(trans)
-
-                    # update trades ledgers for all accounts from connected
-                    # api clients which report trades for **this session**.
-                    trades = await proxy.trades()
-                    (
-                        trans_by_acct,
-                        api_to_ledger_entries,
-                    ) = await update_ledger_from_api_trades(
-                        trades,
-                        proxy,
-                    )
-
-                    # if new trades are detected from the API, prepare
-                    # them for the ledger file and update the pptable.
-                    if api_to_ledger_entries:
-                        trade_entries = api_to_ledger_entries.get(acctid)
-
-                        if trade_entries:
-                            # write ledger with all new trades **AFTER**
-                            # we've updated the `pps.toml` from the
-                            # original ledger state! (i.e. this is
-                            # currently done on exit)
-                            ledger.update(trade_entries)
-
-                            trans = trans_by_acct.get(acctid)
-                            if trans:
-                                table.update_from_trans(trans)
-
-                    # XXX: not sure exactly why it wouldn't be in
-                    # the updated output (maybe this is a bug?) but
-                    # if you create a pos from TWS and then load it
-                    # from the api trades it seems we get a key
-                    # error from ``update[bsuid]`` ?
-                    pp = table.pps.get(bsuid)
-                    if not pp:
-                        log.error(
-                            f'The contract id for {msg} may have '
-                            f'changed to {bsuid}\nYou may need to '
-                            'adjust your ledger for this, skipping '
-                            'for now.'
-                        )
-                        continue
-
-                # XXX: not sure exactly why it wouldn't be in
-                # the updated output (maybe this is a bug?) but
-                # if you create a pos from TWS and then load it
-                # from the api trades it seems we get a key
-                # error from ``update[bsuid]`` ?
-                pp = table.pps[bsuid]
-                pairinfo = pp.symbol
-                if msg.size != pp.size:
-                    log.error(
-                        f'Pos size mismatch {pairinfo.front_fqsn()}:\n'
-                        f'ib: {msg.size}\n'
-                        f'piker: {pp.size}\n'
-                    )
-
-            active_pps, closed_pps = table.dump_active()
-
         # load all positions from `pps.toml`, cross check with
         # ib's positions data, and relay re-formatted pps as
         # msgs to the ems.
@@ -641,6 +601,105 @@ async def trades_dialogue(
         # - no new trades yet but we want to reload and audit any
         # positions reported by ib's sys that may not yet be in
         # piker's ``pps.toml`` state-file.
+            tables[acctid] = lstack.enter_context(
+                open_pps(
+                    'ib',
+                    acctid,
+                    write_on_exit=True,
+                )
+            )
+
+        for account, proxy in proxies.items():
+            client = aioclients[account]
+
+            # order_msgs is filled in by this helper
+            await aggr_open_orders(
+                order_msgs,
+                client,
+                proxy,
+                accounts_def,
+            )
+            acctid: str = account.strip('ib.')
+            ledger: dict = ledgers[acctid]
+            table: PpTable = tables[acctid]
+
+            # update trades ledgers for all accounts from connected
+            # api clients which report trades for **this session**.
+            api_trades = await proxy.trades()
+            if api_trades:
+
+                trans_by_acct: dict[str, Transaction]
+                api_to_ledger_entries: dict[str, dict]
+                (
+                    trans_by_acct,
+                    api_to_ledger_entries,
+                ) = await update_ledger_from_api_trades(
+                    api_trades,
+                    proxy,
+                    accounts_def_inv,
+                )
+
+                # if new api_trades are detected from the API, prepare
+                # them for the ledger file and update the pptable.
+                if api_to_ledger_entries:
+                    trade_entries = api_to_ledger_entries.get(acctid)
+
+                    # TODO: fix this `tractor` BUG!
+                    # https://github.com/goodboy/tractor/issues/354
+                    # await tractor.breakpoint()
+
+                    if trade_entries:
+                        # write ledger with all new api_trades
+                        # **AFTER** we've updated the `pps.toml`
+                        # from the original ledger state! (i.e. this
+                        # is currently done on exit)
+                        for tid, entry in trade_entries.items():
+                            ledger.setdefault(tid, {}).update(entry)
+
+                        trans = trans_by_acct.get(acctid)
+                        if trans:
+                            table.update_from_trans(trans)
+
+            # update position table with latest ledger from all
+            # gathered transactions: ledger file + api records.
+            trans: dict[str, Transaction] = norm_trade_records(ledger)
+            table.update_from_trans(trans)
+
+            # process pp value reported from ib's system. we only
+            # use these to cross-check sizing since average pricing
+            # on their end uses the so called (bs) "FIFO" style
+            # which more or less results in a price that's not
+            # useful for traders who want to not lose money.. xb
+            # -> collect all ib-pp reported positions so that we can be
+            # sure know which positions to update from the ledger if
+            # any are missing from the ``pps.toml``
+
+            pos: IbPosition  # named tuple subtype
+            for pos in client.positions():
+
+                # NOTE XXX: we skip options for now since we don't
+                # yet support the symbology nor the live feeds.
+                if isinstance(pos.contract, Option):
+                    log.warning(
+                        f'Option contracts not supported for now:\n'
+                        f'{pos._asdict()}'
+                    )
+                    continue
+
+                bs_mktid, msg = pack_position(pos)
+                acctid = msg.account = accounts_def.inverse[msg.account]
+                acctid = acctid.strip('ib.')
+                cids2pps[(acctid, bs_mktid)] = msg
+
+                assert msg.account in accounts, (
+                    f'Position for unknown account: {msg.account}')
+
+        # iterate all (newly) updated pps tables for every
+        # client-account and build out position msgs to deliver to
+        # EMS.
+        for acctid, table in tables.items():
+            active_pps, closed_pps = table.dump_active()
+
             for pps in [active_pps, closed_pps]:
                 msgs = await update_and_audit_msgs(
                     acctid,
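Note the switch from a wholesale `ledger.update(trade_entries)` to a per-tid `setdefault(...).update(...)` merge: an existing (e.g. flex-report sourced) entry keeps any fields the fresh API record doesn't carry. A minimal sketch of the difference:

    ledger = {'t1': {'price': 1.0, 'note': 'from flex'}}
    api = {'t1': {'price': 1.0, 'commission': 0.5}}

    # wholesale replace would drop the 'note' field:
    # ledger.update(api)

    # per-entry merge keeps old fields and adds/overwrites new ones:
    for tid, entry in api.items():
        ledger.setdefault(tid, {}).update(entry)

    assert ledger['t1'] == {'price': 1.0, 'note': 'from flex', 'commission': 0.5}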
@@ -661,22 +720,6 @@ async def trades_dialogue(
             tuple(name for name in accounts_def if name in accounts),
         ))

-    # proxy wrapper for starting trade event stream
-    async def open_trade_event_stream(
-        client: Client,
-        task_status: TaskStatus[
-            trio.abc.ReceiveChannel
-        ] = trio.TASK_STATUS_IGNORED,
-    ):
-        # each api client has a unique event stream
-        async with tractor.to_asyncio.open_channel_from(
-            recv_trade_updates,
-            client=client,
-        ) as (first, trade_event_stream):
-
-            task_status.started(trade_event_stream)
-            await trio.sleep_forever()
-
     async with (
         ctx.open_stream() as ems_stream,
         trio.open_nursery() as n,
@@ -723,44 +766,50 @@ async def trades_dialogue(
 async def emit_pp_update(
     ems_stream: tractor.MsgStream,
     trade_entry: dict,
-    accounts_def: bidict,
+    accounts_def: bidict[str, str],
     proxies: dict,
     cids2pps: dict,

-    ledgers,
-    tables,
+    ledgers: dict[str, dict[str, Any]],
+    tables: dict[str, PpTable],

 ) -> None:

     # compute and relay incrementally updated piker pp
-    acctid = accounts_def.inverse[trade_entry['execution']['acctNumber']]
-    proxy = proxies[acctid]
+    accounts_def_inv: bidict[str, str] = accounts_def.inverse
+    fq_acctid = accounts_def_inv[trade_entry['execution']['acctNumber']]
+    proxy = proxies[fq_acctid]

-    acctid = acctid.strip('ib.')
     (
         records_by_acct,
         api_to_ledger_entries,
     ) = await update_ledger_from_api_trades(
         [trade_entry],
         proxy,
+        accounts_def_inv,
     )
-    trans = records_by_acct[acctid]
+    trans = records_by_acct[fq_acctid]
     r = list(trans.values())[0]

+    acctid = fq_acctid.strip('ib.')
     table = tables[acctid]
     table.update_from_trans(trans)
     active, closed = table.dump_active()

     # NOTE: update ledger with all new trades
-    for acctid, trades_by_id in api_to_ledger_entries.items():
+    for fq_acctid, trades_by_id in api_to_ledger_entries.items():
+        acctid = fq_acctid.strip('ib.')
         ledger = ledgers[acctid]
-        ledger.update(trades_by_id)
+        for tid, tdict in trades_by_id.items():
+            # NOTE: don't override flex/previous entries with new API
+            # ones, just update with new fields!
+            ledger.setdefault(tid, {}).update(tdict)

     # generate pp msgs and cross check with ib's positions data, relay
     # re-formatted pps as msgs to the ems.
     for pos in filter(
         bool,
-        [active.get(r.bsuid), closed.get(r.bsuid)]
+        [active.get(r.bs_mktid), closed.get(r.bs_mktid)]
     ):
         msgs = await update_and_audit_msgs(
             acctid,
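The `fq_acctid` ("fully qualified", broker-prefixed) vs. bare `acctid` distinction matters because `proxies` is keyed by the former while `ledgers`/`tables` are keyed by the latter. A tiny illustration of the key shapes involved (names invented for the example):

    fq_acctid = 'ib.algopaper'
    acctid = fq_acctid.strip('ib.')  # -> 'algopaper'

    proxies = {fq_acctid: object()}   # api proxies: broker-qualified keys
    ledgers = {acctid: {}}            # ledgers/tables: bare account keys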
@@ -859,8 +908,8 @@ async def deliver_trade_events(
             # https://github.com/erdewit/ib_insync/issues/363
             # acctid = accounts_def.inverse[trade.order.account]

-            # # double check there is no error when
-            # # cancelling.. gawwwd
+            # double check there is no error when
+            # cancelling.. gawwwd
             # if ib_status_key == 'cancelled':
             #     last_log = trade.log[-1]
             #     if (
@@ -1000,6 +1049,7 @@ async def deliver_trade_events(
                     accounts_def,
                     proxies,
                     cids2pps,
+
                     ledgers,
                     tables,
                 )
@@ -1034,6 +1084,7 @@ async def deliver_trade_events(
                     accounts_def,
                     proxies,
                     cids2pps,
+
                     ledgers,
                     tables,
                 )
@@ -1095,7 +1146,7 @@ async def deliver_trade_events(
 def norm_trade_records(
     ledger: dict[str, Any],

-) -> list[Transaction]:
+) -> dict[str, Transaction]:
     '''
     Normalize a flex report or API retrieved executions
     ledger into our standard record format.
@@ -1110,7 +1161,6 @@ def norm_trade_records(
         comms = -1*record['ibCommission']

         price = record.get('price') or record['tradePrice']
-        price_tick_digits = float_digits(price)

         # the api doesn't do the -/+ on the quantity for you but flex
         # records do.. are you fucking serious ib...!?
@@ -1122,6 +1172,12 @@ def norm_trade_records(
         exch = record['exchange']
         lexch = record.get('listingExchange')

+        # NOTE: remove null values since `tomlkit` can't serialize
+        # them to file.
+        dnc = record.pop('deltaNeutralContract', False)
+        if dnc is not None:
+            record['deltaNeutralContract'] = dnc
+
         suffix = lexch or exch
         symbol = record['symbol']
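The same "no nulls into TOML" rule applies to any field, not just `deltaNeutralContract`, since TOML simply has no null type. A generic scrub helper along those lines (hypothetical, not part of the diff):

    def scrub_nones(record: dict) -> dict:
        # drop keys whose value is None so `tomlkit`/`toml` can
        # serialize the remainder without raising
        return {k: v for k, v in record.items() if v is not None}

    assert scrub_nones({'a': 1, 'b': None}) == {'a': 1}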
@@ -1153,7 +1209,9 @@ def norm_trade_records(

         # special handling of symbol extraction from
         # flex records using some ad-hoc schema parsing.
-        asset_type: str = record.get('assetCategory') or record['secType']
+        asset_type: str = record.get(
+            'assetCategory'
+        ) or record.get('secType', 'STK')

         # TODO: XXX: WOA this is kinda hacky.. probably
         # should figure out the correct future pair key more
@@ -1161,58 +1219,54 @@ def norm_trade_records(
         if asset_type == 'FUT':
             # (flex) ledger entries don't have any simple 3-char key?
             symbol = record['symbol'][:3]
-
-        # try to build out piker fqsn from record.
-        expiry = record.get(
-            'lastTradeDateOrContractMonth') or record.get('expiry')
+            asset_type: str = 'future'
+
+        elif asset_type == 'STK':
+            asset_type: str = 'stock'
+
+        # try to build out piker fqme from record.
+        expiry = (
+            record.get('lastTradeDateOrContractMonth')
+            or record.get('expiry')
+        )
         if expiry:
             expiry = str(expiry).strip(' ')
             suffix = f'{exch}.{expiry}'
             expiry = pendulum.parse(expiry)

-        src: str = record['currency']
+        # src: str = record['currency']
+        price_tick: Decimal = digits_to_dec(dec_digits(price))

-        pair = Symbol.from_fqsn(
-            fqsn=f'{symbol}.{suffix}.ib',
-            info={
-                'tick_size_digits': price_tick_digits,
+        pair = MktPair.from_fqme(
+            fqme=f'{symbol}.{suffix}.ib',
+            bs_mktid=str(conid),
+            _atype=str(asset_type),  # XXX: can't serlialize `tomlkit.String`
+
+            price_tick=price_tick,
             # NOTE: for "legacy" assets, volume is normally discreet, not
             # a float, but we keep a digit in case the suitz decide
             # to get crazy and change it; we'll be kinda ready
             # schema-wise..
-                'lot_size_digits': 1,
-
-                # TODO: remove when we switching from
-                # ``Symbol`` -> ``MktPair``
-                'asset_type': asset_type,
-
-                # TODO: figure out a target fin-type name
-                # set and normalize to that here!
-                'dst_type': asset_type.lower(),
-
-                # starting to use new key naming as in ``MktPair``
-                # type have drafted...
-                'src': src,
-                'src_type': 'fiat',
-            },
+            size_tick='1',
         )
-        fqsn = pair.front_fqsn().rstrip('.ib')

-        # NOTE: for flex records the normal fields for defining an fqsn
+        fqme = pair.fqme
+
+        # NOTE: for flex records the normal fields for defining an fqme
         # sometimes won't be available so we rely on two approaches for
-        # the "reverse lookup" of piker style fqsn keys:
+        # the "reverse lookup" of piker style fqme keys:
         # - when dealing with API trade records received from
         # `IB.trades()` we do a contract lookup at he time of processing
         # - when dealing with flex records, it is assumed the record
         # is at least a day old and thus the TWS position reporting system
         # should already have entries if the pps are still open, in
-        # which case, we can pull the fqsn from that table (see
+        # which case, we can pull the fqme from that table (see
         # `trades_dialogue()` above).
         insort(
             records,
             Transaction(
-                fqsn=fqsn,
+                fqme=fqme,
                 sym=pair,
                 tid=tid,
                 size=size,
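The new `price_tick` is derived by counting the trade price's decimal digits and mapping that count back to a minimum increment. Assuming `dec_digits()`/`digits_to_dec()` carry those semantics, the round trip looks roughly like:

    from decimal import Decimal

    def dec_digits(value: float) -> int:
        # number of significant decimal places, e.g. 0.25 -> 2
        # (assumed behaviour, and only sensible for values < 1.0-ish)
        return -Decimal(str(value)).normalize().as_tuple().exponent

    def digits_to_dec(ndigits: int) -> Decimal:
        # 2 -> Decimal('0.01'): the smallest representable increment
        return Decimal('1').scaleb(-ndigits)

    assert digits_to_dec(dec_digits(0.25)) == Decimal('0.01')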
@@ -1220,7 +1274,7 @@ def norm_trade_records(
                 cost=comms,
                 dt=dt,
                 expiry=expiry,
-                bsuid=conid,
+                bs_mktid=str(conid),
             ),
             key=lambda t: t.dt
         )
@@ -1228,18 +1282,8 @@ def norm_trade_records(
     return {r.tid: r for r in records}


-def parse_flex_dt(
-    record: str,
-) -> pendulum.datetime:
-    date, ts = record.split(';')
-    dt = pendulum.parse(date)
-    ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}'
-    tsdt = pendulum.parse(ts)
-    return dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second)
-
-
 def api_trades_to_ledger_entries(
-    accounts: bidict,
+    accounts: bidict[str, str],

     # TODO: maybe we should just be passing through the
     # ``ib_insync.order.Trade`` instance directly here
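The return-type change above (list -> dict keyed by transaction id) still preserves datetime ordering: `insort` keeps `records` sorted as they're built, and python dicts preserve insertion order in the final comprehension. A minimal sketch of that pattern:

    from bisect import insort
    from collections import namedtuple

    Tx = namedtuple('Tx', ['tid', 'dt'])

    records: list[Tx] = []
    for tx in (Tx('b', 2), Tx('a', 1)):
        # `key=` on insort requires python >= 3.10
        insort(records, tx, key=lambda t: t.dt)

    assert list({r.tid: r for r in records}) == ['a', 'b']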
@@ -1309,148 +1353,3 @@ def api_trades_to_ledger_entries(
     ))

     return trades_by_account
-
-
-def flex_records_to_ledger_entries(
-    accounts: bidict,
-    trade_entries: list[object],
-
-) -> dict:
-    '''
-    Convert flex report entry objects into ``dict`` form, pretty much
-    straight up without modification except add a `pydatetime` field
-    from the parsed timestamp.
-
-    '''
-    trades_by_account = {}
-    for t in trade_entries:
-        entry = t.__dict__
-
-        # XXX: LOL apparently ``toml`` has a bug
-        # where a section key error will show up in the write
-        # if you leave a table key as an `int`? So i guess
-        # cast to strs for all keys..
-
-        # oddly for some so-called "BookTrade" entries
-        # this field seems to be blank, no cuckin clue.
-        # trade['ibExecID']
-        tid = str(entry.get('ibExecID') or entry['tradeID'])
-        # date = str(entry['tradeDate'])
-
-        # XXX: is it going to cause problems if a account name
-        # get's lost? The user should be able to find it based
-        # on the actual exec history right?
-        acctid = accounts[str(entry['accountId'])]
-
-        # probably a flex record with a wonky non-std timestamp..
-        dt = entry['pydatetime'] = parse_flex_dt(entry['dateTime'])
-        entry['datetime'] = str(dt)
-
-        if not tid:
-            # this is likely some kind of internal adjustment
-            # transaction, likely one of the following:
-            # - an expiry event that will show a "book trade" indicating
-            # some adjustment to cash balances: zeroing or itm settle.
-            # - a manual cash balance position adjustment likely done by
-            # the user from the accounts window in TWS where they can
-            # manually set the avg price and size:
-            # https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST
-            log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}')
-            continue
-
-        trades_by_account.setdefault(
-            acctid, {}
-        )[tid] = entry
-
-    for acctid in trades_by_account:
-        trades_by_account[acctid] = dict(sorted(
-            trades_by_account[acctid].items(),
-            key=lambda entry: entry[1]['pydatetime'],
-        ))
-
-    return trades_by_account
-
-
-def load_flex_trades(
-    path: Optional[str] = None,
-
-) -> dict[str, Any]:
-
-    from ib_insync import flexreport, util
-
-    conf = get_config()
-
-    if not path:
-        # load ``brokers.toml`` and try to get the flex
-        # token and query id that must be previously defined
-        # by the user.
-        token = conf.get('flex_token')
-        if not token:
-            raise ValueError(
-                'You must specify a ``flex_token`` field in your'
-                '`brokers.toml` in order load your trade log, see our'
-                'intructions for how to set this up here:\n'
-                'PUT LINK HERE!'
-            )
-
-        qid = conf['flex_trades_query_id']
-
-        # TODO: hack this into our logging
-        # system like we do with the API client..
-        util.logToConsole()
-
-        # TODO: rewrite the query part of this with async..httpx?
-        report = flexreport.FlexReport(
-            token=token,
-            queryId=qid,
-        )
-
-    else:
-        # XXX: another project we could potentially look at,
-        # https://pypi.org/project/ibflex/
-        report = flexreport.FlexReport(path=path)
-
-    trade_entries = report.extract('Trade')
-    ln = len(trade_entries)
-    log.info(f'Loaded {ln} trades from flex query')
-
-    trades_by_account = flex_records_to_ledger_entries(
-        conf['accounts'].inverse,  # reverse map to user account names
-        trade_entries,
-    )
-
-    ledger_dict: Optional[dict] = None
-
-    for acctid in trades_by_account:
-        trades_by_id = trades_by_account[acctid]
-
-        with open_trade_ledger('ib', acctid) as ledger_dict:
-            tid_delta = set(trades_by_id) - set(ledger_dict)
-            log.info(
-                'New trades detected\n'
-                f'{pformat(tid_delta)}'
-            )
-            if tid_delta:
-                sorted_delta = dict(sorted(
-                    {tid: trades_by_id[tid] for tid in tid_delta}.items(),
-                    key=lambda entry: entry[1].pop('pydatetime'),
-                ))
-                ledger_dict.update(sorted_delta)
-
-    return ledger_dict
-
-
-if __name__ == '__main__':
-    import sys
-    import os
-
-    args = sys.argv
-    if len(args) > 1:
-        args = args[1:]
-        for arg in args:
-            path = os.path.abspath(arg)
-            load_flex_trades(path=path)
-    else:
-        # expect brokers.toml to have an entry and
-        # pull from the web service.
-        load_flex_trades()
@@ -19,7 +19,11 @@ Data feed endpoints pre-wrapped and ready for use with ``tractor``/``trio``.
 """
 from __future__ import annotations
 import asyncio
-from contextlib import asynccontextmanager as acm
+from contextlib import (
+    asynccontextmanager as acm,
+    nullcontext,
+)
+from decimal import Decimal
 from dataclasses import asdict
 from datetime import datetime
 from functools import partial
@@ -46,7 +50,7 @@ from .._util import (
 )
 from .api import (
     # _adhoc_futes_set,
-    con2fqsn,
+    con2fqme,
     log,
     load_aio_clients,
     ibis,
@@ -54,10 +58,18 @@ from .api import (
     open_client_proxies,
     get_preferred_data_client,
     Ticker,
-    RequestError,
     Contract,
+    RequestError,
 )
 from ._util import data_reset_hack
+from piker._cacheables import (
+    async_lifo_cache,
+)
+from piker.accounting import (
+    Asset,
+    MktPair,
+)
+from piker.data.validate import FeedInit


 # https://interactivebrokers.github.io/tws-api/tick_types.html
@@ -108,7 +120,7 @@ async def open_data_client() -> MethodProxy:

 @acm
 async def open_history_client(
-    fqsn: str,
+    mkt: MktPair,

 ) -> tuple[Callable, int]:
     '''
@@ -116,7 +128,7 @@ async def open_history_client(
     that takes in ``pendulum.datetime`` and returns ``numpy`` arrays.

     '''
-    # TODO:
+    # TODO: mostly meta-data processing to drive shm and tsdb storage..
     # - add logic to handle tradable hours and only grab
     # valid bars in the range?
     # - we want to avoid overrunning the underlying shm array buffer and
@@ -125,8 +137,33 @@ async def open_history_client(
     # the shm size will be driven by user config and available sys
     # memory.

+    # IB's internal symbology does not expect the "source asset" in
+    # the "symbol name", what we call the "market name". This is
+    # common in most legacy market brokers since it's presumed that
+    # given a certain stock exchange, listed assets are traded
+    # "from" a particular source fiat, normally something like USD.
+    if (
+        mkt.src
+        and mkt.src.atype == 'fiat'
+    ):
+        fqme_kwargs: dict[str, Any] = {}
+
+        if mkt.dst.atype == 'forex':
+
+            # XXX: for now we do need the src token kept in since
+            fqme_kwargs = {
+                'without_src': False,  # default is True
+                'delim_char': '',  # bc they would normally use a frickin `.` smh
+            }
+
+        fqme: str = mkt.get_bs_fqme(**(fqme_kwargs))
+
+    else:
+        fqme = mkt.bs_fqme
+
     async with open_data_client() as proxy:

         max_timeout: float = 2.
         mean: float = 0
         count: int = 0
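For a forex pair like EUR/USD the effect of those `fqme_kwargs` is to keep the source fiat in the symbol and drop the delimiter, matching ib's native naming. A rough sketch of the string shape `get_bs_fqme()` is being asked to produce (a standalone stand-in, not the real method):

    def bs_fqme(
        dst: str,
        src: str,
        venue: str,
        without_src: bool = True,
        delim_char: str = '.',
    ) -> str:
        # compose a broker-system market name, optionally keeping
        # the source (fiat) token embedded in the symbol
        sym = dst if without_src else f'{dst}{delim_char}{src}'
        return f'{sym}.{venue}'

    assert bs_fqme('eur', 'usd', 'idealpro') == 'eur.idealpro'
    assert bs_fqme(
        'eur', 'usd', 'idealpro',
        without_src=False,
        delim_char='',
    ) == 'eurusd.idealpro'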
@@ -134,10 +171,10 @@ async def open_history_client(
         head_dt: None | datetime = None
         if (
             # fx cons seem to not provide this endpoint?
-            'idealpro' not in fqsn
+            'idealpro' not in fqme
         ):
             try:
-                head_dt = await proxy.get_head_time(fqsn=fqsn)
+                head_dt = await proxy.get_head_time(fqme=fqme)
             except RequestError:
                 head_dt = None
@@ -152,7 +189,7 @@ async def open_history_client(
             query_start = time.time()
             out, timedout = await get_bars(
                 proxy,
-                fqsn,
+                fqme,
                 timeframe,
                 end_dt=end_dt,
             )
@@ -211,7 +248,7 @@ _pacing: str = (
 async def wait_on_data_reset(
     proxy: MethodProxy,
     reset_type: str = 'data',
-    timeout: float = 16,
+    timeout: float = 16,  # float('inf'),

     task_status: TaskStatus[
         tuple[
@@ -227,7 +264,7 @@ async def wait_on_data_reset(
         'HMDS data farm connection is OK:ushmds'
     )

-    # XXX: other event messages we might want to try and
+    # TODO: other event messages we might want to try and
     # wait for but i wasn't able to get any of this
     # reliable..
     # reconnect_start = proxy.status_event(
@@ -238,14 +275,21 @@ async def wait_on_data_reset(
     # )
     # try to wait on the reset event(s) to arrive, a timeout
     # will trigger a retry up to 6 times (for now).
+    client = proxy._aio_ns.ib.client
+
     done = trio.Event()
     with trio.move_on_after(timeout) as cs:

         task_status.started((cs, done))

-        log.warning('Sending DATA RESET request')
-        res = await data_reset_hack(reset_type=reset_type)
+        log.warning(
+            'Sending DATA RESET request:\n'
+            f'{client}'
+        )
+        res = await data_reset_hack(
+            vnc_host=client.host,
+            reset_type=reset_type,
+        )

         if not res:
             log.warning(
@@ -279,12 +323,12 @@ async def wait_on_data_reset(


 _data_resetter_task: trio.Task | None = None
+_failed_resets: int = 0


 async def get_bars(

     proxy: MethodProxy,
-    fqsn: str,
+    fqme: str,
     timeframe: int,

     # blank to start which tells ib to look up the latest datum
@@ -298,6 +342,7 @@ async def get_bars(
     # history queries for instrument, presuming that most don't
     # not trade for a week XD
     max_nodatas: int = 6,
+    max_failed_resets: int = 6,

     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
@@ -307,7 +352,7 @@ async def get_bars(
     a ``MethoProxy``.

     '''
-    global _data_resetter_task
+    global _data_resetter_task, _failed_resets
     nodatas_count: int = 0

     data_cs: trio.CancelScope | None = None
@@ -320,11 +365,14 @@ async def get_bars(
     result_ready = trio.Event()

     async def query():

+        global _failed_resets
         nonlocal result, data_cs, end_dt, nodatas_count
-        while True:
+
+        while _failed_resets < max_failed_resets:
             try:
                 out = await proxy.bars(
-                    fqsn=fqsn,
+                    fqme=fqme,
                     end_dt=end_dt,
                     sample_period_s=timeframe,
@@ -339,7 +387,10 @@ async def get_bars(

                 bars, bars_array, dt_duration = out

-                if not bars:
+                if (
+                    not bars
+                    and end_dt
+                ):
                     log.warning(
                         f'History is blank for {dt_duration} from {end_dt}'
                     )
@@ -347,7 +398,7 @@ async def get_bars(
                     continue

                 if bars_array is None:
-                    raise SymbolNotFound(fqsn)
+                    raise SymbolNotFound(fqme)

                 first_dt = pendulum.from_timestamp(
                     bars[0].date.timestamp())
@@ -378,11 +429,10 @@ async def get_bars(
                 if 'No market data permissions for' in msg:
                     # TODO: signalling for no permissions searches
                     raise NoData(
-                        f'Symbol: {fqsn}',
+                        f'Symbol: {fqme}',
                     )

-                elif err.code == 162:
-                    if (
+                elif (
                     'HMDS query returned no data' in msg
                 ):
                     # XXX: this is now done in the storage mgmt
@@ -405,7 +455,7 @@ async def get_bars(

                     if nodatas_count >= max_nodatas:
                         raise DataUnavailable(
-                            f'Presuming {fqsn} has no further history '
+                            f'Presuming {fqme} has no further history '
                             f'after {max_nodatas} tries..'
                         )
@@ -432,8 +482,11 @@ async def get_bars(
                         'Resetting farms with `ctrl-alt-f` hack\n'
                     )

+                    client = proxy._aio_ns.ib.client
+
                     # cancel any existing reset task
                     if data_cs:
+                        log.cancel(f'Cancelling existing reset for {client}')
                         data_cs.cancel()

                     # spawn new data reset task
@@ -441,10 +494,13 @@ async def get_bars(
                         partial(
                             wait_on_data_reset,
                             proxy,
-                            timeout=float('inf'),
                             reset_type='connection'
                         )
                     )
+                    if reset_done:
+                        _failed_resets = 0
+                    else:
+                        _failed_resets += 1
                     continue

                 else:
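Together with the earlier `while _failed_resets < max_failed_resets:` change, the module-level counter turns the previous unbounded retry loop into one with a budget that refills on any success. The control flow reduced to its essentials:

    _failed_resets: int = 0
    max_failed_resets: int = 6

    def run_with_reset_budget(attempt) -> None:
        global _failed_resets
        while _failed_resets < max_failed_resets:
            if attempt():           # stand-in for a bars query + reset cycle
                _failed_resets = 0  # success: restore the full retry budget
                return
            _failed_resets += 1     # failure: burn one retry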
@@ -481,7 +537,7 @@ async def get_bars(
                 partial(
                     wait_on_data_reset,
                     proxy,
-                    timeout=float('inf'),
+                    reset_type='data',
                 )
             )
         # sync wait on reset to complete
@@ -491,7 +547,9 @@ async def get_bars(
     return result, data_cs is not None


-asset_type_map = {
+# re-mapping to piker asset type names
+# https://github.com/erdewit/ib_insync/blob/master/ib_insync/contract.py#L113
+_asset_type_map = {
     'STK': 'stock',
     'OPT': 'option',
     'FUT': 'future',
@@ -532,7 +590,7 @@ async def _setup_quote_stream(
         '294',  # Trade rate / minute
         '295',  # Vlm rate / minute
     ),
-    contract: Optional[Contract] = None,
+    contract: Contract | None = None,

 ) -> trio.abc.ReceiveChannel:
     '''
@@ -618,7 +676,7 @@ async def _setup_quote_stream(
 async def open_aio_quote_stream(

     symbol: str,
-    contract: Optional[Contract] = None,
+    contract: Contract | None = None,

 ) -> trio.abc.ReceiveStream:
@@ -661,7 +719,7 @@ def normalize(

     # check for special contract types
     con = ticker.contract
-    fqsn, calc_price = con2fqsn(con)
+    fqme, calc_price = con2fqme(con)

     # convert named tuples to dicts so we send usable keys
     new_ticks = []
@@ -691,9 +749,9 @@ def normalize(
     # serialize for transport
     data = asdict(ticker)

-    # generate fqsn with possible specialized suffix
+    # generate fqme with possible specialized suffix
     # for derivatives, note the lowercase.
-    data['symbol'] = data['fqsn'] = fqsn
+    data['symbol'] = data['fqme'] = fqme

     # convert named tuples to dicts for transport
     tbts = data.get('tickByTicks')
@@ -713,6 +771,98 @@ def normalize(
     return data


+@async_lifo_cache()
+async def get_mkt_info(
+    fqme: str,
+
+    proxy: MethodProxy | None = None,
+
+) -> tuple[MktPair, ibis.ContractDetails]:
+
+    # XXX: we don't need to split off any fqme broker part?
+    # bs_fqme, _, broker = fqme.partition('.')
+
+    proxy: MethodProxy
+    get_details: bool = False
+    if proxy is not None:
+        client_ctx = nullcontext(proxy)
+    else:
+        client_ctx = open_data_client
+
+    async with client_ctx as proxy:
+        try:
+            (
+                con,  # Contract
+                details,  # ContractDetails
+            ) = await proxy.get_sym_details(symbol=fqme)
+        except ConnectionError:
+            log.exception(f'Proxy is ded {proxy._aio_ns}')
+            raise
+
+    # TODO: more consistent field translation
+    init_info: dict = {}
+    atype = _asset_type_map[con.secType]
+
+    if atype == 'commodity':
+        venue: str = 'cmdty'
+    else:
+        venue = con.primaryExchange or con.exchange
+
+    price_tick: Decimal = Decimal(str(details.minTick))
+
+    if atype == 'stock':
+        # XXX: GRRRR they don't support fractional share sizes for
+        # stocks from the API?!
+        # if con.secType == 'STK':
+        size_tick = Decimal('1')
+    else:
+        size_tick: Decimal = Decimal(str(details.minSize).rstrip('0'))
+        # |-> TODO: there is also the Contract.sizeIncrement, bt wtf is it?
+
+    # NOTE: this is duplicate from the .broker.norm_trade_records()
+    # routine, we should factor all this parsing somewhere..
+    expiry_str = str(con.lastTradeDateOrContractMonth)
+    # if expiry:
+    #     expiry_str: str = str(pendulum.parse(
+    #         str(expiry).strip(' ')
+    #     ))
+
+    # TODO: currently we can't pass the fiat src asset because
+    # then we'll get a `MNQUSD` request for history data..
+    # we need to figure out how we're going to handle this (later?)
+    # but likely we want all backends to eventually handle
+    # ``dst/src.venue.`` style !?
+    src: str | Asset = ''
+    if atype == 'forex':
+        src = Asset(
+            name=str(con.currency),
+            atype='fiat',
+            tx_tick=Decimal('0.01'),  # right?
+        )
+
+    mkt = MktPair(
+        dst=Asset(
+            name=con.symbol.lower(),
+            atype=atype,
+            tx_tick=size_tick,
+        ),
+        src=src,
+
+        price_tick=price_tick,
+        size_tick=size_tick,
+
+        bs_mktid=str(con.conId),
+        venue=str(venue),
+        expiry=expiry_str,
+        broker='ib',
+
+        # TODO: options contract info as str?
+        # contract_info=<optionsdetails>
+    )
+
+    return mkt, details


 async def stream_quotes(

     send_chan: trio.abc.SendChannel,
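The `nullcontext(proxy)` branch in `get_mkt_info()` lets a caller that already holds a client proxy avoid a second client load while keeping a single `async with` body. The shape of that pattern reduced to its essentials (note this sketch calls the context-manager factory explicitly, and that `nullcontext` only supports `async with` on python >= 3.10):

    from contextlib import asynccontextmanager, nullcontext

    import trio

    @asynccontextmanager
    async def open_data_client():
        yield 'fresh-proxy'  # stand-in for a real client/proxy load

    async def get_thing(proxy=None):
        # pass an existing proxy straight through, else open a new one
        client_ctx = (
            nullcontext(proxy) if proxy is not None
            else open_data_client()
        )
        async with client_ctx as proxy:
            return proxy

    assert trio.run(get_thing) == 'fresh-proxy'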
@@ -735,80 +885,49 @@ async def stream_quotes(
     sym = symbols[0]
     log.info(f'request for real-time quotes: {sym}')

+    init_msgs: list[FeedInit] = []
+
+    proxy: MethodProxy
+    mkt: MktPair
+    details: ibis.ContractDetails
     async with open_data_client() as proxy:
+        mkt, details = await get_mkt_info(
+            sym,
+            proxy=proxy,  # passed to avoid implicit client load
+        )

-        con, first_ticker, details = await proxy.get_sym_details(symbol=sym)
-        first_quote = normalize(first_ticker)
-        # print(f'first quote: {first_quote}')
-
-        def mk_init_msgs() -> dict[str, dict]:
-            '''
-            Collect a bunch of meta-data useful for feed startup and
-            pack in a `dict`-msg.
-
-            '''
-            # pass back some symbol info like min_tick, trading_hours, etc.
-            syminfo = asdict(details)
-            syminfo.update(syminfo['contract'])
-
-            # nested dataclass we probably don't need and that won't IPC
-            # serialize
-            syminfo.pop('secIdList')
-
-            # TODO: more consistent field translation
-            atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]
-
-            if atype in {
+        init_msg = FeedInit(mkt_info=mkt)
+
+        if mkt.dst.atype in {
             'forex',
             'index',
             'commodity',
         }:
-            syminfo['no_vlm'] = True
-
-            # for stocks it seems TWS reports too small a tick size
-            # such that you can't submit orders with that granularity?
-            min_tick = 0.01 if atype == 'stock' else 0
-
-            syminfo['price_tick_size'] = max(syminfo['minTick'], min_tick)
-
-            # for "legacy" assets, volume is normally discreet, not
-            # a float
-            syminfo['lot_tick_size'] = 0.0
-
-            ibclient = proxy._aio_ns.ib.client
-            host, port = ibclient.host, ibclient.port
-
-            # TODO: for loop through all symbols passed in
-            init_msgs = {
-                # pass back token, and bool, signalling if we're the writer
-                # and that history has been written
-                sym: {
-                    'symbol_info': syminfo,
-                    'fqsn': first_quote['fqsn'],
-                },
-                'status': {
-                    'data_ep': f'{host}:{port}',
-                },
-
-            }
-            return init_msgs, syminfo
-
-        init_msgs, syminfo = mk_init_msgs()
+            # tell sampler config that it shouldn't do vlm summing.
+            init_msg.shm_write_opts['sum_tick_vlm'] = False
+            init_msg.shm_write_opts['has_vlm'] = False
+
+        init_msgs.append(init_msg)
+
+        con: Contract = details.contract
+        first_ticker: Ticker = await proxy.get_quote(contract=con)
+        first_quote: dict = normalize(first_ticker)
+        log.runtime(f'FIRST QUOTE: {first_quote}')

         # TODO: we should instead spawn a task that waits on a feed to start
         # and let it wait indefinitely..instead of this hard coded stuff.
         with trio.move_on_after(1):
-            contract, first_ticker, details = await proxy.get_quote(symbol=sym)
+            first_ticker = await proxy.get_quote(contract=con)

         # it might be outside regular trading hours so see if we can at
         # least grab history.
         if (
-            isnan(first_ticker.last)
-            and type(first_ticker.contract) not in (
-                ibis.Commodity,
-                ibis.Forex,
-                ibis.Crypto,
-            )
+            isnan(first_ticker.last)  # last quote price value is nan
+            and mkt.dst.atype not in {
+                'commodity',
+                'forex',
+                'crypto',
+            }
         ):
             task_status.started((init_msgs, first_quote))
@@ -820,7 +939,7 @@ async def stream_quotes(
             await trio.sleep_forever()
             return  # we never expect feed to come up?

-    cs: Optional[trio.CancelScope] = None
+    cs: trio.CancelScope | None = None
     startup: bool = True
     while (
         startup
@@ -860,13 +979,14 @@ async def stream_quotes(
             nurse.start_soon(reset_on_feed)

             async with aclosing(stream):
-                if syminfo.get('no_vlm', False):
+                # if syminfo.get('no_vlm', False):
+                if not init_msg.shm_write_opts['has_vlm']:

                     # generally speaking these feeds don't
                     # include vlm data.
-                    atype = syminfo['asset_type']
+                    atype = mkt.dst.atype
                     log.info(
-                        f'No-vlm {sym}@{atype}, skipping quote poll'
+                        f'No-vlm {mkt.fqme}@{atype}, skipping quote poll'
                     )

                 else:
@@ -906,9 +1026,9 @@ async def stream_quotes(
                 # last = time.time()
                 async for ticker in stream:
                     quote = normalize(ticker)
-                    fqsn = quote['fqsn']
-                    # print(f'sending {fqsn}:\n{quote}')
-                    await send_chan.send({fqsn: quote})
+                    fqme = quote['fqme']
+                    # print(f'sending {fqme}:\n{quote}')
+                    await send_chan.send({fqme: quote})

                     # ugh, clear ticks since we've consumed them
                     ticker.ticks = []
@@ -58,7 +58,7 @@ your ``pps.toml`` file will have position entries like,
 [kraken.spot."xmreur.kraken"]
 size = 4.80907954
 ppu = 103.97000000
-bsuid = "XXMRZEUR"
+bs_mktid = "XXMRZEUR"
 clears = [
     { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
 ]
@@ -34,6 +34,7 @@ from .api import (
     get_client,
 )
 from .feed import (
+    get_mkt_info,
     open_history_client,
     open_symbol_search,
     stream_quotes,
@@ -20,10 +20,10 @@ Kraken web API wrapping.
 '''
 from contextlib import asynccontextmanager as acm
 from datetime import datetime
+from decimal import Decimal
 import itertools
 from typing import (
     Any,
-    Optional,
     Union,
 )
 import time
|
@ -41,14 +41,17 @@ import trio
|
||||||
|
|
||||||
from piker import config
|
from piker import config
|
||||||
from piker.data.types import Struct
|
from piker.data.types import Struct
|
||||||
from piker.data._source import Symbol
|
from piker.accounting._mktinfo import (
|
||||||
|
Asset,
|
||||||
|
digits_to_dec,
|
||||||
|
)
|
||||||
from piker.brokers._util import (
|
from piker.brokers._util import (
|
||||||
resproc,
|
resproc,
|
||||||
SymbolNotFound,
|
SymbolNotFound,
|
||||||
BrokerError,
|
BrokerError,
|
||||||
DataThrottle,
|
DataThrottle,
|
||||||
)
|
)
|
||||||
from piker.pp import Transaction
|
from piker.accounting import Transaction
|
||||||
from . import log
|
from . import log
|
||||||
|
|
||||||
# <uri>/<version>/
|
# <uri>/<version>/
|
||||||
|
@ -155,12 +158,23 @@ class Pair(Struct):
|
||||||
short_position_limit: float = 0
|
short_position_limit: float = 0
|
||||||
long_position_limit: float = float('inf')
|
long_position_limit: float = float('inf')
|
||||||
|
|
||||||
|
@property
|
||||||
|
def price_tick(self) -> Decimal:
|
||||||
|
return digits_to_dec(self.pair_decimals)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def size_tick(self) -> Decimal:
|
||||||
|
return digits_to_dec(self.lot_decimals)
|
||||||
|
|
||||||
|
|
||||||
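Note: the new `price_tick`/`size_tick` properties turn Kraken's `pair_decimals`/`lot_decimals` digit counts into `Decimal` tick sizes via `digits_to_dec()`. A minimal sketch of what such a helper presumably does (the actual piker implementation may differ):

    from decimal import Decimal

    def digits_to_dec(ndigits: int) -> Decimal:
        # map a decimal-place count to the smallest representable
        # increment, eg. 2 -> Decimal('0.01'), 8 -> Decimal('1E-8')
        return Decimal(f'1e-{ndigits}')

    assert digits_to_dec(2) == Decimal('0.01')
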
class Client:

-    # global symbol normalization table
+    # symbol mapping from all names to the altname
    _ntable: dict[str, str] = {}
-    _atable: bidict[str, str] = bidict()
+
+    # 2-way map of symbol names to their "alt names" ffs XD
+    _altnames: bidict[str, str] = bidict()

    _pairs: dict[str, Pair] = {}

    def __init__(

@ -176,11 +190,13 @@ class Client:
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
-        self.conf: dict[str, str] = config
        self._name = name
        self._api_key = api_key
        self._secret = secret

+        self.conf: dict[str, str] = config
+        self.assets: dict[str, Asset] = {}

    @property
    def pairs(self) -> dict[str, Pair]:
        if self._pairs is None:

@ -247,20 +263,49 @@ class Client:
            'Balance',
            {},
        )
-        by_bsuid = resp['result']
+        by_bsmktid = resp['result']

+        # TODO: we need to pull out the "asset" decimals
+        # data and return a `decimal.Decimal` instead here!
+        # using the underlying Asset
        return {
-            self._atable[sym].lower(): float(bal)
-            for sym, bal in by_bsuid.items()
+            self._altnames[sym].lower(): float(bal)
+            for sym, bal in by_bsmktid.items()
        }

-    async def get_assets(self) -> dict[str, dict]:
-        resp = await self._public('Assets', {})
-        return resp['result']
-
-    async def cache_assets(self) -> None:
-        assets = self.assets = await self.get_assets()
-        for bsuid, info in assets.items():
-            self._atable[bsuid] = info['altname']
+    async def get_assets(self) -> dict[str, Asset]:
+        '''
+        Load and cache all asset infos and pack into
+        our native ``Asset`` struct.
+
+        https://docs.kraken.com/rest/#tag/Market-Data/operation/getAssetInfo
+
+        return msg:
+            "asset1": {
+                "aclass": "string",
+                "altname": "string",
+                "decimals": 0,
+                "display_decimals": 0,
+                "collateral_value": 0,
+                "status": "string"
+            }
+
+        '''
+        resp = await self._public('Assets', {})
+        assets = resp['result']
+
+        for bs_mktid, info in assets.items():
+            altname = self._altnames[bs_mktid] = info['altname']
+            aclass: str = info['aclass']
+
+            self.assets[bs_mktid] = Asset(
+                name=altname.lower(),
+                atype=f'crypto_{aclass}',
+                tx_tick=digits_to_dec(info['decimals']),
+                info=info,
+            )
+
+        return self.assets

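To illustrate the normalization `get_assets()` now performs (values hypothetical, fields per the REST schema quoted in the docstring): an entry like `'XXBT': {'aclass': 'currency', 'altname': 'XBT', 'decimals': 10, ...}` would be cached roughly as `Asset(name='xbt', atype='crypto_currency', tx_tick=Decimal('1E-10'), info=info)`, keyed by the backend market id `'XXBT'`.
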
    async def get_trades(
        self,

@ -323,10 +368,15 @@ class Client:
        Currently only withdrawals are supported.

        '''
-        xfers: list[dict] = (await self.endpoint(
+        resp = await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
-        ))['result']
+        )
+        try:
+            xfers: list[dict] = resp['result']
+        except KeyError:
+            log.exception(f'Kraken suxxx: {resp}')
+            return []

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':

@ -341,28 +391,21 @@ class Client:

            # look up the normalized name and asset info
            asset_key = entry['asset']
-            asset_info = self.assets[asset_key]
-            asset = self._atable[asset_key].lower()
+            asset = self.assets[asset_key]
+            asset_key = self._altnames[asset_key].lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commisions cost necessarily..)
            cost = float(entry['fee'])

-            fqsn = asset + '.kraken'
-            pairinfo = Symbol.from_fqsn(
-                fqsn,
-                info={
-                    'asset_type': 'crypto',
-                    'lot_tick_size': asset_info['decimals'],
-                },
-            )
+            fqme = asset_key + '.kraken'

-            tran = Transaction(
-                fqsn=fqsn,
-                sym=pairinfo,
+            tx = Transaction(
+                fqme=fqme,
+                sym=asset,
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
-                bsuid=f'{asset}{src_asset}',
+                bs_mktid=f'{asset_key}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +

@ -375,7 +418,7 @@ class Client:
                # XXX: see note above
                cost=cost,
            )
-            trans[tran.tid] = tran
+            trans[tx.tid] = tx

        return trans

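Note the sign convention in the withdrawal `Transaction` above: the recorded size is `-1 * (amount + fee)`, so e.g. withdrawing 4.0 XMR with a 0.01 XMR network fee books as size = -(4.0 + 0.01) = -4.01, debiting the full amount that left the account.
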
@ -424,30 +467,44 @@ class Client:
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})

-    async def symbol_info(
+    async def pair_info(
        self,
-        pair: Optional[str] = None,
+        pair_patt: str | None = None,

    ) -> dict[str, Pair] | Pair:
+        '''
+        Query for a tradeable asset pair (info), or all if no input
+        pattern is provided.
+
+        https://docs.kraken.com/rest/#tag/Market-Data/operation/getTradableAssetPairs
+
+        '''
-        if pair is not None:
-            pairs = {'pair': pair}
-        else:
-            pairs = None  # get all pairs
-
-        resp = await self._public('AssetPairs', pairs)
+        # get all pairs by default, or filter
+        # to whatever pattern is provided as input.
+        pairs: dict[str, str] | None = None
+        if pair_patt is not None:
+            pairs = {'pair': pair_patt}
+
+        resp = await self._public(
+            'AssetPairs',
+            pairs,
+        )
        err = resp['error']
        if err:
-            symbolname = pairs['pair'] if pair else None
-            raise SymbolNotFound(f'{symbolname}.kraken')
+            raise SymbolNotFound(pair_patt)

-        pairs = resp['result']
-
-        if pair is not None:
-            _, data = next(iter(pairs.items()))
-            return Pair(**data)
-        else:
-            return {key: Pair(**data) for key, data in pairs.items()}
+        pairs: dict[str, Pair] = {
+            key: Pair(**data)
+            for key, data in resp['result'].items()
+        }
+        # always cache so we can possibly do faster lookup
+        self._pairs.update(pairs)
+
+        if pair_patt is not None:
+            return next(iter(pairs.items()))[1]
+
+        return pairs

    async def cache_symbols(self) -> dict:
        '''

@ -460,17 +517,18 @@ class Client:

        '''
        if not self._pairs:
-            self._pairs.update(await self.symbol_info())
+            pairs = await self.pair_info()
+            assert self._pairs == pairs

        # table of all ws and rest keys to their alt-name values.
        ntable: dict[str, str] = {}

-        for rest_key in list(self._pairs.keys()):
+        for rest_key in list(pairs.keys()):

-            pair: Pair = self._pairs[rest_key]
+            pair: Pair = pairs[rest_key]
            altname = pair.altname
            wsname = pair.wsname
-            ntable[rest_key] = ntable[wsname] = altname
+            ntable[altname] = ntable[rest_key] = ntable[wsname] = altname

        # register the pair under all monikers, a giant flat
        # surjection of all possible names to each info obj.

@ -483,7 +541,6 @@ class Client:
    async def search_symbols(
        self,
        pattern: str,
-        limit: int = None,

    ) -> dict[str, Any]:
        '''

@ -594,8 +651,7 @@ class Client:
        the 'AssetPairs' endpoint, see methods above.

        '''
-        ticker = cls._ntable[ticker]
-        return ticker.lower(), cls._pairs[ticker]
+        return cls._ntable[ticker].lower()


@acm

@ -615,7 +671,7 @@ async def get_client() -> Client:
    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as nurse:
-        nurse.start_soon(client.cache_assets)
+        nurse.start_soon(client.get_assets)
        await client.cache_symbols()

    yield client

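With the widened `ntable` assignment every moniker, REST key, ws name, and now the altname itself, resolves to the same alt symbol, which is what lets `normalize_symbol()` collapse to a single lookup. Illustrative entries (assuming Kraken's usual XBT/USD monikers):

    # after cache_symbols():
    # Client._ntable = {
    #     'XXBTZUSD': 'XBTUSD',  # REST key
    #     'XBT/USD':  'XBTUSD',  # ws name
    #     'XBTUSD':   'XBTUSD',  # altname maps to itself
    # }
    # Client.normalize_symbol('XXBTZUSD')  # -> 'xbtusd'
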
@ -21,7 +21,7 @@ Order api and machinery
from collections import ChainMap, defaultdict
from contextlib import (
    asynccontextmanager as acm,
-    contextmanager as cm,
+    aclosing,
)
from functools import partial
from itertools import count

@ -35,20 +35,23 @@ from typing import (
    Union,
)

-from async_generator import aclosing
from bidict import bidict
import pendulum
import trio
import tractor

-from piker.pp import (
+from piker.accounting import (
    Position,
    PpTable,
    Transaction,
+    TransactionLedger,
    open_trade_ledger,
    open_pps,
+    get_likely_pair,
+)
+from piker.accounting._mktinfo import (
+    MktPair,
)
-from piker.data._source import Symbol
from piker.clearing._messages import (
    Order,
    Status,

@ -67,7 +70,7 @@ from .api import (
    get_client,
)
from .feed import (
-    get_console_log,
+    get_mkt_info,
    open_autorecon_ws,
    NoBsWs,
    stream_messages,

@ -367,6 +370,8 @@ def trades2pps(
    acctid: str,
    new_trans: dict[str, Transaction] = {},

+    write_storage: bool = True,

) -> tuple[
    list[BrokerdPosition],
    list[Transaction],

@ -397,13 +402,20 @@ def trades2pps(
            # right since `.broker` is already
            # included?
            account='kraken.' + acctid,
-            symbol=p.symbol.front_fqsn(),
+            symbol=p.mkt.fqme,
            size=p.size,
            avg_price=p.ppu,
            currency='',
        )
        position_msgs.append(msg)

+    if write_storage:
+        # TODO: ideally this blocks the this task
+        # as little as possible. we need to either do
+        # these writes in another actor, or try out `trio`'s
+        # async file IO api?
+        table.write_config()

    return position_msgs

@ -414,14 +426,11 @@ async def trades_dialogue(

) -> AsyncIterator[dict[str, Any]]:

-    # XXX: required to propagate ``tractor`` loglevel to ``piker`` logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)

    async with get_client() as client:

        if not client._api_key:
-            raise RuntimeError(
-                'Missing Kraken API key in `brokers.toml`!?!?')
+            await ctx.started('paper')
+            return

        # TODO: make ems flip to paper mode via
        # some returned signal if the user only wants to use

@ -467,40 +476,55 @@ async def trades_dialogue(
        # update things correctly.
        simulate_pp_update: bool = False

+        table: PpTable
+        ledger: TransactionLedger
        with (
            open_pps(
                'kraken',
-                acctid
+                acctid,
+                write_on_exit=True,
            ) as table,

            open_trade_ledger(
                'kraken',
-                acctid
-            ) as ledger_dict,
+                acctid,
+            ) as ledger,
        ):
            # transaction-ify the ledger entries
-            ledger_trans = norm_trade_records(ledger_dict)
+            ledger_trans = await norm_trade_records(ledger)

+            if not table.pps:
+                # NOTE: we can't use this since it first needs
+                # broker: str input support!
+                # table.update_from_trans(ledger.to_trans())
+                table.update_from_trans(ledger_trans)
+                table.write_config()

            # TODO: eventually probably only load
            # as far back as it seems is not deliverd in the
            # most recent 50 trades and assume that by ordering we
            # already have those records in the ledger.
            tids2trades = await client.get_trades()
-            ledger_dict.update(tids2trades)
-            api_trans = norm_trade_records(tids2trades)
+            ledger.update(tids2trades)
+            if tids2trades:
+                ledger.write_config()
+
+            api_trans = await norm_trade_records(tids2trades)

            # retrieve kraken reported balances
            # and do diff with ledger to determine
            # what amount of trades-transactions need
            # to be reloaded.
            balances = await client.get_balances()

            for dst, size in balances.items():

                # we don't care about tracking positions
                # in the user's source fiat currency.
                if (
                    dst == src_fiat
                    or not any(
-                        dst in bsuid for bsuid in table.pps
+                        dst in bs_mktid for bs_mktid in table.pps
                    )
                ):
                    log.warning(

@ -508,45 +532,20 @@ async def trades_dialogue(
                    )
                    continue

-            def get_likely_pair(
-                dst: str,
-                bsuid: str,
-                src_fiat: str = src_fiat
-
-            ) -> str:
-                '''
-                Attempt to get the likely trading pair masting
-                a given destination asset `dst: str`.
-
-                '''
-                try:
-                    src_name_start = bsuid.rindex(src_fiat)
-                except (
-                    ValueError,   # substr not found
-                ):
-                    # TODO: handle nested positions..(i.e.
-                    # positions where the src fiat was used to
-                    # buy some other dst which was furhter used
-                    # to buy another dst..)
-                    log.warning(
-                        f'No src fiat {src_fiat} found in {bsuid}?'
-                    )
-                    return
-
-                likely_dst = bsuid[:src_name_start]
-                if likely_dst == dst:
-                    return bsuid
-
            def has_pp(
                dst: str,
                size: float,

-            ) -> Position | bool:
+            ) -> Position | None:

                src2dst: dict[str, str] = {}

-                for bsuid in table.pps:
-                    likely_pair = get_likely_pair(dst, bsuid)
+                for bs_mktid in table.pps:
+                    likely_pair = get_likely_pair(
+                        src_fiat,
+                        dst,
+                        bs_mktid,
+                    )
                    if likely_pair:
                        src2dst[src_fiat] = dst

|
||||||
):
|
):
|
||||||
log.warning(
|
log.warning(
|
||||||
f'`kraken` account says you have a ZERO '
|
f'`kraken` account says you have a ZERO '
|
||||||
f'balance for {bsuid}:{pair}\n'
|
f'balance for {bs_mktid}:{pair}\n'
|
||||||
f'but piker seems to think `{pp.size}`\n'
|
f'but piker seems to think `{pp.size}`\n'
|
||||||
'This is likely a discrepancy in piker '
|
'This is likely a discrepancy in piker '
|
||||||
'accounting if the above number is'
|
'accounting if the above number is'
|
||||||
|
@ -574,7 +573,7 @@ async def trades_dialogue(
|
||||||
)
|
)
|
||||||
return pp
|
return pp
|
||||||
|
|
||||||
return False
|
return None # signal no entry
|
||||||
|
|
||||||
pos = has_pp(dst, size)
|
pos = has_pp(dst, size)
|
||||||
if not pos:
|
if not pos:
|
||||||
|
@ -601,8 +600,12 @@ async def trades_dialogue(
|
||||||
# in the ``pps.toml`` for the necessary pair
|
# in the ``pps.toml`` for the necessary pair
|
||||||
# yet and thus this likely pair grabber will
|
# yet and thus this likely pair grabber will
|
||||||
# likely fail.
|
# likely fail.
|
||||||
for bsuid in table.pps:
|
for bs_mktid in table.pps:
|
||||||
likely_pair = get_likely_pair(dst, bsuid)
|
likely_pair = get_likely_pair(
|
||||||
|
src_fiat,
|
||||||
|
dst,
|
||||||
|
bs_mktid,
|
||||||
|
)
|
||||||
if likely_pair:
|
if likely_pair:
|
||||||
break
|
break
|
||||||
else:
|
else:
|
||||||
|
@ -652,6 +655,12 @@ async def trades_dialogue(
|
||||||
)
|
)
|
||||||
await ctx.started((ppmsgs, [acc_name]))
|
await ctx.started((ppmsgs, [acc_name]))
|
||||||
|
|
||||||
|
# TODO: ideally this blocks the this task
|
||||||
|
# as little as possible. we need to either do
|
||||||
|
# these writes in another actor, or try out `trio`'s
|
||||||
|
# async file IO api?
|
||||||
|
table.write_config()
|
||||||
|
|
||||||
# Get websocket token for authenticated data stream
|
# Get websocket token for authenticated data stream
|
||||||
# Assert that a token was actually received.
|
# Assert that a token was actually received.
|
||||||
resp = await client.endpoint('GetWebSocketsToken', {})
|
resp = await client.endpoint('GetWebSocketsToken', {})
|
||||||
|
@ -674,8 +683,6 @@ async def trades_dialogue(
|
||||||
aclosing(stream_messages(ws)) as stream,
|
aclosing(stream_messages(ws)) as stream,
|
||||||
trio.open_nursery() as nurse,
|
trio.open_nursery() as nurse,
|
||||||
):
|
):
|
||||||
stream = stream_messages(ws)
|
|
||||||
|
|
||||||
# task for processing inbound requests from ems
|
# task for processing inbound requests from ems
|
||||||
nurse.start_soon(
|
nurse.start_soon(
|
||||||
handle_order_requests,
|
handle_order_requests,
|
||||||
|
@ -724,8 +731,8 @@ async def handle_order_updates(
|
||||||
'''
|
'''
|
||||||
Main msg handling loop for all things order management.
|
Main msg handling loop for all things order management.
|
||||||
|
|
||||||
This code is broken out to make the context explicit and state variables
|
This code is broken out to make the context explicit and state
|
||||||
defined in the signature clear to the reader.
|
variables defined in the signature clear to the reader.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
async for msg in ws_stream:
|
async for msg in ws_stream:
|
||||||
|
@ -818,7 +825,7 @@ async def handle_order_updates(
|
||||||
)
|
)
|
||||||
await ems_stream.send(status_msg)
|
await ems_stream.send(status_msg)
|
||||||
|
|
||||||
new_trans = norm_trade_records(trades)
|
new_trans = await norm_trade_records(trades)
|
||||||
ppmsgs = trades2pps(
|
ppmsgs = trades2pps(
|
||||||
table,
|
table,
|
||||||
acctid,
|
acctid,
|
||||||
|
@ -827,8 +834,6 @@ async def handle_order_updates(
|
||||||
for pp_msg in ppmsgs:
|
for pp_msg in ppmsgs:
|
||||||
await ems_stream.send(pp_msg)
|
await ems_stream.send(pp_msg)
|
||||||
|
|
||||||
ledger_trans.update(new_trans)
|
|
||||||
|
|
||||||
# process and relay order state change events
|
# process and relay order state change events
|
||||||
# https://docs.kraken.com/websockets/#message-openOrders
|
# https://docs.kraken.com/websockets/#message-openOrders
|
||||||
case [
|
case [
|
||||||
|
@ -890,7 +895,7 @@ async def handle_order_updates(
|
||||||
ids.inverse.get(reqid) is None
|
ids.inverse.get(reqid) is None
|
||||||
):
|
):
|
||||||
# parse out existing live order
|
# parse out existing live order
|
||||||
fqsn = pair.replace('/', '').lower()
|
fqme = pair.replace('/', '').lower()
|
||||||
price = float(price)
|
price = float(price)
|
||||||
size = float(vol)
|
size = float(vol)
|
||||||
|
|
||||||
|
@ -917,7 +922,7 @@ async def handle_order_updates(
|
||||||
action=action,
|
action=action,
|
||||||
exec_mode='live',
|
exec_mode='live',
|
||||||
oid=oid,
|
oid=oid,
|
||||||
symbol=fqsn,
|
symbol=fqme,
|
||||||
account=acc_name,
|
account=acc_name,
|
||||||
price=price,
|
price=price,
|
||||||
size=size,
|
size=size,
|
||||||
|
@ -1182,7 +1187,7 @@ async def handle_order_updates(
|
||||||
log.warning(f'Unhandled trades update msg: {msg}')
|
log.warning(f'Unhandled trades update msg: {msg}')
|
||||||
|
|
||||||
|
|
||||||
def norm_trade_records(
|
async def norm_trade_records(
|
||||||
ledger: dict[str, Any],
|
ledger: dict[str, Any],
|
||||||
|
|
||||||
) -> dict[str, Transaction]:
|
) -> dict[str, Transaction]:
|
||||||
|
@ -1197,30 +1202,19 @@ def norm_trade_records(
|
||||||
}[record['type']]
|
}[record['type']]
|
||||||
|
|
||||||
# we normalize to kraken's `altname` always..
|
# we normalize to kraken's `altname` always..
|
||||||
bsuid, pair_info = Client.normalize_symbol(record['pair'])
|
bs_mktid = Client.normalize_symbol(record['pair'])
|
||||||
fqsn = f'{bsuid}.kraken'
|
fqme = f'{bs_mktid}.kraken'
|
||||||
|
mkt: MktPair = (await get_mkt_info(fqme))[0]
|
||||||
mktpair = Symbol.from_fqsn(
|
|
||||||
fqsn,
|
|
||||||
info={
|
|
||||||
'lot_size_digits': pair_info.lot_decimals,
|
|
||||||
'tick_size_digits': pair_info.pair_decimals,
|
|
||||||
'asset_type': 'crypto',
|
|
||||||
},
|
|
||||||
)
|
|
||||||
|
|
||||||
records[tid] = Transaction(
|
records[tid] = Transaction(
|
||||||
fqsn=fqsn,
|
fqme=fqme,
|
||||||
sym=mktpair,
|
sym=mkt,
|
||||||
tid=tid,
|
tid=tid,
|
||||||
size=size,
|
size=size,
|
||||||
price=float(record['price']),
|
price=float(record['price']),
|
||||||
cost=float(record['fee']),
|
cost=float(record['fee']),
|
||||||
dt=pendulum.from_timestamp(float(record['time'])),
|
dt=pendulum.from_timestamp(float(record['time'])),
|
||||||
bsuid=bsuid,
|
bs_mktid=bs_mktid,
|
||||||
|
|
||||||
# XXX: there are no derivs on kraken right?
|
|
||||||
# expiry=expiry,
|
|
||||||
)
|
)
|
||||||
|
|
||||||
return records
|
return records
|
||||||
|
|
|
@ -18,12 +18,16 @@
Real-time and historical data feed endpoints.

'''
-from contextlib import asynccontextmanager as acm
+from contextlib import (
+    asynccontextmanager as acm,
+    aclosing,
+)
from datetime import datetime
from typing import (
    Any,
-    Optional,
+    AsyncGenerator,
    Callable,
+    Optional,
)
import time

@ -31,18 +35,24 @@ from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
-from trio_util import trio_async_generator
import tractor
import trio

-from piker._cacheables import open_cached_client
+from piker.accounting._mktinfo import (
+    Asset,
+    MktPair,
+)
+from piker._cacheables import (
+    open_cached_client,
+    async_lifo_cache,
+)
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
-from piker.log import get_console_log
from piker.data.types import Struct
+from piker.data.validate import FeedInit
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (

@ -85,26 +95,9 @@ async def stream_messages(
    though a single async generator.

    '''
-    too_slow_count = last_hb = 0
-
-    while True:
-
-        with trio.move_on_after(5) as cs:
-            msg = await ws.recv_msg()
-
-        # trigger reconnection if heartbeat is laggy
-        if cs.cancelled_caught:
-
-            too_slow_count += 1
-
-            if too_slow_count > 20:
-                log.warning(
-                    "Heartbeat is too slow, resetting ws connection")
-
-                await ws._connect()
-                too_slow_count = 0
-                continue
-
+    last_hb: float = 0
+
+    async for msg in ws:
        match msg:
            case {'event': 'heartbeat'}:
                now = time.time()

@ -122,7 +115,6 @@ async def stream_messages(
                yield msg


-@trio_async_generator
async def process_data_feed_msgs(
    ws: NoBsWs,
):

@ -130,7 +122,8 @@ async def process_data_feed_msgs(
    Parse and pack data feed messages.

    '''
-    async for msg in stream_messages(ws):
+    async with aclosing(stream_messages(ws)) as ws_stream:
+        async for msg in ws_stream:
            match msg:
                case {
                    'errorMessage': errmsg

@ -184,6 +177,17 @@ async def process_data_feed_msgs(
                # chan_id, *payload_array, chan_name, pair = msg
                # print(msg)

+                case {
+                    'connectionID': conid,
+                    'event': 'systemStatus',
+                    'status': 'online',
+                    'version': ver,
+                }:
+                    log.info(
+                        f'Established {ver} ws connection with id: {conid}'
+                    )
+                    continue

                case _:
                    print(f'UNHANDLED MSG: {msg}')
                    # yield msg

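The `trio_util.trio_async_generator` decorator is replaced by stdlib `contextlib.aclosing()` (3.10+), which guarantees the generator's `finally` blocks run in the consuming task's cancel scope rather than at GC time. A self-contained sketch of the pattern (hypothetical generator, not piker code):

    import trio
    from contextlib import aclosing

    async def ticks():
        try:
            for i in range(10):
                yield i
        finally:
            # with aclosing() this runs deterministically in the
            # consumer's task, not whenever the GC gets around to it
            print('generator finalized')

    async def main():
        async with aclosing(ticks()) as gen:
            async for i in gen:
                if i == 2:
                    break  # context exit calls gen.aclose()

    trio.run(main)
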
@ -211,9 +215,11 @@ def normalize(

@acm
async def open_history_client(
-    symbol: str,
+    mkt: MktPair,

-) -> tuple[Callable, int]:
+) -> AsyncGenerator[Callable, None]:
+
+    symbol: str = mkt.bs_fqme

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

@ -263,6 +269,44 @@ async def open_history_client(
        yield get_ohlc, {'erlangs': 1, 'rate': 1}


+@async_lifo_cache()
+async def get_mkt_info(
+    fqme: str,
+
+) -> tuple[MktPair, Pair]:
+    '''
+    Query for and return a `MktPair` and backend-native `Pair` (or
+    wtv else) info.
+
+    If more then one fqme is provided return a ``dict`` of native
+    key-strs to `MktPair`s.
+
+    '''
+    async with open_cached_client('kraken') as client:
+
+        # uppercase since kraken bs_mktid is always upper
+        bs_fqme, _, broker = fqme.partition('.')
+        pair_str: str = bs_fqme.upper()
+        bs_mktid: str = Client.normalize_symbol(pair_str)
+        pair: Pair = await client.pair_info(pair_str)
+
+        assets = client.assets
+        dst_asset: Asset = assets[pair.base]
+        src_asset: Asset = assets[pair.quote]
+
+        mkt = MktPair(
+            dst=dst_asset,
+            src=src_asset,
+
+            price_tick=pair.price_tick,
+            size_tick=pair.size_tick,
+            bs_mktid=bs_mktid,
+
+            broker='kraken',
+        )
+        return mkt, pair

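Presumably `@async_lifo_cache()` memoizes the result per input `fqme` so repeated feed startups don't re-hit the REST API; if so, usage from any async context looks like:

    # second call should be served from the cache:
    mkt, pair = await get_mkt_info('xbtusd.kraken')
    mkt2, _ = await get_mkt_info('xbtusd.kraken')
    assert mkt is mkt2  # assuming a cache hit returns the same object
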
async def stream_quotes(

    send_chan: trio.abc.SendChannel,

@ -283,45 +327,20 @@ async def stream_quotes(
    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''
-    # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
-
-    ws_pairs = {}
-    sym_infos = {}
-
-    async with open_cached_client('kraken') as client, send_chan as send_chan:
-
-        # keep client cached for real-time section
-        for sym in symbols:
-
-            # transform to upper since piker style is always lower
-            sym = sym.upper()
-            si: Pair = await client.symbol_info(sym)
-            # try:
-            #     si = Pair(**sym_info) # validation
-            # except TypeError:
-            #     fields_diff = set(sym_info) - set(Pair.__struct_fields__)
-            #     raise TypeError(
-            #         f'Missing msg fields {fields_diff}'
-            #     )
-            syminfo = si.to_dict()
-            syminfo['price_tick_size'] = 1. / 10**si.pair_decimals
-            syminfo['lot_tick_size'] = 1. / 10**si.lot_decimals
-            syminfo['asset_type'] = 'crypto'
-            sym_infos[sym] = syminfo
-            ws_pairs[sym] = si.wsname
-
-        symbol = symbols[0].lower()
-
-        init_msgs = {
-            # pass back token, and bool, signalling if we're the writer
-            # and that history has been written
-            symbol: {
-                'symbol_info': sym_infos[sym],
-                'shm_write_opts': {'sum_tick_vml': False},
-                'fqsn': sym,
-            },
-        }
+    ws_pairs: list[str] = []
+    init_msgs: list[FeedInit] = []
+
+    async with (
+        send_chan as send_chan,
+    ):
+        for sym_str in symbols:
+            mkt, pair = await get_mkt_info(sym_str)
+            init_msgs.append(
+                FeedInit(mkt_info=mkt)
+            )
+
+            ws_pairs.append(pair.wsname)

        @acm
        async def subscribe(ws: NoBsWs):

@ -332,7 +351,7 @@ async def stream_quotes(
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = {
                'event': 'subscribe',
-                'pair': list(ws_pairs.values()),
+                'pair': ws_pairs,
                'subscription': {
                    'name': 'ohlc',
                    'interval': 1,

@ -348,7 +367,7 @@ async def stream_quotes(
            # trade data (aka L1)
            l1_sub = {
                'event': 'subscribe',
-                'pair': list(ws_pairs.values()),
+                'pair': ws_pairs,
                'subscription': {
                    'name': 'spread',
                    # 'depth': 10}

@ -363,7 +382,7 @@ async def stream_quotes(
            # unsub from all pairs on teardown
            if ws.connected():
                await ws.send_msg({
-                    'pair': list(ws_pairs.values()),
+                    'pair': ws_pairs,
                    'event': 'unsubscribe',
                    'subscription': ['ohlc', 'spread'],
                })

@ -378,21 +397,20 @@ async def stream_quotes(
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
+                reset_after=20,
            ) as ws,

            # avoid stream-gen closure from breaking trio..
            # NOTE: not sure this actually works XD particularly
            # if we call `ws._connect()` manally in the streaming
            # async gen..
-            process_data_feed_msgs(ws) as msg_gen,
+            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs, quote))

-            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking

@ -1,4 +1,6 @@
-# Copyright (C) Jared Goldman (in stewardship for pikers)
+# Copyright (C) (in stewardship for pikers)
+#  - Jared Goldman
+#  - Tyler Goodlet

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@ -18,34 +20,54 @@ Kucoin broker backend

'''

-from typing import Any, Callable, Literal, AsyncGenerator
-from contextlib import asynccontextmanager as acm
+from contextlib import (
+    asynccontextmanager as acm,
+    aclosing,
+)
from datetime import datetime
-import time
+from decimal import Decimal
import base64
import hmac
import hashlib
+import time
+from functools import partial
+from pprint import pformat
+from typing import (
+    Any,
+    Callable,
+    Literal,
+    AsyncGenerator,
+)
import wsproto
from uuid import uuid4

+from fuzzywuzzy import process as fuzzy
+from trio_typing import TaskStatus
import asks
+from bidict import bidict
+import numpy as np
+import pendulum
import tractor
import trio
-from trio_util import trio_async_generator
-from trio_typing import TaskStatus
-from fuzzywuzzy import process as fuzzy
-import pendulum
-import numpy as np

-from piker._cacheables import open_cached_client
+from piker.accounting._mktinfo import (
+    Asset,
+    digits_to_dec,
+    MktPair,
+)
+from piker.data.validate import FeedInit
+from piker import config
+from piker._cacheables import (
+    open_cached_client,
+    async_lifo_cache,
+)
from piker.log import get_logger
-from ._util import DataUnavailable
-from piker.pp import config
-from ..data.types import Struct
-from ..data._web_bs import (
+from piker.data.types import Struct
+from piker.data._web_bs import (
    open_autorecon_ws,
    NoBsWs,
)
+from ._util import DataUnavailable

log = get_logger(__name__)

@ -67,11 +89,20 @@ class KucoinMktPair(Struct, frozen=True):
    https://docs.kucoin.com/#get-symbols-list

    '''
    baseCurrency: str
    baseIncrement: float

+    @property
+    def price_tick(self) -> Decimal:
+        return Decimal(str(self.baseIncrement))

    baseMaxSize: float
    baseMinSize: float

+    @property
+    def size_tick(self) -> Decimal:
+        return Decimal(str(self.baseMinSize))

    enableTrading: bool
    feeCurrency: str
    isMarginEnabled: bool

@ -84,7 +115,7 @@ class KucoinMktPair(Struct, frozen=True):
    quoteIncrement: float
    quoteMaxSize: float
    quoteMinSize: float
-    symbol: str
+    symbol: str  # our bs_mktid, kucoin's internal id


class AccountTrade(Struct, frozen=True):

@ -93,7 +124,6 @@ class AccountTrade(Struct, frozen=True):
    https://docs.kucoin.com/#get-account-ledgers

    '''
-
    id: str
    currency: str
    amount: float

@ -111,7 +141,6 @@ class AccountResponse(Struct, frozen=True):
    https://docs.kucoin.com/#get-account-ledgers

    '''
-
    currentPage: int
    pageSize: int
    totalNum: int

@ -125,7 +154,6 @@ class KucoinTrade(Struct, frozen=True):
    https://docs.kucoin.com/#symbol-ticker

    '''
-
    bestAsk: float
    bestAskSize: float
    bestBid: float

@ -148,16 +176,24 @@ class KucoinL2(Struct, frozen=True):
    timestamp: float


-class KucoinMsg(Struct, frozen=True):
+class Currency(Struct, frozen=True):
    '''
-    Generic outer-wrapper for any Kucoin ws msg
+    Currency (asset) info:
+    https://docs.kucoin.com/#get-currencies

    '''
-    type: str
-    topic: str
-    subject: str
-    data: list[KucoinTrade | KucoinL2]
+    currency: str
+    name: str
+    fullName: str
+    precision: int
+    confirms: int
+    contractAddress: str
+    withdrawalMinSize: str
+    withdrawalMinFee: str
+    isWithdrawEnabled: bool
+    isDepositEnabled: bool
+    isMarginEnabled: bool
+    isDebitEnabled: bool


class BrokerConfig(Struct, frozen=True):

@ -180,15 +216,18 @@ def get_config() -> BrokerConfig | None:

class Client:
    def __init__(self) -> None:
-        self._pairs: dict[str, KucoinMktPair] = {}
-        self._bars: list[list[float]] = []
        self._config: BrokerConfig | None = get_config()
+        self._pairs: dict[str, KucoinMktPair] = {}
+        self._fqmes2mktids: bidict[str, str] = bidict()
+        self._bars: list[list[float]] = []
+        self._currencies: dict[str, Currency] = {}

    def _gen_auth_req_headers(
        self,
        action: Literal['POST', 'GET'],
        endpoint: str,
-        api_v: str = 'v2',
+        api: str = 'v2',

    ) -> dict[str, str | bytes]:
        '''
        Generate authenticated request headers

@ -202,7 +241,7 @@ class Client:

        str_to_sign = (
            str(int(time.time() * 1000))
-            + action + f'/api/{api_v}{endpoint}'
+            + action + f'/api/{api}/{endpoint.lstrip("/")}'
        )

        signature = base64.b64encode(

@ -234,7 +273,7 @@ class Client:
        self,
        action: Literal['POST', 'GET'],
        endpoint: str,
-        api_v: str = 'v2',
+        api: str = 'v2',
        headers: dict = {},
    ) -> Any:
        '''

@ -243,19 +282,24 @@ class Client:
        '''
        if self._config:
            headers = self._gen_auth_req_headers(
-                action, endpoint, api_v)
+                action,
+                endpoint,
+                api,
+            )

-        api_url = f'https://api.kucoin.com/api/{api_v}{endpoint}'
+        api_url = f'https://api.kucoin.com/api/{api}/{endpoint}'

        res = await asks.request(action, api_url, headers=headers)

-        if 'data' in res.json():
-            return res.json()['data']
+        json = res.json()
+        if 'data' in json:
+            return json['data']
        else:
            log.error(
-                f'Error making request to {api_url} -> {res.json()["msg"]}'
+                f'Error making request to {api_url} ->\n'
+                f'{pformat(res)}'
            )
-            return res.json()['msg']
+            return json['msg']

    async def _get_ws_token(
        self,

@ -271,7 +315,9 @@ class Client:
        token_type = 'private' if private else 'public'
        try:
            data: dict[str, Any] | None = await self._request(
-                'POST', f'/bullet-{token_type}', 'v1'
+                'POST',
+                endpoint=f'bullet-{token_type}',
+                api='v1'
            )
        except Exception as e:
            log.error(

|
||||||
f'{data.json()["msg"]}'
|
f'{data.json()["msg"]}'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
async def get_currencies(
|
||||||
|
self,
|
||||||
|
update: bool = False,
|
||||||
|
) -> dict[str, Currency]:
|
||||||
|
'''
|
||||||
|
Retrieve all "currency" info:
|
||||||
|
https://docs.kucoin.com/#get-currencies
|
||||||
|
|
||||||
|
We use this for creating piker-interal ``Asset``s.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if (
|
||||||
|
not self._currencies
|
||||||
|
or update
|
||||||
|
):
|
||||||
|
currencies: dict[str, Currency] = {}
|
||||||
|
entries: list[dict] = await self._request(
|
||||||
|
'GET',
|
||||||
|
api='v1',
|
||||||
|
endpoint='currencies',
|
||||||
|
)
|
||||||
|
for entry in entries:
|
||||||
|
curr = Currency(**entry).copy()
|
||||||
|
currencies[curr.name] = curr
|
||||||
|
|
||||||
|
self._currencies.update(currencies)
|
||||||
|
|
||||||
|
return self._currencies
|
||||||
|
|
||||||
async def _get_pairs(
|
async def _get_pairs(
|
||||||
self,
|
self,
|
||||||
) -> dict[str, KucoinMktPair]:
|
) -> tuple[
|
||||||
entries = await self._request('GET', '/symbols')
|
dict[str, KucoinMktPair],
|
||||||
syms = {
|
bidict[str, KucoinMktPair],
|
||||||
kucoin_sym_to_fqsn(item['name']): KucoinMktPair(**item)
|
]:
|
||||||
for item in entries
|
entries = await self._request('GET', 'symbols')
|
||||||
}
|
log.info(f' {len(entries)} Kucoin market pairs fetched')
|
||||||
|
|
||||||
log.info(f' {len(syms)} Kucoin market pairs fetched')
|
pairs: dict[str, KucoinMktPair] = {}
|
||||||
return syms
|
fqmes2mktids: bidict[str, str] = bidict()
|
||||||
|
for item in entries:
|
||||||
|
pair = pairs[item['name']] = KucoinMktPair(**item)
|
||||||
|
fqmes2mktids[
|
||||||
|
item['name'].lower().replace('-', '')
|
||||||
|
] = pair.name
|
||||||
|
|
||||||
|
return pairs, fqmes2mktids
|
||||||
|
|
||||||
async def cache_pairs(
|
async def cache_pairs(
|
||||||
self,
|
self,
|
||||||
|
update: bool = False,
|
||||||
|
|
||||||
) -> dict[str, KucoinMktPair]:
|
) -> dict[str, KucoinMktPair]:
|
||||||
'''
|
'''
|
||||||
Get cached pairs and convert keyed symbols into fqsns if ya want
|
Get request all market pairs and store in a local cache.
|
||||||
|
|
||||||
|
Also create a table of piker style fqme -> kucoin symbols.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
if not self._pairs:
|
if (
|
||||||
self._pairs = await self._get_pairs()
|
not self._pairs
|
||||||
|
or update
|
||||||
|
):
|
||||||
|
pairs, fqmes = await self._get_pairs()
|
||||||
|
self._pairs.update(pairs)
|
||||||
|
self._fqmes2mktids.update(fqmes)
|
||||||
|
|
||||||
return self._pairs
|
return self._pairs
|
||||||
|
|
||||||
|
@ -316,7 +407,12 @@ class Client:
|
||||||
self,
|
self,
|
||||||
pattern: str,
|
pattern: str,
|
||||||
limit: int = 30,
|
limit: int = 30,
|
||||||
|
|
||||||
) -> dict[str, KucoinMktPair]:
|
) -> dict[str, KucoinMktPair]:
|
||||||
|
'''
|
||||||
|
Use fuzzy search to match against all market names.
|
||||||
|
|
||||||
|
'''
|
||||||
data = await self.cache_pairs()
|
data = await self.cache_pairs()
|
||||||
|
|
||||||
matches = fuzzy.extractBests(
|
matches = fuzzy.extractBests(
|
||||||
|
@ -327,19 +423,23 @@ class Client:
|
||||||
|
|
||||||
async def last_trades(self, sym: str) -> list[AccountTrade]:
|
async def last_trades(self, sym: str) -> list[AccountTrade]:
|
||||||
trades = await self._request(
|
trades = await self._request(
|
||||||
'GET', f'/accounts/ledgers?currency={sym}', 'v1'
|
'GET',
|
||||||
|
endpoint=f'accounts/ledgers?currency={sym}',
|
||||||
|
api='v1'
|
||||||
)
|
)
|
||||||
trades = AccountResponse(**trades)
|
trades = AccountResponse(**trades)
|
||||||
return trades.items
|
return trades.items
|
||||||
|
|
||||||
async def _get_bars(
|
async def _get_bars(
|
||||||
self,
|
self,
|
||||||
fqsn: str,
|
fqme: str,
|
||||||
|
|
||||||
start_dt: datetime | None = None,
|
start_dt: datetime | None = None,
|
||||||
end_dt: datetime | None = None,
|
end_dt: datetime | None = None,
|
||||||
limit: int = 1000,
|
limit: int = 1000,
|
||||||
as_np: bool = True,
|
as_np: bool = True,
|
||||||
type: str = '1min',
|
type: str = '1min',
|
||||||
|
|
||||||
) -> np.ndarray:
|
) -> np.ndarray:
|
||||||
'''
|
'''
|
||||||
Get OHLC data and convert to numpy array for perffff:
|
Get OHLC data and convert to numpy array for perffff:
|
||||||
|
@ -381,10 +481,10 @@ class Client:
|
||||||
start_dt = int(start_dt.timestamp())
|
start_dt = int(start_dt.timestamp())
|
||||||
end_dt = int(end_dt.timestamp())
|
end_dt = int(end_dt.timestamp())
|
||||||
|
|
||||||
kucoin_sym = fqsn_to_kucoin_sym(fqsn, self._pairs)
|
kucoin_sym = self._fqmes2mktids[fqme]
|
||||||
|
|
||||||
url = (
|
url = (
|
||||||
f'/market/candles?type={type}'
|
f'market/candles?type={type}'
|
||||||
f'&symbol={kucoin_sym}'
|
f'&symbol={kucoin_sym}'
|
||||||
f'&startAt={start_dt}'
|
f'&startAt={start_dt}'
|
||||||
f'&endAt={end_dt}'
|
f'&endAt={end_dt}'
|
||||||
|
@ -394,7 +494,7 @@ class Client:
|
||||||
data: list[list[str]] | dict = await self._request(
|
data: list[list[str]] | dict = await self._request(
|
||||||
'GET',
|
'GET',
|
||||||
url,
|
url,
|
||||||
api_v='v1',
|
api='v1',
|
||||||
)
|
)
|
||||||
|
|
||||||
if not isinstance(data, list):
|
if not isinstance(data, list):
|
||||||
|
@ -439,19 +539,22 @@ class Client:
|
||||||
return array
|
return array
|
||||||
|
|
||||||
|
|
||||||
def fqsn_to_kucoin_sym(fqsn: str, pairs: dict[str, KucoinMktPair]) -> str:
|
def fqme_to_kucoin_sym(
|
||||||
pair_data = pairs[fqsn]
|
fqme: str,
|
||||||
|
pairs: dict[str, KucoinMktPair],
|
||||||
|
|
||||||
|
) -> str:
|
||||||
|
pair_data = pairs[fqme]
|
||||||
return pair_data.baseCurrency + '-' + pair_data.quoteCurrency
|
return pair_data.baseCurrency + '-' + pair_data.quoteCurrency
|
||||||
|
|
||||||
|
|
||||||
def kucoin_sym_to_fqsn(sym: str) -> str:
|
|
||||||
return sym.lower().replace('-', '')
|
|
||||||
|
|
||||||
|
|
||||||
@acm
|
@acm
|
||||||
async def get_client() -> AsyncGenerator[Client, None]:
|
async def get_client() -> AsyncGenerator[Client, None]:
|
||||||
client = Client()
|
client = Client()
|
||||||
await client.cache_pairs()
|
|
||||||
|
async with trio.open_nursery() as n:
|
||||||
|
n.start_soon(client.cache_pairs)
|
||||||
|
await client.get_currencies()
|
||||||
|
|
||||||
yield client
|
yield client
|
||||||
|
|
||||||
|
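The new `_fqmes2mktids` bidict makes symbology translation a table lookup in both directions, eg. for an assumed 'BTC-USDT' listing:

    # kucoin pair 'name' -> piker-style fqme key, as built above:
    'BTC-USDT'.lower().replace('-', '')  # -> 'btcusdt'

    # and back via the bidict:
    # client._fqmes2mktids['btcusdt']          -> 'BTC-USDT'
    # client._fqmes2mktids.inverse['BTC-USDT'] -> 'btcusdt'
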
@@ -497,59 +600,112 @@ async def open_ping_task(
             n.cancel_scope.cancel()
 
 
+@async_lifo_cache()
+async def get_mkt_info(
+    fqme: str,
+
+) -> tuple[MktPair, KucoinMktPair]:
+    '''
+    Query for and return a `MktPair` and `KucoinMktPair`.
+
+    '''
+    async with open_cached_client('kucoin') as client:
+        # split off any fqme broker part
+        bs_fqme, _, broker = fqme.partition('.')
+
+        pairs: dict[str, KucoinMktPair] = await client.cache_pairs()
+
+        try:
+            # likely search result key which is already in native mkt symbol form
+            pair: KucoinMktPair = pairs[bs_fqme]
+            bs_mktid: str = bs_fqme
+
+        except KeyError:
+
+            # likely a piker-style fqme from API request or CLI
+            bs_mktid: str = client._fqmes2mktids[bs_fqme]
+            pair: KucoinMktPair = pairs[bs_mktid]
+
+        # symbology sanity
+        assert bs_mktid == pair.symbol
+
+        assets: dict[str, Currency] = client._currencies
+
+        # TODO: maybe just do this processing in
+        # a .get_assets() method (see kraken)?
+        src: Currency = assets[pair.quoteCurrency]
+        src_asset = Asset(
+            name=src.name,
+            atype='crypto_currency',
+            tx_tick=digits_to_dec(src.precision),
+            info=src.to_dict(),
+        )
+        dst: Currency = assets[pair.baseCurrency]
+        dst_asset = Asset(
+            name=dst.name,
+            atype='crypto_currency',
+            tx_tick=digits_to_dec(dst.precision),
+            info=dst.to_dict(),
+        )
+        mkt = MktPair(
+            dst=dst_asset,
+            src=src_asset,
+
+            price_tick=pair.price_tick,
+            size_tick=pair.size_tick,
+            bs_mktid=bs_mktid,
+
+            broker='kucoin',
+        )
+        return mkt, pair
+
+
 async def stream_quotes(
     send_chan: trio.abc.SendChannel,
     symbols: list[str],
     feed_is_live: trio.Event,
-    loglevel: str = '',
-    # startup sync
-    task_status: TaskStatus[tuple[dict, dict]
+    # startup sync
+    task_status: TaskStatus[
+        tuple[dict, dict]
     ] = trio.TASK_STATUS_IGNORED,
 
 ) -> None:
     '''
     Required piker api to stream real-time data.
     Where the rubber hits the road baby
 
     '''
+    init_msgs: list[FeedInit] = []
 
     async with open_cached_client('kucoin') as client:
-        token, ping_interval = await client._get_ws_token()
-        connect_id = str(uuid4())
-        pairs = await client.cache_pairs()
-        ws_url = (
-            f'wss://ws-api-spot.kucoin.com/?'
-            f'token={token}&[connectId={connect_id}]'
-        )
+        log.info(f'Starting up quote stream(s) for {symbols}')
+        for sym_str in symbols:
+            mkt, pair = await get_mkt_info(sym_str)
+            init_msgs.append(
+                FeedInit(mkt_info=mkt)
+            )
 
-        # open ping task
+        ws: NoBsWs
+        token, ping_interval = await client._get_ws_token()
+        connect_id = str(uuid4())
         async with (
-            open_autorecon_ws(ws_url) as ws,
+            open_autorecon_ws(
+                (
+                    f'wss://ws-api-spot.kucoin.com/?'
+                    f'token={token}&[connectId={connect_id}]'
+                ),
+                fixture=partial(
+                    subscribe,
+                    connect_id=connect_id,
+                    bs_mktid=pair.symbol,
+                ),
+            ) as ws,
             open_ping_task(ws, ping_interval, connect_id),
+            aclosing(stream_messages(ws, sym_str)) as msg_gen,
         ):
-            log.info('Starting up quote stream')
-            # loop through symbols and sub to feedz
-            for sym in symbols:
-                pair: KucoinMktPair = pairs[sym]
-                kucoin_sym = pair.symbol
-
-                init_msgs = {
-                    # pass back token, and bool, signalling if we're the writer
-                    # and that history has been written
-                    sym: {
-                        'symbol_info': {
-                            'asset_type': 'crypto',
-                            'price_tick_size': float(pair.baseIncrement),
-                            'lot_tick_size': float(pair.baseMinSize),
-                        },
-                        'shm_write_opts': {'sum_tick_vml': False},
-                        'fqsn': sym,
-                    }
-                }
-
-                async with (
-                    subscribe(ws, connect_id, kucoin_sym),
-                    stream_messages(ws, sym) as msg_gen,
-                ):
-                    typ, quote = await anext(msg_gen)
-
-                    while typ != 'trade':
-                        # take care to not unblock here until we get a real
-                        # trade quote
+            typ, quote = await anext(msg_gen)
+
+            while typ != 'trade':
+                # take care to not unblock here until we get a real
+                # trade quote
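Note: the ``tx_tick`` values in the hunk above are built from each currency's exchange-reported decimal precision. A minimal sketch of that digits-to-tick conversion, assuming ``digits_to_dec()`` simply maps a digit count to the smallest decimal increment (the helper below is illustrative, not the repo's implementation)::

    from decimal import Decimal

    def digits_to_dec_sketch(ndigits: int) -> Decimal:
        # 2 -> Decimal('0.01'), 8 -> Decimal('1E-8')
        return Decimal(1).scaleb(-ndigits)

    assert str(digits_to_dec_sketch(2)) == '0.01'
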
@@ -559,74 +715,83 @@ async def stream_quotes(
             feed_is_live.set()
 
             async for typ, msg in msg_gen:
-                await send_chan.send({sym: msg})
+                await send_chan.send({sym_str: msg})
 
 
 @acm
-async def subscribe(ws: wsproto.WSConnection, connect_id, sym) -> AsyncGenerator[None, None]:
-    # level 2 sub
+async def subscribe(
+    ws: NoBsWs,
+    connect_id,
+    bs_mktid,
+
+    # subs are filled in with `bs_mktid` from above
+    topics: list[str] = [
+        '/market/ticker:{bs_mktid}',  # clearing events
+        '/spotMarket/level2Depth5:{bs_mktid}',  # level 2
+    ],
+
+) -> AsyncGenerator[None, None]:
+
+    eps: list[str] = []
+    for topic in topics:
+        ep: str = topic.format(bs_mktid=bs_mktid)
+        eps.append(ep)
         await ws.send_msg(
             {
                 'id': connect_id,
                 'type': 'subscribe',
-                'topic': f'/spotMarket/level2Depth5:{sym}',
+                'topic': ep,
                 'privateChannel': False,
                 'response': True,
             }
         )
 
-    # watch trades
-    await ws.send_msg(
-        {
-            'id': connect_id,
-            'type': 'subscribe',
-            'topic': f'/market/ticker:{sym}',
-            'privateChannel': False,
-            'response': True,
-        }
-    )
+    welcome_msg = await ws.recv_msg()
+    log.info(f'WS welcome: {welcome_msg}')
+    for _ in topics:
+        ack_msg = await ws.recv_msg()
+        log.info(f'Sub ACK: {ack_msg}')
 
     yield
 
     # unsub
     if ws.connected():
-        log.info(f'Unsubscribing to {sym} feed')
+        log.info(f'Unsubscribing to {bs_mktid} feed')
+        for ep in eps:
         await ws.send_msg(
             {
                 'id': connect_id,
                 'type': 'unsubscribe',
-                'topic': f'/market/ticker:{sym}',
+                'topic': ep,
                 'privateChannel': False,
                 'response': True,
             }
         )
 
 
-@trio_async_generator
 async def stream_messages(
-    ws: NoBsWs, sym: str
+    ws: NoBsWs,
+    sym: str,
+
 ) -> AsyncGenerator[tuple[str, dict], None]:
-    timeouts = 0
-    last_trade_ts = 0
-
-    while True:
-        with trio.move_on_after(3) as cs:
-            msg = await ws.recv_msg()
-        if cs.cancelled_caught:
-            timeouts += 1
-            if timeouts > 2:
-                log.error(
-                    'kucoin feed is sh**ing the bed... rebooting...')
-                await ws._connect()
-
-            continue
-        if msg.get('subject'):
-            msg = KucoinMsg(**msg)
-            match msg.subject:
-                case 'trade.ticker':
-                    trade_data = KucoinTrade(**msg.data)
+    '''
+    Core (live) feed msg handler: relay market events
+    to the piker-ized tick-stream format.
+
+    '''
+    last_trade_ts: float = 0
+
+    dict_msg: dict[str, Any]
+    async for dict_msg in ws:
+        match dict_msg:
+            case {
+                'subject': 'trade.ticker',
+                'data': trade_data_dict,
+            }:
+                trade_data = KucoinTrade(**trade_data_dict)
 
-                    # XXX: Filter for duplicate messages as ws feed will
+                # XXX: Filter out duplicate messages as ws feed will
                 # send duplicate market state
                 # https://docs.kucoin.com/#level2-5-best-ask-bid-orders
                 if trade_data.time == last_trade_ts:
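The parametrized topic templates above keep the subscribe and unsubscribe payloads symmetric. A self-contained sketch of the same expansion (endpoint strings copied from the diff, the wrapper function is hypothetical)::

    def mk_sub_msgs(connect_id: str, bs_mktid: str) -> list[dict]:
        # expand the per-market topic templates into concrete
        # subscribe payloads, mirroring `subscribe()` above
        topics = [
            '/market/ticker:{bs_mktid}',
            '/spotMarket/level2Depth5:{bs_mktid}',
        ]
        return [
            {
                'id': connect_id,
                'type': 'subscribe',
                'topic': topic.format(bs_mktid=bs_mktid),
                'privateChannel': False,
                'response': True,
            }
            for topic in topics
        ]

    for msg in mk_sub_msgs('conn-1', 'BTC-USDT'):
        print(msg['topic'])
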
@@ -648,8 +813,11 @@ async def stream_messages(
                     ],
                 }
 
-                case 'level2':
-                    l2_data = KucoinL2(**msg.data)
+            case {
+                'subject': 'level2',
+                'data': trade_data_dict,
+            }:
+                l2_data = KucoinL2(**trade_data_dict)
                 first_ask = l2_data.asks[0]
                 first_bid = l2_data.bids[0]
                 yield 'l1', {
@@ -678,14 +846,22 @@ async def stream_messages(
                     ],
                 }
 
+            case {'type': 'pong'}:
+                # resp to ping task req
+                continue
+
             case _:
-                    log.warn(f'Unhandled message: {msg}')
+                log.warn(f'Unhandled message: {dict_msg}')
 
 
 @acm
 async def open_history_client(
-    symbol: str,
+    mkt: MktPair,
+
 ) -> AsyncGenerator[Callable, None]:
+
+    symbol: str = mkt.bs_fqme
+
     async with open_cached_client('kucoin') as client:
         log.info('Attempting to open kucoin history client')
 
@@ -709,6 +885,11 @@ async def open_history_client(
 
             times = array['time']
 
+            if not len(times):
+                raise DataUnavailable(
+                    f'No more history before {start_dt}?'
+                )
+
             if end_dt is None:
                 inow = round(time.time())
 
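The rewritten handler dispatches raw ws ``dict`` msgs via structural pattern matching instead of a pre-decoded ``KucoinMsg`` struct. A tiny sketch of the same dispatch style (message shapes taken from the diff, values invented)::

    def classify(msg: dict) -> str:
        # route a raw ws msg by its shape, as `stream_messages()` does
        match msg:
            case {'subject': 'trade.ticker', 'data': _}:
                return 'trade'
            case {'subject': 'level2', 'data': _}:
                return 'l1'
            case {'type': 'pong'}:
                return 'pong'
            case _:
                return 'unknown'

    assert classify({'type': 'pong'}) == 'pong'
    assert classify({'subject': 'level2', 'data': {}}) == 'l1'
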
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
 
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -43,10 +43,13 @@ from ..calc import humanize, percent_change
 from .._cacheables import open_cached_client, async_lifo_cache
 from .. import config
 from ._util import resproc, BrokerError, SymbolNotFound
-from ..log import get_logger, colorize_json, get_console_log
-log = get_logger(__name__)
+from ..log import (
+    colorize_json,
+)
+from ._util import (
+    log,
+    get_console_log,
+)
 
 _use_practice_account = False
 _refresh_token_ep = 'https://{}login.questrade.com/oauth2/'
@@ -27,12 +27,13 @@ from typing import List
 from async_generator import asynccontextmanager
 import asks
 
-from ..log import get_logger
-from ._util import resproc, BrokerError
+from ._util import (
+    resproc,
+    BrokerError,
+    log,
+)
 from ..calc import percent_change
 
-log = get_logger(__name__)
 
 _service_ep = 'https://api.robinhood.com'
 
@@ -65,8 +66,10 @@ class Client:
         self.api = _API(self._sess)
 
     def _zip_in_order(self, symbols: [str], quotes: List[dict]):
-        return {quote.get('symbol', sym) if quote else sym: quote
-                for sym, quote in zip(symbols, results_dict)}
+        return {
+            quote.get('symbol', sym) if quote else sym: quote
+            for sym, quote in zip(symbols, quotes)
+        }
 
     async def quote(self, symbols: [str]):
         """Retrieve quotes for a list of ``symbols``.
 
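Both broker backends now import their ``log`` from a per-subpackage ``_util`` module rather than calling ``get_logger(__name__)`` locally. A stdlib-only sketch of that shared sub-system logger pattern (the module layout and the ``'piker.brokers'`` name are assumptions for illustration)::

    # _util.py (sketch): one shared logger for the whole subpackage
    import logging

    log = logging.getLogger('piker.brokers')
    log.addHandler(logging.NullHandler())

    def get_console_log(level: str = 'info') -> logging.Logger:
        # lazily attach a console handler, roughly what the repo's
        # helper of the same name is used for at daemon startup
        log.addHandler(logging.StreamHandler())
        log.setLevel(level.upper())
        return log
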
@@ -18,9 +18,17 @@
 Market machinery for order executions, book, management.
 
 """
-from ._client import open_ems
+from ..log import get_logger
+from ._client import (
+    open_ems,
+    OrderClient,
+)
 
 
 __all__ = [
     'open_ems',
+    'OrderClient',
 
 ]
 
+log = get_logger(__name__)
 
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -27,68 +27,104 @@ import trio
 import tractor
 from tractor.trionics import broadcast_receiver
 
-from ..log import get_logger
+from ._util import (
+    log,  # sub-sys logger
+)
 from ..data.types import Struct
 from ..service import maybe_open_emsd
 from ._messages import (
     Order,
     Cancel,
+    BrokerdPosition,
 )
 from ..brokers import get_brokermod
 
 if TYPE_CHECKING:
     from ._messages import (
-        BrokerdPosition,
         Status,
     )
 
 
-log = get_logger(__name__)
-
-
-class OrderBook(Struct):
-    '''EMS-client-side order book ctl and tracking.
-
-    A style similar to "model-view" is used here where this api is
-    provided as a supervised control for an EMS actor which does all the
-    hard/fast work of talking to brokers/exchanges to conduct
-    executions.
-
-    Currently, this is mostly for keeping local state to match the EMS
-    and use received events to trigger graphics updates.
+class OrderClient(Struct):
+    '''
+    EMS-client-side order book ctl and tracking.
+
+    (A)sync API for submitting orders and alerts to the `emsd` service;
+    this is the main control for execution management from client code.
 
     '''
+    # IPC stream to `emsd` actor
+    _ems_stream: tractor.MsgStream
+
     # mem channels used to relay order requests to the EMS daemon
-    _to_ems: trio.abc.SendChannel
-    _from_order_book: trio.abc.ReceiveChannel
+    _to_relay_task: trio.abc.SendChannel
+    _from_sync_order_client: trio.abc.ReceiveChannel
 
+    # history table
     _sent_orders: dict[str, Order] = {}
 
-    def send(
+    def send_nowait(
         self,
         msg: Order | dict,
 
-    ) -> dict:
+    ) -> dict | Order:
+        '''
+        Sync version of ``.send()``.
+
+        '''
         self._sent_orders[msg.oid] = msg
-        self._to_ems.send_nowait(msg)
+        self._to_relay_task.send_nowait(msg)
         return msg
 
-    def send_update(
+    async def send(
         self,
+        msg: Order | dict,
+
+    ) -> dict | Order:
+        '''
+        Send a new order msg async to the `emsd` service.
+
+        '''
+        self._sent_orders[msg.oid] = msg
+        await self._ems_stream.send(msg)
+        return msg
+
+    def update_nowait(
+        self,
         uuid: str,
         **data: dict,
 
     ) -> dict:
+        '''
+        Sync version of ``.update()``.
+
+        '''
         cmd = self._sent_orders[uuid]
         msg = cmd.copy(update=data)
         self._sent_orders[uuid] = msg
-        self._to_ems.send_nowait(msg)
-        return cmd
+        self._to_relay_task.send_nowait(msg)
+        return msg
 
-    def cancel(self, uuid: str) -> bool:
-        """Cancel an order (or alert) in the EMS.
+    async def update(
+        self,
+        uuid: str,
+        **data: dict,
+    ) -> dict:
+        '''
+        Update an existing order dialog with a msg updated from
+        ``update`` kwargs.
 
-        """
+        '''
+        cmd = self._sent_orders[uuid]
+        msg = cmd.copy(update=data)
+        self._sent_orders[uuid] = msg
+        await self._ems_stream.send(msg)
+        return msg
+
+    def _mk_cancel_msg(
+        self,
+        uuid: str,
+    ) -> Cancel:
         cmd = self._sent_orders.get(uuid)
         if not cmd:
             log.error(
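The client now exposes paired sync/async entrypoints so Qt callbacks and async tasks can share one instance. A hedged usage sketch (the exact ``Order`` field set is inferred from the message schemas later in this diff, not verified against the full constructor)::

    # inside some async client task, given a connected `client: OrderClient`
    from uuid import uuid4

    oid = str(uuid4())
    order = Order(
        oid=oid,
        symbol='xbtusdt.kucoin',
        account='paper',
        action='buy',
        price=20_000.0,
        size=0.001,
        exec_mode='dark',
    )
    await client.send(order)  # async path: straight onto the IPC stream
    client.update_nowait(oid, price=19_500.0)  # sync path: queued for the relay task
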
@@ -96,77 +132,75 @@ class OrderBook(Struct):
                 f'Maybe there is a stale entry or line?\n'
                 f'You should report this as a bug!'
             )
-        msg = Cancel(
+        fqme = str(cmd.symbol)
+        return Cancel(
             oid=uuid,
-            symbol=cmd.symbol,
+            symbol=fqme,
         )
-        self._to_ems.send_nowait(msg)
-
-
-_orders: OrderBook = None
-
-
-def get_orders(
-    emsd_uid: tuple[str, str] = None
-) -> OrderBook:
-    """"
-    OrderBook singleton factory per actor.
-
-    """
-    if emsd_uid is not None:
-        # TODO: read in target emsd's active book on startup
-        pass
-
-    global _orders
-
-    if _orders is None:
-        size = 100
-        tx, rx = trio.open_memory_channel(size)
-        brx = broadcast_receiver(rx, size)
-
-        # setup local ui event streaming channels for request/resp
-        # streamging with EMS daemon
-        _orders = OrderBook(
-            _to_ems=tx,
-            _from_order_book=brx,
-        )
-
-    return _orders
+
+    def cancel_nowait(
+        self,
+        uuid: str,
+
+    ) -> None:
+        '''
+        Sync version of ``.cancel()``.
+
+        '''
+        self._to_relay_task.send_nowait(
+            self._mk_cancel_msg(uuid)
+        )
+
+    async def cancel(
+        self,
+        uuid: str,
+
+    ) -> bool:
+        '''
+        Cancel an already existing order (or alert) dialog.
+
+        '''
+        await self._ems_stream.send(
+            self._mk_cancel_msg(uuid)
+        )
 
 
-# TODO: we can get rid of this relay loop once we move
-# order_mode inputs to async code!
-async def relay_order_cmds_from_sync_code(
+async def relay_orders_from_sync_code(
 
+    client: OrderClient,
     symbol_key: str,
     to_ems_stream: tractor.MsgStream,
 
 ) -> None:
-    """
-    Order streaming task: deliver orders transmitted from UI
-    to downstream consumers.
+    '''
+    Order submission relay task: deliver orders sent from synchronous (UI)
+    code to the EMS via ``OrderClient._from_sync_order_client``.
 
     This is run in the UI actor (usually the one running Qt but could be
     any other client service code). This process simply delivers order
-    messages to the above ``_to_ems`` send channel (from sync code using
+    messages to the above ``_to_relay_task`` send channel (from sync code using
     ``.send_nowait()``), these values are pulled from the channel here
     and relayed to any consumer(s) that called this function using
     a ``tractor`` portal.
 
     This effectively makes order messages look like they're being
     "pushed" from the parent to the EMS where local sync code is likely
-    doing the pushing from some UI.
+    doing the pushing from some non-async UI handler.
 
-    """
-    book = get_orders()
-    async with book._from_order_book.subscribe() as orders_stream:
-        async for cmd in orders_stream:
+    '''
+    async with (
+        client._from_sync_order_client.subscribe() as sync_order_cmds
+    ):
+        async for cmd in sync_order_cmds:
             sym = cmd.symbol
-            msg = pformat(cmd)
+            msg = pformat(cmd.to_dict())
 
             if sym == symbol_key:
                 log.info(f'Send order cmd:\n{msg}')
                 # send msg over IPC / wire
                 await to_ems_stream.send(cmd)
 
             else:
                 log.warning(
                     f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
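The module-level singleton book and its global memory channel are gone; ``open_ems()`` (next hunk) now builds the channel pair per-client. A runnable sketch of that sync-to-async relay wiring using plain ``trio`` (all names here are illustrative)::

    import trio

    async def main():
        # sync (UI) code calls `send_nowait()` on the tx side while an
        # async relay task drains the rx side onto the wire
        tx, rx = trio.open_memory_channel(100)

        async def relay():
            async for cmd in rx:
                print(f'would forward to emsd: {cmd}')

        async with trio.open_nursery() as n:
            n.start_soon(relay)
            tx.send_nowait({'oid': 'abc', 'action': 'buy'})  # sync-side submit
            await trio.sleep(0.1)
            n.cancel_scope.cancel()

    trio.run(main)
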
@@ -176,62 +210,39 @@ async def relay_order_cmds_from_sync_code(
 
 @acm
 async def open_ems(
-    fqsn: str,
+    fqme: str,
     mode: str = 'live',
     loglevel: str = 'error',
 
 ) -> tuple[
-    OrderBook,
+    OrderClient,
     tractor.MsgStream,
     dict[
         # brokername, acctid
         tuple[str, str],
-        list[BrokerdPosition],
+        dict[str, BrokerdPosition],
     ],
     list[str],
     dict[str, Status],
 ]:
     '''
-    Spawn an EMS daemon and begin sending orders and receiving
-    alerts.
-
-    This EMS tries to reduce most broker's terrible order entry apis to
-    a very simple protocol built on a few easy to grok and/or
-    "rantsy" premises:
-
-    - most users will prefer "dark mode" where orders are not submitted
-      to a broker until and execution condition is triggered
-      (aka client-side "hidden orders")
-
-    - Brokers over-complicate their apis and generally speaking hire
-      poor designers to create them. We're better off using creating a super
-      minimal, schema-simple, request-event-stream protocol to unify all the
-      existing piles of shit (and shocker, it'll probably just end up
-      looking like a decent crypto exchange's api)
-
-    - all order types can be implemented with client-side limit orders
-
-    - we aren't reinventing a wheel in this case since none of these
-      brokers are exposing FIX protocol; it is they doing the re-invention.
-
-
-    TODO: make some fancy diagrams using mermaid.io
-
-    the possible set of responses from the stream is currently:
-    - 'dark_submitted', 'broker_submitted'
-    - 'dark_cancelled', 'broker_cancelled'
-    - 'dark_executed', 'broker_executed'
-    - 'broker_filled'
+    (Maybe) spawn an EMS-daemon (emsd), deliver an `OrderClient` for
+    requesting orders/alerts and a `trades_stream` which delivers all
+    response-msgs.
+
+    This is a "client side" entrypoint which may spawn the `emsd` service
+    if it can't be discovered and generally speaking is the lowest level
+    broker control client-API.
 
     '''
-    # wait for service to connect back to us signalling
-    # ready for order commands
-    book = get_orders()
-
-    from ..data._source import unpack_fqsn
-    broker, symbol, suffix = unpack_fqsn(fqsn)
+    # TODO: prolly hand in the `MktPair` instance directly here as well!
+    from piker.accounting import unpack_fqme
+    broker, mktep, venue, suffix = unpack_fqme(fqme)
 
-    async with maybe_open_emsd(broker) as portal:
+    async with maybe_open_emsd(
+        broker,
+        loglevel=loglevel,
+    ) as portal:
 
         mod = get_brokermod(broker)
         if (
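A sketch of the fqme ("fully qualified market endpoint") unpacking assumed by the 4-tuple above; this is a hypothetical stand-in for ``piker.accounting.unpack_fqme``, whose exact parsing rules are not shown in this diff::

    def unpack_fqme_sketch(fqme: str) -> tuple[str, str, str, str]:
        # form: <name>.<venue>.<suffix>.<broker>, broker is always last
        tokens = fqme.split('.')
        broker = tokens[-1]
        mktep = tokens[0]
        venue = tokens[1] if len(tokens) > 2 else ''
        suffix = tokens[2] if len(tokens) > 3 else ''
        return broker, mktep, venue, suffix

    print(unpack_fqme_sketch('btcusdt.spot.kucoin'))
    # -> ('kucoin', 'btcusdt', 'spot', '')
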
@@ -244,9 +255,8 @@ async def open_ems(
         async with (
             # connect to emsd
             portal.open_context(
-
                 _emsd_main,
-                fqsn=fqsn,
+                fqme=fqme,
                 exec_mode=mode,
                 loglevel=loglevel,
 
@@ -262,18 +272,36 @@ async def open_ems(
             # open 2-way trade command stream
             ctx.open_stream() as trades_stream,
         ):
+            size: int = 100  # what should this be?
+            tx, rx = trio.open_memory_channel(size)
+            brx = broadcast_receiver(rx, size)
+
+            # setup local ui event streaming channels for request/resp
+            # streaming with EMS daemon
+            client = OrderClient(
+                _ems_stream=trades_stream,
+                _to_relay_task=tx,
+                _from_sync_order_client=brx,
+            )
+
+            client._ems_stream = trades_stream
+
             # start sync code order msg delivery task
             async with trio.open_nursery() as n:
                 n.start_soon(
-                    relay_order_cmds_from_sync_code,
-                    fqsn,
+                    relay_orders_from_sync_code,
+                    client,
+                    fqme,
                     trades_stream
                 )
 
                 yield (
-                    book,
+                    client,
                     trades_stream,
                     positions,
                     accounts,
                     dialogs,
                 )
 
+                # stop the sync-msg-relay task on exit.
+                n.cancel_scope.cancel()
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -41,11 +41,13 @@ import trio
 from trio_typing import TaskStatus
 import tractor
 
-from ..log import get_logger
+from ._util import (
+    log,  # sub-sys logger
+    get_console_log,
+)
 from ..data._normalize import iterticks
-from ..data._source import (
-    unpack_fqsn,
-    mk_fqsn,
+from ..accounting._mktinfo import (
+    unpack_fqme,
     float_digits,
 )
 from ..data.feed import (
@@ -69,9 +71,6 @@ from ._messages import (
 )
 
 
-log = get_logger(__name__)
-
-
 # TODO: numba all of this
 def mk_check(
 
@@ -157,7 +156,7 @@ async def clear_dark_triggers(
     brokerd_orders_stream: tractor.MsgStream,
     quote_stream: tractor.ReceiveMsgStream,  # noqa
     broker: str,
-    fqsn: str,
+    fqme: str,
 
     book: DarkBook,
 
@@ -232,7 +231,7 @@ async def clear_dark_triggers(
                     account=account,
                     size=size,
                 ):
-                    bfqsn: str = symbol.replace(f'.{broker}', '')
+                    bfqme: str = symbol.replace(f'.{broker}', '')
                     submit_price = price + abs_diff_away
                     resp = 'triggered'  # hidden on client-side
 
@@ -245,7 +244,7 @@ async def clear_dark_triggers(
                         oid=oid,
                         account=account,
                         time_ns=time.time_ns(),
-                        symbol=bfqsn,
+                        symbol=bfqme,
                         price=submit_price,
                         size=size,
                     )
@@ -288,14 +287,14 @@ async def clear_dark_triggers(
 
                 # send response to client-side
                 await router.client_broadcast(
-                    fqsn,
+                    fqme,
                     status,
                 )
 
             else:  # condition scan loop complete
                 log.debug(f'execs are {execs}')
                 if execs:
-                    book.triggers[fqsn] = execs
+                    book.triggers[fqme] = execs
 
         # print(f'execs scan took: {time.time() - start}')
 
@@ -316,9 +315,6 @@ class TradesRelay(Struct):
     # allowed account names
     accounts: tuple[str]
 
-    # count of connected ems clients for this ``brokerd``
-    consumers: int = 0
-
 
 class Router(Struct):
     '''
@@ -334,9 +330,12 @@ class Router(Struct):
     # broker to book map
     books: dict[str, DarkBook] = {}
 
+    # NOTE: disable for since stupid "dunst"
+    notify_on_order_loads: bool = False
+
     # sets of clients mapped from subscription keys
     subscribers: defaultdict[
-        str,  # sub key, default fqsn
+        str,  # sub key, default fqme
         set[tractor.MsgStream],  # unique client streams
     ] = defaultdict(set)
 
@@ -387,7 +386,7 @@ class Router(Struct):
         brokermod: ModuleType,
         portal: tractor.Portal,
         exec_mode: str,
-        symbol: str,
+        fqme: str,
         loglevel: str,
 
     ) -> None:
@@ -408,11 +407,12 @@ class Router(Struct):
             yield relay
             return
 
-        trades_endpoint = getattr(brokermod, 'trades_dialogue', None)
-        if (
-            trades_endpoint is None
-            or exec_mode == 'paper'
-        ):
+        def mk_paper_ep():
+            nonlocal brokermod, exec_mode
+
+            # for logging purposes
+            brokermod = paper
+
             # for paper mode we need to mock this trades response feed
             # so we load bidir stream to a new sub-actor running
             # a paper-simulator clearing engine.
@@ -424,26 +424,53 @@ class Router(Struct):
             # load the paper trading engine as a subactor of this emsd
             # actor to simulate the real IPC load it'll have when also
             # pulling data from feeds
-            open_trades_endpoint = paper.open_paperboi(
-                fqsn='.'.join([symbol, broker]),
+            return paper.open_paperboi(
+                fqme=fqme,
                 loglevel=loglevel,
             )
 
-        else:
+        trades_endpoint = getattr(brokermod, 'trades_dialogue', None)
+        if (
+            trades_endpoint is not None
+            or exec_mode != 'paper'
+        ):
             # open live brokerd trades endpoint
             open_trades_endpoint = portal.open_context(
                 trades_endpoint,
                 loglevel=loglevel,
             )
 
+        else:
+            exec_mode: str = 'paper'
+
+        @acm
+        async def maybe_open_paper_ep():
+            if exec_mode == 'paper':
+                async with mk_paper_ep() as msg:
+                    yield msg
+                    return
+
+            # open trades-dialog endpoint with backend broker
+            async with open_trades_endpoint as msg:
+                ctx, first = msg
+
+                # runtime indication that the backend can't support live
+                # order ctrl yet, so boot the paperboi B0
+                if first == 'paper':
+                    async with mk_paper_ep() as msg:
+                        yield msg
+                        return
+                else:
+                    # working live ep case B)
+                    yield msg
+                    return
+
         positions: list[BrokerdPosition]
         accounts: tuple[str]
 
         async with (
-            open_trades_endpoint as (
+            maybe_open_paper_ep() as (
                 brokerd_ctx,
-                (positions, accounts,),
+                (positions, accounts),
             ),
             brokerd_ctx.open_stream() as brokerd_trades_stream,
         ):
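The paper-fallback flow above reads as "prefer live, boot the sim when the mode or the endpoint's first reply demands it". A runnable sketch of the same shape (both stub endpoints and all values are hypothetical)::

    from contextlib import asynccontextmanager as acm
    import trio

    @acm
    async def open_live_ep():
        yield ('live-ctx', 'paper')  # backend says: no live order ctrl

    @acm
    async def open_paper_ep():
        yield ('paper-ctx', ([], ('paper',)))

    @acm
    async def maybe_open_paper_ep(exec_mode: str):
        if exec_mode == 'paper':
            async with open_paper_ep() as msg:
                yield msg
                return

        async with open_live_ep() as (ctx, first):
            # runtime fallback: backend can't do live order ctrl
            if first == 'paper':
                async with open_paper_ep() as msg:
                    yield msg
            else:
                yield ctx, first

    async def main():
        async with maybe_open_paper_ep('live') as msg:
            print(msg)  # -> ('paper-ctx', ([], ('paper',)))

    trio.run(main)
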
@@ -466,30 +493,31 @@ class Router(Struct):
             # client set.
 
             # locally cache and track positions per account with
-            # a table of (brokername, acctid) -> `BrokerdPosition`
-            # msgs.
-            pps = {}
-            for msg in positions:
-                log.info(f'loading pp: {msg}')
-
-                account = msg['account']
-
-                # TODO: better value error for this which
-                # dumps the account and message and states the
-                # mismatch..
-                assert account in accounts
-
-                pps.setdefault(
-                    (broker, account),
-                    [],
-                ).append(msg)
+            # a nested table of msgs:
+            #  tuple(brokername, acctid) ->
+            #   (fqme: str ->
+            #    `BrokerdPosition`)
 
             relay = TradesRelay(
                 brokerd_stream=brokerd_trades_stream,
-                positions=pps,
+                positions={},
                 accounts=accounts,
-                consumers=1,
             )
+            for msg in positions:
+
+                msg = BrokerdPosition(**msg)
+                log.info(
+                    f'loading pp for {brokermod.__name__}:\n'
+                    f'{pformat(msg.to_dict())}',
+                )
+
+                # TODO: state any mismatch here?
+                account = msg.account
+                assert account in accounts
+
+                relay.positions.setdefault(
+                    (broker, account),
+                    {},
+                )[msg.symbol] = msg
 
             self.relays[broker] = relay
 
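The flat list-per-account table becomes a nested mapping keyed by fqme. A minimal sketch of the new shape (msg values invented)::

    # (broker, account) -> fqme -> position-msg
    positions: dict[tuple[str, str], dict[str, dict]] = {}

    msg = {'account': 'paper', 'symbol': 'xbtusdt.kucoin', 'size': 0.1}
    positions.setdefault(
        ('kucoin', msg['account']),
        {},
    )[msg['symbol']] = msg
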
@@ -507,7 +535,7 @@ class Router(Struct):
 
     async def open_trade_relays(
         self,
-        fqsn: str,
+        fqme: str,
         exec_mode: str,
         loglevel: str,
 
@@ -517,35 +545,33 @@ class Router(Struct):
 
     ) -> tuple[TradesRelay, Feed]:
         '''
-        Open and yield ``brokerd`` trades dialogue context-stream if
-        none already exists.
+        Maybe open a live feed to the target fqme, start `brokerd` order
+        msg relay and dark clearing tasks to run in the background
+        indefinitely.
 
         '''
-        from ..data._source import unpack_fqsn
-        broker, symbol, suffix = unpack_fqsn(fqsn)
-
         async with (
             maybe_open_feed(
-                [fqsn],
+                [fqme],
                 loglevel=loglevel,
             ) as feed,
         ):
-            brokername, _, _ = unpack_fqsn(fqsn)
+            brokername, _, _, _ = unpack_fqme(fqme)
             brokermod = feed.mods[brokername]
             broker = brokermod.name
             portal = feed.portals[brokermod]
 
             # XXX: this should be initial price quote from target provider
-            flume = feed.flumes[fqsn]
+            flume = feed.flumes[fqme]
             first_quote: dict = flume.first_quote
             book: DarkBook = self.get_dark_book(broker)
-            book.lasts[fqsn]: float = first_quote['last']
+            book.lasts[fqme]: float = float(first_quote['last'])
 
             async with self.maybe_open_brokerd_dialog(
                 brokermod=brokermod,
                 portal=portal,
                 exec_mode=exec_mode,
-                symbol=symbol,
+                fqme=fqme,
                 loglevel=loglevel,
             ) as relay:
 
@@ -558,7 +584,7 @@ class Router(Struct):
                     relay.brokerd_stream,
                     flume.stream,
                     broker,
-                    fqsn,  # form: <name>.<venue>.<suffix>.<broker>
+                    fqme,  # form: <name>.<venue>.<suffix>.<broker>
                     book
                 )
 
@@ -619,6 +645,7 @@ class Router(Struct):
 
         if (
             not sent_some
+            and self.notify_on_order_loads
             and notify_on_headless
         ):
             log.info(
@@ -638,11 +665,14 @@ _router: Router = None
 
 @tractor.context
 async def _setup_persistent_emsd(
 
     ctx: tractor.Context,
+    loglevel: str | None = None,
+
 ) -> None:
 
+    if loglevel:
+        get_console_log(loglevel)
+
     global _router
 
     # open a root "service nursery" for the ``emsd`` actor
@@ -692,16 +722,15 @@ async def translate_and_relay_brokerd_events(
     async for brokerd_msg in brokerd_trades_stream:
         fmsg = pformat(brokerd_msg)
         log.info(
-            f'Received broker trade event:\n'
+            f'Rx brokerd trade msg:\n'
             f'{fmsg}'
         )
-        status_msg: Optional[Status] = None
+        status_msg: Status | None = None
 
         match brokerd_msg:
             # BrokerdPosition
             case {
                 'name': 'position',
-                'symbol': sym,
                 'broker': broker,
             }:
                 pos_msg = BrokerdPosition(**brokerd_msg)
@@ -712,9 +741,9 @@ async def translate_and_relay_brokerd_events(
 
                 relay.positions.setdefault(
                     # NOTE: translate to a FQSN!
-                    (broker, sym),
-                    []
-                ).append(pos_msg)
+                    (broker, pos_msg.account),
+                    {}
+                )[pos_msg.symbol] = pos_msg
 
                 # fan-out-relay position msgs immediately by
                 # broadcasting updates on all client streams
@@ -781,12 +810,11 @@ async def translate_and_relay_brokerd_events(
                 # no msg to client necessary
                 continue
 
-            # BrokerdOrderError
+            # BrokerdError
             case {
                 'name': 'error',
                 'oid': oid,  # ems order-dialog id
                 'reqid': reqid,  # brokerd generated order-request id
-                'symbol': sym,
             }:
                 status_msg = book._active.get(oid)
                 msg = BrokerdError(**brokerd_msg)
@@ -947,9 +975,9 @@ async def translate_and_relay_brokerd_events(
                 # may end up with collisions?
                 status_msg = Status(**brokerd_msg)
 
-                # NOTE: be sure to pack an fqsn for the client side!
+                # NOTE: be sure to pack an fqme for the client side!
                 order = Order(**status_msg.req)
-                order.symbol = mk_fqsn(broker, order.symbol)
+                order.symbol = f'{order.symbol}.{broker}'
 
                 assert order.price and order.size
                 status_msg.req = order
@@ -1024,7 +1052,7 @@ async def process_client_order_cmds(
     client_order_stream: tractor.MsgStream,
     brokerd_order_stream: tractor.MsgStream,
 
-    fqsn: str,
+    fqme: str,
     flume: Flume,
     dark_book: DarkBook,
     router: Router,
@@ -1051,11 +1079,11 @@ async def process_client_order_cmds(
     # backend can be routed and relayed to subscribed clients.
     subs = router.dialogs[oid]
 
-    # add all subscribed clients for this fqsn (should eventually be
+    # add all subscribed clients for this fqme (should eventually be
     # a more generalize subscription system) to received order msg
     # updates (and thus show stuff in the UI).
     subs.add(client_order_stream)
-    subs.update(router.subscribers[fqsn])
+    subs.update(router.subscribers[fqme])
 
     reqid = dark_book._ems2brokerd_ids.inverse.get(oid)
 
@@ -1113,7 +1141,7 @@ async def process_client_order_cmds(
                 and status.resp == 'dark_open'
             ):
                 # remove from dark book clearing
-                entry = dark_book.triggers[fqsn].pop(oid, None)
+                entry = dark_book.triggers[fqme].pop(oid, None)
                 if entry:
                     (
                         pred,
@@ -1129,7 +1157,7 @@ async def process_client_order_cmds(
                     status.req = cmd
 
                     await router.client_broadcast(
-                        fqsn,
+                        fqme,
                         status,
                     )
 
@@ -1139,7 +1167,7 @@ async def process_client_order_cmds(
                     dark_book._active.pop(oid)
 
                 else:
-                    log.exception(f'No dark order for {fqsn}?')
+                    log.exception(f'No dark order for {fqme}?')
 
             # TODO: eventually we should be receiving
             # this struct on the wire unpacked in a scoped protocol
@@ -1148,7 +1176,7 @@ async def process_client_order_cmds(
         # LIVE order REQUEST
         case {
             'oid': oid,
-            'symbol': fqsn,
+            'symbol': fqme,
             'price': trigger_price,
             'size': size,
             'action': ('buy' | 'sell') as action,
@@ -1161,7 +1189,7 @@ async def process_client_order_cmds(
             # remove the broker part before creating a message
             # to send to the specific broker since they probably
             # aren't expectig their own name, but should they?
-            sym = fqsn.replace(f'.{broker}', '')
+            sym = fqme.replace(f'.{broker}', '')
 
             if status is not None:
                 # if we already had a broker order id then
@@ -1218,7 +1246,7 @@ async def process_client_order_cmds(
         # DARK-order / alert REQUEST
         case {
             'oid': oid,
-            'symbol': fqsn,
+            'symbol': fqme,
             'price': trigger_price,
             'size': size,
             'exec_mode': exec_mode,
@@ -1240,7 +1268,7 @@ async def process_client_order_cmds(
             # price received from the feed, instead of being
             # like every other shitty tina platform that makes
             # the user choose the predicate operator.
-            last = dark_book.lasts[fqsn]
+            last = dark_book.lasts[fqme]
 
             # sometimes the real-time feed hasn't come up
             # so just pull from the latest history.
@@ -1249,8 +1277,13 @@ async def process_client_order_cmds(
 
             pred = mk_check(trigger_price, last, action)
 
+            # NOTE: for dark orders currently we submit
+            # the triggered live order at a price 5 ticks
+            # above/below the L1 prices.
+            # TODO: make this configurable from our top level
+            # config, prolly in a .clearing` section?
             spread_slap: float = 5
-            min_tick = flume.symbol.tick_size
+            min_tick = float(flume.mkt.size_tick)
             min_tick_digits = float_digits(min_tick)
 
             if action == 'buy':
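A worked example of the dark-trigger price arithmetic referenced above, with invented numbers (the 5-tick offset matches the ``spread_slap`` constant in the diff)::

    spread_slap = 5        # ticks away from L1 once triggered
    min_tick = 0.01        # e.g. float(flume.mkt.size_tick)
    trigger_price = 100.00

    abs_diff_away = spread_slap * min_tick
    # buy-side dark trigger: submit the live order *above* the trigger
    submit_price = trigger_price + abs_diff_away
    assert round(submit_price, 2) == 100.05
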
@@ -1282,7 +1315,7 @@ async def process_client_order_cmds(
             # NOTE: this may result in an override of an existing
             # dark book entry if the order id already exists
             dark_book.triggers.setdefault(
-                fqsn, {}
+                fqme, {}
             )[oid] = (
                 pred,
                 tickfilter,
@@ -1307,7 +1340,7 @@ async def process_client_order_cmds(
 
             # broadcast status to all subscribed clients
             await router.client_broadcast(
-                fqsn,
+                fqme,
                 status,
             )
 
@@ -1318,35 +1351,36 @@ async def process_client_order_cmds(
 @acm
 async def maybe_open_trade_relays(
     router: Router,
-    fqsn: str,
+    fqme: str,
     exec_mode: str,  # ('paper', 'live')
     loglevel: str = 'info',
 
 ) -> tuple:
 
-    def cache_on_fqsn_unless_paper(
+    def cache_on_fqme_unless_paper(
         router: Router,
-        fqsn: str,
+        fqme: str,
         exec_mode: str,  # ('paper', 'live')
         loglevel: str = 'info',
     ) -> Hashable:
         if exec_mode == 'paper':
-            return f'paper_{fqsn}'
+            return f'paper_{fqme}'
         else:
-            return fqsn
+            return fqme
 
     # XXX: closure to enable below use of
     # ``tractor.trionics.maybe_open_context()``
     @acm
     async def cached_mngr(
         router: Router,
-        fqsn: str,
+        fqme: str,
        exec_mode: str,  # ('paper', 'live')
        loglevel: str = 'info',
    ):
 
         relay, feed, client_ready = await _router.nursery.start(
             _router.open_trade_relays,
-            fqsn,
+            fqme,
             exec_mode,
             loglevel,
         )
@@ -1356,24 +1390,28 @@ async def maybe_open_trade_relays(
         acm_func=cached_mngr,
         kwargs={
             'router': _router,
-            'fqsn': fqsn,
+            'fqme': fqme,
             'exec_mode': exec_mode,
             'loglevel': loglevel,
         },
-        key=cache_on_fqsn_unless_paper,
+        key=cache_on_fqme_unless_paper,
     ) as (
         cache_hit,
         (relay, feed, client_ready)
     ):
+        if cache_hit:
+            log.info(f'Reusing existing trades relay for {fqme}:\n'
+                     f'{relay}\n')
+
         yield relay, feed, client_ready
 
 
 @tractor.context
 async def _emsd_main(
     ctx: tractor.Context,
-    fqsn: str,
+    fqme: str,
     exec_mode: str,  # ('paper', 'live')
-    loglevel: str = 'info',
+    loglevel: str | None = None,
 
 ) -> tuple[
     dict[
@@ -1428,7 +1466,7 @@ async def _emsd_main(
     global _router
     assert _router
 
-    broker, symbol, suffix = unpack_fqsn(fqsn)
+    broker, _, _, _ = unpack_fqme(fqme)
 
     # TODO: would be nice if in tractor we can require either a ctx arg,
     # or a named arg with ctx in it and a type annotation of
@@ -1445,7 +1483,7 @@ async def _emsd_main(
     # few duplicate streams as necessary per ems actor.
     async with maybe_open_trade_relays(
         _router,
-        fqsn,
+        fqme,
         exec_mode,
         loglevel,
     ) as (relay, feed, client_ready):
@@ -1468,28 +1506,28 @@ async def _emsd_main(
         # register the client side before starting the
         # brokerd-side relay task to ensure the client is
         # delivered all exisiting open orders on startup.
-        # TODO: instead of by fqsn we need a subscription
+        # TODO: instead of by fqme we need a subscription
         # system/schema here to limit what each new client is
         # allowed to see in terms of broadcasted order flow
         # updates per dialog.
-        _router.subscribers[fqsn].add(client_stream)
+        _router.subscribers[fqme].add(client_stream)
         client_ready.set()
 
         # start inbound (from attached client) order request processing
         # main entrypoint, run here until cancelled.
         try:
-            flume = feed.flumes[fqsn]
+            flume = feed.flumes[fqme]
             await process_client_order_cmds(
                 client_stream,
                 brokerd_stream,
-                fqsn,
+                fqme,
                 flume,
                 dark_book,
                 _router,
             )
         finally:
             # try to remove client from subscription registry
-            _router.subscribers[fqsn].remove(client_stream)
+            _router.subscribers[fqme].remove(client_stream)
 
             for oid, client_streams in _router.dialogs.items():
                 client_streams.discard(client_stream)
 
@@ -29,7 +29,6 @@ from typing import (
 
 from msgspec import field
 
-from ..data._source import Symbol
 from ..data.types import Struct
 
 
@@ -94,7 +93,8 @@ class Order(Struct):
 
     # internal ``emdsd`` unique "order id"
     oid: str  # uuid4
-    symbol: str | Symbol
+    # TODO: figure out how to optionally typecast this to `MktPair`?
+    symbol: str  # | MktPair
     account: str  # should we set a default as '' ?
 
     price: float
@@ -191,7 +191,7 @@ class BrokerdOrder(Struct):
     account: str
     time_ns: int
 
-    symbol: str  # fqsn
+    symbol: str  # fqme
     price: float
     size: float
 
@@ -300,10 +300,10 @@ class BrokerdError(Struct):
 
 
 class BrokerdPosition(Struct):
-    '''Position update event from brokerd.
+    '''
+    Position update event from brokerd.
 
     '''
     broker: str
     account: str
     symbol: str
 
|
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -14,19 +14,20 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
-Fake trading for forward testing.
+'''
+Fake trading: a full forward testing simulation engine.

-"""
+We can real-time emulate any mkt conditions you want bruddr B)
+Just slide us the model que quieres..
+
+'''
 from collections import defaultdict
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from datetime import datetime
 from operator import itemgetter
 import itertools
 import time
 from typing import (
-    Any,
-    Optional,
     Callable,
 )
 import uuid

@@ -36,18 +37,25 @@ import pendulum
 import trio
 import tractor

+from ..brokers import get_brokermod
 from .. import data
 from ..data.types import Struct
-from ..data._source import Symbol
-from ..pp import (
+from ..accounting._mktinfo import (
+    MktPair,
+)
+from ..accounting import (
     Position,
+    PpTable,
     Transaction,
+    TransactionLedger,
     open_trade_ledger,
     open_pps,
 )
 from ..data._normalize import iterticks
-from ..data._source import unpack_fqsn
-from ..log import get_logger
+from ..accounting import unpack_fqme
+from ._util import (
+    log,  # sub-sys logger
+)
 from ._messages import (
     BrokerdCancel,
     BrokerdOrder,
@@ -58,10 +66,6 @@ from ._messages import (
     BrokerdError,
 )

-from ..config import load
-
-
-log = get_logger(__name__)


 class PaperBoi(Struct):
     '''

@@ -75,14 +79,15 @@ class PaperBoi(Struct):

     ems_trades_stream: tractor.MsgStream

+    ppt: PpTable
+    ledger: TransactionLedger
+
     # map of paper "live" orders which be used
     # to simulate fills based on paper engine settings
     _buys: defaultdict[str, bidict]
     _sells: defaultdict[str, bidict]
     _reqids: bidict
-    _positions: dict[str, Position]
-    _trade_ledger: dict[str, Any]
-    _syms: dict[str, Symbol] = {}
+    _mkts: dict[str, MktPair] = {}

     # init edge case L1 spread
     last_ask: tuple[float, float] = (float('inf'), 0)  # price, size

@@ -95,7 +100,7 @@ class PaperBoi(Struct):
         price: float,
         action: str,
         size: float,
-        reqid: Optional[str],
+        reqid: str | None,

     ) -> int:
         '''

@@ -121,7 +126,10 @@ class PaperBoi(Struct):
         # in the broker trades event processing loop
         await trio.sleep(0.05)

-        if action == 'sell':
+        if (
+            action == 'sell'
+            and size > 0
+        ):
             size = -size

         msg = BrokerdStatus(

@@ -197,7 +205,7 @@ class PaperBoi(Struct):
     async def fake_fill(
         self,

-        fqsn: str,
+        fqme: str,
         price: float,
         size: float,
         action: str,  # one of {'buy', 'sell'}
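The new guard in the order-submit path above makes the sell-side sign flip idempotent: a `size` that is already negative is left alone. A tiny standalone check of that convention (`normalize_size` is a hypothetical helper name, used only to illustrate):

def normalize_size(action: str, size: float) -> float:
    # flip sells to negative size exactly once
    if (
        action == 'sell'
        and size > 0
    ):
        size = -size
    return size

assert normalize_size('sell', 2.0) == -2.0
assert normalize_size('sell', -2.0) == -2.0  # already normalized, no-op
assert normalize_size('buy', 2.0) == 2.0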
@@ -250,41 +258,44 @@ class PaperBoi(Struct):
         )
         await self.ems_trades_stream.send(msg)

-        # lookup any existing position
-        key = fqsn.rstrip(f'.{self.broker}')
+        # NOTE: for paper we set the "bs_mktid" as just the fqme since
+        # we don't actually have any unique backend symbol ourselves
+        # other then this thing, our fqme address.
+        bs_mktid: str = fqme
         t = Transaction(
-            fqsn=fqsn,
-            sym=self._syms[fqsn],
+            fqme=fqme,
+            sym=self._mkts[fqme],
             tid=oid,
             size=size,
             price=price,
             cost=0,  # TODO: cost model
             dt=pendulum.from_timestamp(fill_time_s),
-            bsuid=key,
+            bs_mktid=bs_mktid,
         )

-        with (
-            open_trade_ledger(self.broker, 'paper') as ledger,
-            open_pps(self.broker, 'paper', write_on_exit=True) as table
-        ):
-            tx = t.to_dict()
-            tx.pop('sym')
-            ledger.update({oid: tx})
-            # Write to pps toml right now
-            table.update_from_trans({oid: t})
-
-            pp = table.pps[key]
+        # update in-mem ledger and pos table
+        self.ledger.update_from_t(t)
+        self.ppt.update_from_trans({oid: t})
+
+        # transmit pp msg to ems
+        pp = self.ppt.pps[bs_mktid]
         pp_msg = BrokerdPosition(
             broker=self.broker,
             account='paper',
-            symbol=fqsn,
+            symbol=fqme,
+
+            size=pp.size,
+            avg_price=pp.ppu,

             # TODO: we need to look up the asset currency from
             # broker info. i guess for crypto this can be
             # inferred from the pair?
-            currency=key,
-            size=pp.size,
-            avg_price=pp.ppu,
+            # currency=bs_mktid,
         )
+        # write all updates to filesys immediately
+        # (adds latency but that works for simulation anyway)
+        self.ledger.write_config()
+        self.ppt.write_config()

         await self.ems_trades_stream.send(pp_msg)
@@ -421,7 +432,7 @@ async def simulate_fills(

             # clearing price would have filled entirely
             await client.fake_fill(
-                fqsn=sym,
+                fqme=sym,
                 # todo slippage to determine fill price
                 price=tick_price,
                 size=size,

@@ -469,6 +480,7 @@ async def handle_order_requests(
                 BrokerdOrderAck(
                     oid=order.oid,
                     reqid=reqid,
+                    account='paper'
                 )
             )

@@ -512,7 +524,6 @@ _sells: defaultdict[
         tuple[float, float, str, str],  # order info
     ]
 ] = defaultdict(bidict)
-_positions: dict[str, Position] = {}


 @tractor.context
@@ -520,33 +531,86 @@ async def trades_dialogue(

     ctx: tractor.Context,
     broker: str,
-    fqsn: str,
-    loglevel: str = None,
+    fqme: str | None = None,  # if empty, we only boot broker mode
+    loglevel: str = 'warning',

 ) -> None:

     tractor.log.get_console_log(loglevel)

-    async with (
-        data.open_feed(
-            [fqsn],
-            loglevel=loglevel,
-        ) as feed,
+    ppt: PpTable
+    ledger: TransactionLedger
+    with (
+        open_pps(
+            broker,
+            'paper',
+            write_on_exit=True,
+        ) as ppt,

+        open_trade_ledger(
+            broker,
+            'paper',
+        ) as ledger
     ):
+        # NOTE: retreive market(pair) info from the backend broker
+        # since ledger entries (in their backend native format) often
+        # don't contain necessary market info per trade record entry..
+        # - if no fqme was passed in, we presume we're running in
+        #   "ledger-sync-only mode" and thus we load mkt info for
+        #   each symbol found in the ledger to a ppt table manually.

-        with open_pps(broker, 'paper') as table:
-            # save pps in local state
-            _positions.update(table.pps)
+        # TODO: how to process ledger info from backends?
+        # - should we be rolling our own actor-cached version of these
+        #   client API refs or using portal IPC to send requests to the
+        #   existing brokerd daemon?
+        # - alternatively we can possibly expect and use
+        #   a `.broker.norm_trade_records()` ep?
+        brokermod = get_brokermod(broker)
+        gmi = getattr(brokermod, 'get_mkt_info', None)
+
+        # update all transactions with mkt info before
+        # loading any pps
+        mkt_by_fqme: dict[str, MktPair] = {}
+        if fqme:
+            bs_fqme, _, broker = fqme.rpartition('.')
+            mkt, _ = await brokermod.get_mkt_info(bs_fqme)
+            mkt_by_fqme[fqme] = mkt
+
+        # for each sym in the ledger load it's `MktPair` info
+        for tid, txdict in ledger.data.items():
+            l_fqme: str = txdict.get('fqme') or txdict['fqsn']
+
+            if (
+                gmi
+                and l_fqme not in mkt_by_fqme
+            ):
+                mkt, pair = await brokermod.get_mkt_info(
+                    l_fqme.rstrip(f'.{broker}'),
+                )
+                mkt_by_fqme[l_fqme] = mkt
+
+            # if an ``fqme: str`` input was provided we only
+            # need a ``MktPair`` for that one market, since we're
+            # running in real simulated-clearing mode, not just ledger
+            # syncing.
+            if (
+                fqme is not None
+                and fqme in mkt_by_fqme
+            ):
+                break
+
+        # update pos table from ledger history and provide a ``MktPair``
+        # lookup for internal position accounting calcs.
+        ppt.update_from_trans(ledger.to_trans(mkt_by_fqme=mkt_by_fqme))

         pp_msgs: list[BrokerdPosition] = []
         pos: Position
         token: str  # f'{symbol}.{self.broker}'
-        for token, pos in _positions.items():
+        for token, pos in ppt.pps.items():
             pp_msgs.append(BrokerdPosition(
                 broker=broker,
                 account='paper',
-                symbol=pos.symbol.front_fqsn(),
+                symbol=pos.mkt.fqme,
                 size=pos.size,
                 avg_price=pos.ppu,
             ))
@@ -556,26 +620,47 @@ async def trades_dialogue(
             ['paper'],
         ))

+        # write new positions state in case ledger was
+        # newer then that tracked in pps.toml
+        ppt.write_config()
+
+        # exit early since no fqme was passed,
+        # normally this case is just to load
+        # positions "offline".
+        if fqme is None:
+            log.warning(
+                'Paper engine only running in position delivery mode!\n'
+                'NO SIMULATED CLEARING LOOP IS ACTIVE!'
+            )
+            await trio.sleep_forever()
+            return
+
+        async with (
+            data.open_feed(
+                [fqme],
+                loglevel=loglevel,
+            ) as feed,
+        ):
+            # sanity check all the mkt infos
+            for fqme, flume in feed.flumes.items():
+                assert mkt_by_fqme[fqme] == flume.mkt
+
             async with (
                 ctx.open_stream() as ems_stream,
                 trio.open_nursery() as n,
             ):
                 client = PaperBoi(
-                    broker,
-                    ems_stream,
+                    broker=broker,
+                    ems_trades_stream=ems_stream,
+                    ppt=ppt,
+                    ledger=ledger,
+
                     _buys=_buys,
                     _sells=_sells,
-
                     _reqids=_reqids,
-
-                    _positions=_positions,
-
-                    # TODO: load postions from ledger file
-                    _trade_ledger={},
-                    _syms={
-                        fqsn: flume.symbol
-                        for fqsn, flume in feed.flumes.items()
-                    }
+                    _mkts=mkt_by_fqme,
                 )

                 n.start_soon(
@@ -588,10 +673,11 @@ async def trades_dialogue(
                 await simulate_fills(feed.streams[broker], client)


-@asynccontextmanager
+@acm
 async def open_paperboi(
-    fqsn: str,
-    loglevel: str,
+    fqme: str | None = None,
+    broker: str | None = None,
+    loglevel: str | None = None,

 ) -> Callable:
     '''
@@ -599,28 +685,39 @@ async def open_paperboi(
     its context.

     '''
-    broker, symbol, expiry = unpack_fqsn(fqsn)
+    if not fqme:
+        assert broker, 'One of `broker` or `fqme` is required siss..!'
+    else:
+        broker, _, _, _ = unpack_fqme(fqme)
+
+    we_spawned: bool = False
     service_name = f'paperboi.{broker}'

     async with (
         tractor.find_actor(service_name) as portal,
         tractor.open_nursery() as tn,
     ):
-        # only spawn if no paperboi already is up
-        # (we likely don't need more then one proc for basic
-        # simulated order clearing)
+        # NOTE: only spawn if no paperboi already is up since we likely
+        # don't need more then one actor for simulated order clearing
+        # per broker-backend.
        if portal is None:
            log.info('Starting new paper-engine actor')
            portal = await tn.start_actor(
                service_name,
                enable_modules=[__name__]
            )
+            we_spawned = True

        async with portal.open_context(
            trades_dialogue,
            broker=broker,
-           fqsn=fqsn,
+           fqme=fqme,
            loglevel=loglevel,

        ) as (ctx, first):
            yield ctx, first
+
+           # tear down connection and any spawned actor on exit
+           await ctx.cancel()
+           if we_spawned:
+               await portal.cancel_actor()
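A usage sketch of the reworked `open_paperboi()` signature: with only `broker` passed the engine boots in position-delivery ("ledger-sync") mode, while passing an `fqme` also arms the simulated clearing loop. The import path and the market key are assumptions for illustration, and this obviously needs a live piker environment to actually run:

from piker.clearing._paper_engine import open_paperboi  # path assumed

async def sync_positions_only():
    # no fqme -> no data feed, no fill simulation
    async with open_paperboi(broker='kraken') as (ctx, first):
        ...

async def simulate_market():
    # fqme -> full simulated-clearing mode for that one market
    async with open_paperboi(fqme='xbtusdt.kraken') as (ctx, first):
        ...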
@@ -0,0 +1,33 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Sub-sys module commons.
+
+"""
+from functools import partial
+
+from ..log import (
+    get_logger,
+    get_console_log,
+)
+subsys: str = 'piker.clearing'
+
+log = get_logger(subsys)
+
+get_console_log = partial(
+    get_console_log,
+    name=subsys,
+)
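This new `_util` module pins a sub-system name onto a shared logger once so consumers just import `log`. A stdlib-only sketch of the same pattern (names illustrative, not piker's actual API):

import logging
from functools import partial

subsys: str = 'piker.clearing'

log = logging.getLogger(subsys)

# pre-bind the subsystem name, mirroring the
# `partial(get_console_log, name=subsys)` trick above
get_console_log = partial(logging.getLogger, subsys)

if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    log.info('order received')  # emitted under the piker.clearing namespace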
@@ -19,6 +19,7 @@ CLI commons.

 '''
 import os
+from contextlib import AsyncExitStack

 import click
 import trio

@@ -69,8 +70,8 @@ def pikerd(
     Spawn the piker broker-daemon.

     '''
+    from .. import service

-    from ..service import open_pikerd
     log = get_console_log(loglevel)

     if pdb:

@@ -90,17 +91,36 @@ def pikerd(
     )

     async def main():
+        service_mngr: service.Services
+
         async with (
-            open_pikerd(
-                tsdb=tsdb,
-                es=es,
+            service.open_pikerd(
                 loglevel=loglevel,
                 debug_mode=pdb,
                 registry_addr=reg_addr,

-            ),  # normally delivers a ``Services`` handle
+            ) as service_mngr,  # normally delivers a ``Services`` handle
             trio.open_nursery() as n,
+
+            AsyncExitStack() as stack,
         ):
+            if tsdb:
+                dname, conf = await stack.enter_async_context(
+                    service.marketstore.start_ahab_daemon(
+                        service_mngr,
+                        loglevel=loglevel,
+                    )
+                )
+                log.info(f'TSDB `{dname}` up with conf:\n{conf}')
+
+            if es:
+                dname, conf = await stack.enter_async_context(
+                    service.elastic.start_ahab_daemon(
+                        service_mngr,
+                        loglevel=loglevel,
+                    )
+                )
+                log.info(f'DB `{dname}` up with conf:\n{conf}')
+
             await trio.sleep_forever()
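The `AsyncExitStack` added to `main()` lets the CLI enter the optional tsdb/es daemon contexts only when their flags are set while still tearing everything down together on exit. A self-contained sketch of that pattern with toy stand-ins for the daemons:

from contextlib import AsyncExitStack, asynccontextmanager

import trio

@asynccontextmanager
async def toy_daemon(name: str):
    print(f'{name} up')
    try:
        yield name
    finally:
        print(f'{name} down')

async def main(tsdb: bool = True, es: bool = False):
    async with AsyncExitStack() as stack:
        # each optional service is entered only if flagged on, but all
        # entered contexts unwind together when the stack exits
        if tsdb:
            await stack.enter_async_context(toy_daemon('marketstore'))
        if es:
            await stack.enter_async_context(toy_daemon('elasticsearch'))
        await trio.sleep(0.1)  # stand-in for `trio.sleep_forever()`

trio.run(main)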
piker/config.py (275 changes)

@@ -21,14 +21,21 @@ Platform configuration (files) mgmt.
 import platform
 import sys
 import os
-from os import path
-from os.path import dirname
 import shutil
-from typing import Optional
+import time
+from typing import (
+    Callable,
+    MutableMapping,
+)
 from pathlib import Path

 from bidict import bidict
-import toml
+import tomlkit
+try:
+    import tomllib
+except ModuleNotFoundError:
+    import tomli as tomllib


 from .log import get_logger
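The import split above encodes a read/write division of labor: `tomllib` (stdlib from python 3.11, with the `tomli` backport otherwise) does fast read-only parsing, while `tomlkit` round-trips comments and formatting for writes. A quick self-contained demo:

import tomlkit
try:
    import tomllib  # stdlib on python >= 3.11
except ModuleNotFoundError:
    import tomli as tomllib  # backport for older interpreters

doc = """
# api credentials
[kraken]
api_key = ''  # fill me in
"""

# fast parse -> plain dict; comments are dropped
conf: dict = tomllib.loads(doc)
assert conf['kraken']['api_key'] == ''

# style-preserving parse -> mutable document; comments survive a dump
tk_doc = tomlkit.parse(doc)
tk_doc['kraken']['api_key'] = 'abc123'
assert '# api credentials' in tomlkit.dumps(tk_doc)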
@@ -126,30 +133,33 @@ def get_app_dir(
     )


-_config_dir = _click_config_dir = get_app_dir('piker')
-_parent_user = os.environ.get('SUDO_USER')
+_click_config_dir: Path = Path(get_app_dir('piker'))
+_config_dir: Path = _click_config_dir
+_parent_user: str = os.environ.get('SUDO_USER')

 if _parent_user:
-    non_root_user_dir = os.path.expanduser(
-        f'~{_parent_user}'
+    non_root_user_dir = Path(
+        os.path.expanduser(f'~{_parent_user}')
     )
-    root = 'root'
+    root: str = 'root'
+    _ccds: str = str(_click_config_dir)  # click config dir string
+    i_tail: int = int(_ccds.rfind(root) + len(root))
     _config_dir = (
-        non_root_user_dir +
-        _click_config_dir[
-            _click_config_dir.rfind(root) + len(root):
-        ]
+        non_root_user_dir
+        /
+        Path(_ccds[i_tail+1:])  # +1 to capture trailing '/'
     )


 _conf_names: set[str] = {
-    'brokers',
-    'pps',
-    'trades',
-    'watchlists',
-    'paper_trades'
+    'conf',        # god config
+    'brokers',     # sec backend deatz
+    'watchlists',  # (user defined) market lists
 }

-_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
+# TODO: probably drop all this super legacy, questrade specific,
+# config stuff XD ?
+_watchlists_data_path: Path = _config_dir / Path('watchlists.json')
 _context_defaults = dict(
     default_map={
         # Questrade specific quote poll rates
@@ -180,7 +190,7 @@ def _conf_fn_w_ext(
 def get_conf_path(
     conf_name: str = 'brokers',

-) -> str:
+) -> Path:
     '''
     Return the top-level default config path normally under
     ``~/.config/piker`` on linux for a given ``conf_name``, the config

@@ -188,7 +198,6 @@ def get_conf_path(

     Contains files such as:
     - brokers.toml
-    - pp.toml
     - watchlists.toml

     # maybe coming soon ;)

@@ -196,72 +205,187 @@ def get_conf_path(
     - strats.toml

     '''
-    assert conf_name in _conf_names
+    if 'account.' not in conf_name:
+        assert str(conf_name) in _conf_names

     fn = _conf_fn_w_ext(conf_name)
-    return os.path.join(
-        _config_dir,
-        fn,
-    )
+    return _config_dir / Path(fn)


-def repodir():
+def repodir() -> Path:
     '''
-    Return the abspath to the repo directory.
+    Return the abspath as ``Path`` to the git repo's root dir.

     '''
-    dirpath = path.abspath(
-        # we're 3 levels down in **this** module file
-        dirname(dirname(os.path.realpath(__file__)))
-    )
-    return dirpath
+    repodir: Path = Path(__file__).absolute().parent.parent
+    confdir: Path = repodir / 'config'
+
+    if not confdir.is_dir():
+        # prolly inside stupid GH actions CI..
+        repodir: Path = Path(os.environ.get('GITHUB_WORKSPACE'))
+        confdir: Path = repodir / 'config'
+
+    assert confdir.is_dir(), f'{confdir} DNE, {repodir} is likely incorrect!'
+    return repodir


 def load(
-    conf_name: str = 'brokers',
-    path: str = None,
+    conf_name: str = 'brokers',  # appended with .toml suffix
+    path: Path | None = None,
+
+    decode: Callable[
+        [str | bytes,],
+        MutableMapping,
+    ] = tomllib.loads,
+
+    touch_if_dne: bool = False,

     **tomlkws,

-) -> (dict, str):
+) -> tuple[dict, Path]:
     '''
     Load config file by name.

+    If desired config is not in the top level piker-user config path then
+    pass the ``path: Path`` explicitly.
+
     '''
-    path = path or get_conf_path(conf_name)
-
-    if not os.path.isdir(_config_dir):
-        Path(_config_dir).mkdir(parents=True, exist_ok=True)
-
-    if not os.path.isfile(path):
-        fn = _conf_fn_w_ext(conf_name)
-
-        template = os.path.join(
-            repodir(),
-            'config',
-            fn
-        )
-        # try to copy in a template config to the user's directory
-        # if one exists.
-        if os.path.isfile(template):
-            shutil.copyfile(template, path)
-        else:
-            # create an empty file
-            with open(path, 'x'):
-                pass
-    else:
-        with open(path, 'r'):
-            pass  # touch it
-
-    config = toml.load(path, **tomlkws)
+    # create the $HOME/.config/piker dir if dne
+    if not _config_dir.is_dir():
+        _config_dir.mkdir(
+            parents=True,
+            exist_ok=True,
+        )
+
+    path_provided: bool = path is not None
+    path: Path = path or get_conf_path(conf_name)
+
+    if (
+        not path.is_file()
+        and touch_if_dne
+    ):
+        # only do a template if no path provided,
+        # just touch an empty file with same name.
+        if path_provided:
+            with path.open(mode='x'):
+                pass
+
+        # try to copy in a template config to the user's dir if one
+        # exists.
+        else:
+            fn: str = _conf_fn_w_ext(conf_name)
+            template: Path = repodir() / 'config' / fn
+            if template.is_file():
+                shutil.copyfile(template, path)
+
+            elif fn and template:
+                assert template.is_file(), f'{template} is not a file!?'
+
+        assert path.is_file(), f'Config file {path} not created!?'
+
+    with path.open(mode='r') as fp:
+        config: dict = decode(
+            fp.read(),
+            **tomlkws,
+        )
     log.debug(f"Read config file {path}")
     return config, path


+def load_account(
+    brokername: str,
+    acctid: str,
+
+) -> tuple[dict, Path]:
+    '''
+    Load a accounting (with positions) file from
+    $PIKER_CONFIG_DIR/accounting/account.<brokername>.<acctid>.toml.
+
+    Where normally $PIKER_CONFIG_DIR = ~/.config/piker/
+    and we implicitly create a accounting subdir which should
+    normally be linked to a git repo managed by the user B)
+
+    '''
+    legacy_fn: str = f'pps.{brokername}.{acctid}.toml'
+    fn: str = f'account.{brokername}.{acctid}.toml'
+
+    dirpath: Path = _config_dir / 'accounting'
+    if not dirpath.is_dir():
+        dirpath.mkdir()
+
+    config, path = load(
+        path=dirpath / fn,
+        decode=tomlkit.parse,
+        touch_if_dne=True,
+    )
+
+    if not config:
+        legacypath = dirpath / legacy_fn
+        log.warning(
+            f'Your account file is using the legacy `pps.` prefix..\n'
+            f'Rewriting contents to new name -> {path}\n'
+            'Please delete the old file!\n'
+            f'|-> {legacypath}\n'
+        )
+        if legacypath.is_file():
+            legacy_config, _ = load(
+                path=legacypath,
+
+                # TODO: move to tomlkit:
+                # - needs to be fixed to support bidict?
+                #   https://github.com/sdispater/tomlkit/issues/289
+                # - we need to use or fork's fix to do multiline array
+                #   indenting.
+                decode=tomlkit.parse,
+            )
+            config.update(legacy_config)
+
+            # XXX: override the presumably previously non-existant
+            # file with legacy's contents.
+            write(
+                config,
+                path=path,
+                fail_empty=False,
+            )
+
+    return config, path
+
+
+def load_ledger(
+    brokername: str,
+    acctid: str,
+
+) -> tuple[dict, Path]:
+
+    ldir: Path = _config_dir / 'accounting' / 'ledgers'
+    if not ldir.is_dir():
+        ldir.mkdir()
+
+    fname = f'trades_{brokername}_{acctid}.toml'
+    fpath: Path = ldir / fname
+
+    if not fpath.is_file():
+        log.info(
+            f'Creating new local trades ledger: {fpath}'
+        )
+        fpath.touch()
+
+    with fpath.open(mode='rb') as cf:
+        start = time.time()
+        ledger_dict = tomllib.load(cf)
+        log.debug(f'Ledger load took {time.time() - start}s')
+
+    return ledger_dict, fpath


 def write(
     config: dict,  # toml config as dict
-    name: str = 'brokers',
-    path: str = None,
+
+    name: str | None = None,
+    path: Path | None = None,
     fail_empty: bool = True,

     **toml_kwargs,

 ) -> None:
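Putting the two new helpers together, a usage sketch (the broker/account ids are placeholders and the module path is assumed; per the docstring above the files land under `~/.config/piker/accounting/`):

from piker import config  # module path assumed

# loads accounting/account.kraken.paper.toml, creating it (or migrating
# a legacy `pps.`-prefixed file into it) on first touch
acct, acct_path = config.load_account('kraken', 'paper')

# loads accounting/ledgers/trades_kraken_paper.toml, touching it if new
ledger, ledger_path = config.load_ledger('kraken', 'paper')

print(acct_path, ledger_path)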
@@ -271,32 +395,37 @@ def write(
     Create a ``brokers.ini`` file if one does not exist.

     '''
-    path = path or get_conf_path(name)
-    dirname = os.path.dirname(path)
-    if not os.path.isdir(dirname):
+    if name:
+        path: Path = path or get_conf_path(name)
+    dirname: Path = path.parent
+    if not dirname.is_dir():
         log.debug(f"Creating config dir {_config_dir}")
-        os.makedirs(dirname)
+        dirname.mkdir()

-    if not config and fail_empty:
+    if (
+        not config
+        and fail_empty
+    ):
         raise ValueError(
-            "Watch out you're trying to write a blank config!")
+            "Watch out you're trying to write a blank config!"
+        )

     log.debug(
         f"Writing config `{name}` file to:\n"
         f"{path}"
     )
-    with open(path, 'w') as cf:
-        return toml.dump(
+    with path.open(mode='w') as fp:
+        return tomlkit.dump(  # preserve style on write B)
             config,
-            cf,
+            fp,
             **toml_kwargs,
         )


 def load_accounts(
-    providers: Optional[list[str]] = None
+    providers: list[str] | None = None

-) -> bidict[str, Optional[str]]:
+) -> bidict[str, str | None]:

     conf, path = load()
     accounts = bidict()
@@ -25,7 +25,7 @@ sharing live streams over a network.
 import tractor
 import trio

-from ..log import (
+from ._util import (
     get_console_log,
 )
 from ._normalize import iterticks

@@ -50,39 +50,3 @@ __all__ = [
     'open_shm_array',
     'get_shm_token',
 ]
-
-
-@tractor.context
-async def _setup_persistent_brokerd(
-    ctx: tractor.Context,
-    brokername: str,
-
-) -> None:
-    '''
-    Allocate a actor-wide service nursery in ``brokerd``
-    such that feeds can be run in the background persistently by
-    the broker backend as needed.
-
-    '''
-    get_console_log(tractor.current_actor().loglevel)
-
-    from .feed import (
-        _bus,
-        get_feed_bus,
-    )
-    global _bus
-    assert not _bus
-
-    async with trio.open_nursery() as service_nursery:
-        # assign a nursery to the feeds bus for spawning
-        # background tasks from clients
-        get_feed_bus(brokername, service_nursery)
-
-        # unblock caller
-        await ctx.started()
-
-        # we pin this task to keep the feeds manager active until the
-        # parent actor decides to tear it down
-        await trio.sleep_forever()
@@ -42,10 +42,7 @@ from numba import (
     # float64, optional, int64,
 )

-from ..log import get_logger
-
-
-log = get_logger(__name__)
+from ._util import log


 def ds_m4(

@@ -38,8 +38,8 @@ from tractor.trionics import (
 import trio
 from trio_typing import TaskStatus

-from ..log import (
-    get_logger,
+from ._util import (
+    log,
     get_console_log,
 )
 from ..service import maybe_spawn_daemon

@@ -50,8 +50,6 @@ if TYPE_CHECKING:
     )
     from .feed import _FeedsBus

-log = get_logger(__name__)
-

 # highest frequency sample step is 1 second by default, though in
 # the future we may want to support shorter periods or a dynamic style

@@ -353,7 +351,9 @@ async def register_with_sampler(

         if open_index_stream:
             try:
-                async with ctx.open_stream() as stream:
+                async with ctx.open_stream(
+                    allow_overruns=True,
+                ) as stream:
                     if sub_for_broadcasts:
                         subs.add(stream)

@@ -362,7 +362,10 @@ async def register_with_sampler(
                         if msg == 'broadcast_all':
                             await Sampler.broadcast_all()
             finally:
-                if sub_for_broadcasts:
+                if (
+                    sub_for_broadcasts
+                    and subs
+                ):
                     subs.remove(stream)
         else:
             # if no shms are passed in we just wait until cancelled

@@ -429,7 +432,7 @@ async def spawn_samplerd(
 async def maybe_open_samplerd(

     loglevel: str | None = None,
-    **kwargs,
+    **pikerd_kwargs,

 ) -> tractor.Portal:  # noqa
     '''

@@ -442,9 +445,9 @@ async def maybe_open_samplerd(
     async with maybe_spawn_daemon(
         dname,
         service_task_target=spawn_samplerd,
-        spawn_args={'loglevel': loglevel},
+        spawn_args={},
         loglevel=loglevel,
-        **kwargs,
+        **pikerd_kwargs,

     ) as portal:
         yield portal

@@ -615,10 +618,10 @@ async def sample_and_broadcast(
         ] = bus.get_subs(sub_key)

         # NOTE: by default the broker backend doesn't append
-        # it's own "name" into the fqsn schema (but maybe it
+        # it's own "name" into the fqme schema (but maybe it
         # should?) so we have to manually generate the correct
         # key here.
-        fqsn = f'{broker_symbol}.{brokername}'
+        fqme = f'{broker_symbol}.{brokername}'
         lags: int = 0

         # TODO: speed up this loop in an AOT compiled lang (like

@@ -637,7 +640,7 @@ async def sample_and_broadcast(
                     # pushes to the ``uniform_rate_send()`` below.
                     try:
                         stream.send_nowait(
-                            (fqsn, quote)
+                            (fqme, quote)
                         )
                     except trio.WouldBlock:
                         overruns[sub_key] += 1

@@ -669,7 +672,7 @@ async def sample_and_broadcast(
                             raise trio.BrokenResourceError
                     else:
                         await stream.send(
-                            {fqsn: quote}
+                            {fqme: quote}
                         )

                 if cs.cancelled_caught:

@@ -782,9 +785,6 @@ async def uniform_rate_send(
     https://gist.github.com/njsmith/7ea44ec07e901cb78ebe1dd8dd846cb9

     '''
-    # try not to error-out on overruns of the subscribed client
-    stream._ctx._backpressure = True
-
     # TODO: compute the approx overhead latency per cycle
     left_to_sleep = throttle_period = 1/rate - 0.000616
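The fqsn -> fqme rename keeps the same dotted key layout where the broker name is appended last, since backends don't include it themselves. A minimal compose/split sketch (`make_fqme`/`split_broker` are hypothetical helper names; the split mirrors the `fqme.rpartition('.')` usage in `trades_dialogue()` above):

def make_fqme(broker_symbol: str, brokername: str) -> str:
    # same composition as `sample_and_broadcast()` above
    return f'{broker_symbol}.{brokername}'

def split_broker(fqme: str) -> tuple[str, str]:
    # peel the broker suffix back off
    bs_fqme, _, broker = fqme.rpartition('.')
    return bs_fqme, broker

fqme = make_fqme('btcusdt', 'binance')
assert fqme == 'btcusdt.binance'
assert split_broker(fqme) == ('btcusdt', 'binance')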
@@ -32,14 +32,11 @@ import numpy as np
 from numpy.lib import recfunctions as rfn
 import tractor

-from ..log import get_logger
+from ._util import log
 from ._source import base_iohlc_dtype
 from .types import Struct


-log = get_logger(__name__)
-
-
 # how much is probably dependent on lifestyle
 _secs_in_day = int(60 * 60 * 24)
 # we try for a buncha times, but only on a run-every-other-day kinda week.

@@ -649,7 +646,7 @@ def maybe_open_shm_array(
         token = _known_tokens[key]
         return attach_shm_array(token=token, **kwargs), False
     except KeyError:
-        log.warning(f"Could not find {key} in shms cache")
+        log.debug(f"Could not find {key} in shms cache")
         if dtype:
             token = _make_token(
                 key,

@@ -659,7 +656,7 @@ def maybe_open_shm_array(
             try:
                 return attach_shm_array(token=token, **kwargs), False
             except FileNotFoundError:
-                log.warning(f"Could not attach to shm with token {token}")
+                log.debug(f"Could not attach to shm with token {token}")

     # This actor does not know about memory
     # associated with the provided "key".
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -18,18 +18,10 @@
 numpy data source coversion helpers.
 """
 from __future__ import annotations
-from decimal import (
-    Decimal,
-    ROUND_HALF_EVEN,
-)
-from typing import Any

 from bidict import bidict
 import numpy as np

-from .types import Struct
-# from numba import from_dtype
-

 ohlc_fields = [
     ('time', float),

@@ -50,6 +42,7 @@ base_ohlc_dtype = np.dtype(ohlc_fields)

 # TODO: for now need to construct this manually for readonly arrays, see
 # https://github.com/numba/numba/issues/4511
+# from numba import from_dtype
 # numba_ohlc_dtype = from_dtype(base_ohlc_dtype)

 # map time frame "keys" to seconds values

@@ -64,32 +57,6 @@ tf_in_1s = bidict({
 })


-def mk_fqsn(
-    provider: str,
-    symbol: str,
-
-) -> str:
-    '''
-    Generate a "fully qualified symbol name" which is
-    a reverse-hierarchical cross broker/provider symbol
-
-    '''
-    return '.'.join([symbol, provider]).lower()
-
-
-def float_digits(
-    value: float,
-) -> int:
-    '''
-    Return the number of precision digits read from a float value.
-
-    '''
-    if value == 0:
-        return 0
-
-    return int(-Decimal(str(value)).as_tuple().exponent)
-
-
 def ohlc_zeros(length: int) -> np.ndarray:
     """Construct an OHLC field formatted structarray.

@@ -100,220 +67,6 @@ def ohlc_zeros(length: int) -> np.ndarray:
     return np.zeros(length, dtype=base_ohlc_dtype)


-def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
-    '''
-    Unpack a fully-qualified-symbol-name to ``tuple``.
-
-    '''
-    venue = ''
-    suffix = ''
-
-    # TODO: probably reverse the order of all this XD
-    tokens = fqsn.split('.')
-    if len(tokens) < 3:
-        # probably crypto
-        symbol, broker = tokens
-        return (
-            broker,
-            symbol,
-            '',
-        )
-
-    elif len(tokens) > 3:
-        symbol, venue, suffix, broker = tokens
-    else:
-        symbol, venue, broker = tokens
-        suffix = ''
-
-    # head, _, broker = fqsn.rpartition('.')
-    # symbol, _, suffix = head.rpartition('.')
-    return (
-        broker,
-        '.'.join([symbol, venue]),
-        suffix,
-    )
-
-
-class MktPair(Struct, frozen=True):
-
-    src: str  # source asset name being used to buy
-    src_type: str  # source asset's financial type/classification name
-    # ^ specifies a "class" of financial instrument
-    # egs. stock, futer, option, bond etc.
-
-    dst: str  # destination asset name being bought
-    dst_type: str  # destination asset's financial type/classification name
-
-    price_tick: float  # minimum price increment value increment
-    price_tick_digits: int  # required decimal digits for above
-
-    size_tick: float  # minimum size (aka vlm) increment value increment
-    size_tick_digits: int  # required decimal digits for above
-
-    venue: str | None = None  # market venue provider name
-    expiry: str | None = None  # for derivs, expiry datetime parseable str
-
-    # for derivs, info describing contract, egs.
-    # strike price, call or put, swap type, exercise model, etc.
-    contract_info: str | None = None
-
-    @classmethod
-    def from_msg(
-        self,
-        msg: dict[str, Any],
-
-    ) -> MktPair:
-        '''
-        Constructor for a received msg-dict normally received over IPC.
-
-        '''
-        ...
-
-    # fqa, fqma, .. etc. see issue:
-    # https://github.com/pikers/piker/issues/467
-    @property
-    def fqsn(self) -> str:
-        '''
-        Return the fully qualified market (endpoint) name for the
-        pair of transacting assets.
-
-        '''
-        ...
-
-
-# TODO: rework the below `Symbol` (which was originally inspired and
-# derived from stuff in quantdom) into a simpler, ipc msg ready, market
-# endpoint meta-data container type as per the drafted interace above.
-class Symbol(Struct):
-    '''
-    I guess this is some kinda container thing for dealing with
-    all the different meta-data formats from brokers?
-
-    '''
-    key: str
-    tick_size: float = 0.01
-    lot_tick_size: float = 0.0  # "volume" precision as min step value
-    tick_size_digits: int = 2
-    lot_size_digits: int = 0
-    suffix: str = ''
-    broker_info: dict[str, dict[str, Any]] = {}
-
-    @classmethod
-    def from_broker_info(
-        cls,
-        broker: str,
-        symbol: str,
-        info: dict[str, Any],
-        suffix: str = '',
-
-    ) -> Symbol:
-
-        tick_size = info.get('price_tick_size', 0.01)
-        lot_size = info.get('lot_tick_size', 0.0)
-
-        return Symbol(
-            key=symbol,
-            tick_size=tick_size,
-            lot_tick_size=lot_size,
-            tick_size_digits=float_digits(tick_size),
-            lot_size_digits=float_digits(lot_size),
-            suffix=suffix,
-            broker_info={broker: info},
-        )
-
-    @classmethod
-    def from_fqsn(
-        cls,
-        fqsn: str,
-        info: dict[str, Any],
-
-    ) -> Symbol:
-        broker, key, suffix = unpack_fqsn(fqsn)
-        return cls.from_broker_info(
-            broker,
-            key,
-            info=info,
-            suffix=suffix,
-        )
-
-    @property
-    def type_key(self) -> str:
-        return list(self.broker_info.values())[0]['asset_type']
-
-    @property
-    def brokers(self) -> list[str]:
-        return list(self.broker_info.keys())
-
-    def nearest_tick(self, value: float) -> float:
-        '''
-        Return the nearest tick value based on mininum increment.
-
-        '''
-        mult = 1 / self.tick_size
-        return round(value * mult) / mult
-
-    def front_feed(self) -> tuple[str, str]:
-        '''
-        Return the "current" feed key for this symbol.
-
-        (i.e. the broker + symbol key in a tuple).
-
-        '''
-        return (
-            list(self.broker_info.keys())[0],
-            self.key,
-        )
-
-    def tokens(self) -> tuple[str]:
-        broker, key = self.front_feed()
-        if self.suffix:
-            return (key, self.suffix, broker)
-        else:
-            return (key, broker)
-
-    @property
-    def fqsn(self) -> str:
-        return '.'.join(self.tokens()).lower()
-
-    def front_fqsn(self) -> str:
-        '''
-        fqsn = "fully qualified symbol name"
-
-        Basically the idea here is for all client-ish code (aka programs/actors
-        that ask the provider agnostic layers in the stack for data) should be
-        able to tell which backend / venue / derivative each data feed/flow is
-        from by an explicit string key of the current form:
-
-        <instrumentname>.<venue>.<suffixwithmetadata>.<brokerbackendname>
-
-        TODO: I have thoughts that we should actually change this to be
-        more like an "attr lookup" (like how the web should have done
-        urls, but marketting peeps ruined it etc. etc.):
-
-        <broker>.<venue>.<instrumentname>.<suffixwithmetadata>
-
-        '''
-        tokens = self.tokens()
-        fqsn = '.'.join(map(str.lower, tokens))
-        return fqsn
-
-    def quantize_size(
-        self,
-        size: float,
-
-    ) -> Decimal:
-        '''
-        Truncate input ``size: float`` using ``Decimal``
-        and ``.lot_size_digits``.
-
-        '''
-        digits = self.lot_size_digits
-        return Decimal(size).quantize(
-            Decimal(f'1.{"0".ljust(digits, "0")}'),
-            rounding=ROUND_HALF_EVEN
-        )
-
-
 def _nan_to_closest_num(array: np.ndarray):
     """Return interpolated values instead of NaN.
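Most of what is deleted here (`MktPair`, `Symbol`, the fq*n helpers) presumably moves into the `..accounting._mktinfo` module the paper engine now imports from. The `Decimal` quantization trick from the removed `Symbol.quantize_size()` is worth a standalone restatement since it is easy to get wrong:

from decimal import Decimal, ROUND_HALF_EVEN

def quantize_size(size: float, lot_size_digits: int) -> Decimal:
    # build a quantum like Decimal('1.000') for 3 digits of precision
    quantum = Decimal(f'1.{"0".ljust(lot_size_digits, "0")}')
    return Decimal(size).quantize(quantum, rounding=ROUND_HALF_EVEN)

assert str(quantize_size(0.12345, 3)) == '0.123'
assert str(quantize_size(0.125, 2)) == '0.12'  # half-even: ties go to even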
@@ -0,0 +1,34 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Data layer module commons.
+
+'''
+from functools import partial
+
+from ..log import (
+    get_logger,
+    get_console_log,
+)
+subsys: str = 'piker.data'
+
+log = get_logger(subsys)
+
+get_console_log = partial(
+    get_console_log,
+    name=subsys,
+)
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -18,23 +18,29 @@
 ToOlS fOr CoPInG wITh "tHE wEB" protocols.

 """
+from __future__ import annotations
 from contextlib import (
-    asynccontextmanager,
-    AsyncExitStack,
+    asynccontextmanager as acm,
 )
 from itertools import count
+from functools import partial
 from types import ModuleType
 from typing import (
     Any,
     Optional,
     Callable,
+    AsyncContextManager,
     AsyncGenerator,
     Iterable,
 )
 import json

 import trio
-import trio_websocket
+from trio_typing import TaskStatus
+from trio_websocket import (
+    WebSocketConnection,
+    open_websocket_url,
+)
 from wsproto.utilities import LocalProtocolError
 from trio_websocket._impl import (
     ConnectionClosed,

@@ -44,20 +50,23 @@ from trio_websocket._impl import (
     ConnectionTimeout,
 )

-from ..log import get_logger
+from ._util import log

 from .types import Struct

-log = get_logger(__name__)
-

 class NoBsWs:
     '''
     Make ``trio_websocket`` sockets stay up no matter the bs.

-    You can provide a ``fixture`` async-context-manager which will be
-    enter/exitted around each reconnect operation.
+    A shim interface that allows client code to stream from some
+    ``WebSocketConnection`` but where any connectivy bs is handled
+    automatcially and entirely in the background.
+
+    NOTE: this type should never be created directly but instead is
+    provided via the ``open_autorecon_ws()`` factor below.

     '''
+    # apparently we can QoS for all sorts of reasons..so catch em.
     recon_errors = (
         ConnectionClosed,
         DisconnectionTimeout,
@@ -70,68 +79,42 @@ class NoBsWs:
     def __init__(
         self,
         url: str,
-        stack: AsyncExitStack,
-        fixture: Optional[Callable] = None,
+        rxchan: trio.MemoryReceiveChannel,
+        msg_recv_timeout: float,

         serializer: ModuleType = json
     ):
         self.url = url
-        self.fixture = fixture
-        self._stack = stack
-        self._ws: 'WebSocketConnection' = None  # noqa
-
-        # TODO: is there some method we can call
-        # on the underlying `._ws` to get this?
-        self._connected: bool = False
-
-    async def _connect(
-        self,
-        tries: int = 1000,
-    ) -> None:
-
-        self._connected = False
-        while True:
-            try:
-                await self._stack.aclose()
-            except self.recon_errors:
-                await trio.sleep(0.5)
-            else:
-                break
-
-        last_err = None
-        for i in range(tries):
-            try:
-                self._ws = await self._stack.enter_async_context(
-                    trio_websocket.open_websocket_url(self.url)
-                )
-
-                if self.fixture is not None:
-                    # rerun user code fixture
-                    ret = await self._stack.enter_async_context(
-                        self.fixture(self)
-                    )
-
-                    assert ret is None
-
-                log.info(f'Connection success: {self.url}')
-
-                self._connected = True
-                return self._ws
-
-            except self.recon_errors as err:
-                last_err = err
-                log.error(
-                    f'{self} connection bail with '
-                    f'{type(err)}...retry attempt {i}'
-                )
-                await trio.sleep(0.5)
-                self._connected = False
-                continue
-        else:
-            log.exception('ws connection fail...')
-            raise last_err
+        self._rx = rxchan
+        self._timeout = msg_recv_timeout
+
+        # signaling between caller and relay task which determines when
+        # socket is connected (and subscribed).
+        self._connected: trio.Event = trio.Event()
+
+        # dynamically reset by the bg relay task
+        self._ws: WebSocketConnection | None = None
+        self._cs: trio.CancelScope | None = None
+
+        # interchange codec methods
+        # TODO: obviously the method API here may be different
+        # for another interchange format..
+        self._dumps: Callable = serializer.dumps
+        self._loads: Callable = serializer.loads

     def connected(self) -> bool:
-        return self._connected
+        return self._connected.is_set()
+
+    async def reset(self) -> None:
+        '''
+        Reset the underlying ws connection by cancelling
+        the bg relay task and waiting for it to signal
+        a new connection.
+
+        '''
+        self._connected = trio.Event()
+        self._cs.cancel()
+        await self._connected.wait()

     async def send_msg(
         self,
@@ -139,18 +122,15 @@ class NoBsWs:
     ) -> None:
         while True:
             try:
-                return await self._ws.send_message(json.dumps(data))
+                msg: Any = self._dumps(data)
+                return await self._ws.send_message(msg)
             except self.recon_errors:
-                await self._connect()
+                await self.reset()

-    async def recv_msg(
-        self,
-    ) -> Any:
-        while True:
-            try:
-                return json.loads(await self._ws.get_message())
-            except self.recon_errors:
-                await self._connect()
+    async def recv_msg(self) -> Any:
+        msg: Any = await self._rx.receive()
+        data = self._loads(msg)
+        return data

     def __aiter__(self):
         return self

@@ -158,27 +138,218 @@ class NoBsWs:
     async def __anext__(self):
         return await self.recv_msg()

+    def set_recv_timeout(
+        self,
+        timeout: float,
+    ) -> None:
+        self._timeout = timeout
+

-@asynccontextmanager
+async def _reconnect_forever(
+    url: str,
+    snd: trio.MemorySendChannel,
+    nobsws: NoBsWs,
+    reset_after: int,  # msg recv timeout before reset attempt
+
+    fixture: AsyncContextManager | None = None,
+    task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
+
+) -> None:
+
+    # TODO: can we just report "where" in the call stack
+    # the client code is using the ws stream?
+    # Maybe we can just drop this since it's already in the log msg
+    # orefix?
+    if fixture is not None:
+        src_mod: str = fixture.__module__
+    else:
+        src_mod: str = 'unknown'
+
+    async def proxy_msgs(
+        ws: WebSocketConnection,
+        pcs: trio.CancelScope,  # parent cancel scope
+    ):
+        '''
+        Receive (under `timeout` deadline) all msgs from from underlying
+        websocket and relay them to (calling) parent task via ``trio``
+        mem chan.
+
+        '''
+        # after so many msg recv timeouts, reset the connection
+        timeouts: int = 0
+
+        while True:
+            with trio.move_on_after(
+                # can be dynamically changed by user code
+                nobsws._timeout,
+            ) as cs:
+                try:
+                    msg: Any = await ws.get_message()
+                    await snd.send(msg)
+                except nobsws.recon_errors:
+                    log.exception(
+                        f'{src_mod}\n'
+                        f'{url} connection bail with:'
+                    )
+                    await trio.sleep(0.5)
+                    pcs.cancel()
+
+                    # go back to reonnect loop in parent task
+                    return
+
+            if cs.cancelled_caught:
+                timeouts += 1
+                if timeouts > reset_after:
+                    log.error(
+                        f'{src_mod}\n'
+                        'WS feed seems down and slow af.. reconnecting\n'
+                    )
+                    pcs.cancel()
+
+                    # go back to reonnect loop in parent task
+                    return
+
+    async def open_fixture(
+        fixture: AsyncContextManager,
+        nobsws: NoBsWs,
+        task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
+    ):
+        '''
+        Open user provided `@acm` and sleep until any connection
+        reset occurs.
+
+        '''
+        async with fixture(nobsws) as ret:
+            assert ret is None
+            task_status.started()
+            await trio.sleep_forever()
|
|
||||||
|
# last_err = None
|
||||||
|
nobsws._connected = trio.Event()
|
||||||
|
task_status.started()
|
||||||
|
|
||||||
|
while not snd._closed:
|
||||||
|
log.info(
|
||||||
|
f'{src_mod}\n'
|
||||||
|
f'{url} trying (RE)CONNECT'
|
||||||
|
)
|
||||||
|
|
||||||
|
async with trio.open_nursery() as n:
|
||||||
|
cs = nobsws._cs = n.cancel_scope
|
||||||
|
ws: WebSocketConnection
|
||||||
|
async with open_websocket_url(url) as ws:
|
||||||
|
nobsws._ws = ws
|
||||||
|
log.info(
|
||||||
|
f'{src_mod}\n'
|
||||||
|
f'Connection success: {url}'
|
||||||
|
)
|
||||||
|
|
||||||
|
# begin relay loop to forward msgs
|
||||||
|
n.start_soon(
|
||||||
|
proxy_msgs,
|
||||||
|
ws,
|
||||||
|
cs,
|
||||||
|
)
|
||||||
|
|
||||||
|
if fixture is not None:
|
||||||
|
log.info(
|
||||||
|
f'{src_mod}\n'
|
||||||
|
f'Entering fixture: {fixture}'
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO: should we return an explicit sub-cs
|
||||||
|
# from this fixture task?
|
||||||
|
await n.start(
|
||||||
|
open_fixture,
|
||||||
|
fixture,
|
||||||
|
nobsws,
|
||||||
|
)
|
||||||
|
|
||||||
|
# indicate to wrapper / opener that we are up and block
|
||||||
|
# to let tasks run **inside** the ws open block above.
|
||||||
|
nobsws._connected.set()
|
||||||
|
await trio.sleep_forever()
|
||||||
|
|
||||||
|
# ws open block end
|
||||||
|
# nursery block end
|
||||||
|
nobsws._connected = trio.Event()
|
||||||
|
if cs.cancelled_caught:
|
||||||
|
log.cancel(
|
||||||
|
f'{url} connection cancelled!'
|
||||||
|
)
|
||||||
|
# if wrapper cancelled us, we expect it to also
|
||||||
|
# have re-assigned a new event
|
||||||
|
assert (
|
||||||
|
nobsws._connected
|
||||||
|
and not nobsws._connected.is_set()
|
||||||
|
)
|
||||||
|
|
||||||
|
# -> from here, move to next reconnect attempt
|
||||||
|
|
||||||
|
else:
|
||||||
|
log.exception(
|
||||||
|
f'{src_mod}\n'
|
||||||
|
'ws connection closed by client...'
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
async def open_autorecon_ws(
|
async def open_autorecon_ws(
|
||||||
url: str,
|
url: str,
|
||||||
|
|
||||||
# TODO: proper type cannot smh
|
fixture: AsyncContextManager | None = None,
|
||||||
fixture: Optional[Callable] = None,
|
|
||||||
|
# time in sec between msgs received before
|
||||||
|
# we presume connection might need a reset.
|
||||||
|
msg_recv_timeout: float = 16,
|
||||||
|
|
||||||
|
# count of the number of above timeouts before connection reset
|
||||||
|
reset_after: int = 3,
|
||||||
|
|
||||||
) -> AsyncGenerator[tuple[...], NoBsWs]:
|
) -> AsyncGenerator[tuple[...], NoBsWs]:
|
||||||
"""Apparently we can QoS for all sorts of reasons..so catch em.
|
'''
|
||||||
|
An auto-reconnect websocket (wrapper API) around
|
||||||
|
``trio_websocket.open_websocket_url()`` providing automatic
|
||||||
|
re-connection on network errors, msg latency and thus roaming.
|
||||||
|
|
||||||
"""
|
Here we implement a re-connect websocket interface where a bg
|
||||||
async with AsyncExitStack() as stack:
|
nursery runs ``WebSocketConnection.receive_message()``s in a loop
|
||||||
ws = NoBsWs(url, stack, fixture=fixture)
|
and restarts the full http(s) handshake on catches of certain
|
||||||
await ws._connect()
|
connetivity errors, or some user defined recv timeout.
|
||||||
|
|
||||||
|
You can provide a ``fixture`` async-context-manager which will be
|
||||||
|
entered/exitted around each connection reset; eg. for (re)requesting
|
||||||
|
subscriptions without requiring streaming setup code to rerun.
|
||||||
|
|
||||||
|
'''
|
||||||
|
snd: trio.MemorySendChannel
|
||||||
|
rcv: trio.MemoryReceiveChannel
|
||||||
|
snd, rcv = trio.open_memory_channel(616)
|
||||||
|
|
||||||
|
async with trio.open_nursery() as n:
|
||||||
|
nobsws = NoBsWs(
|
||||||
|
url,
|
||||||
|
rcv,
|
||||||
|
msg_recv_timeout=msg_recv_timeout,
|
||||||
|
)
|
||||||
|
await n.start(
|
||||||
|
partial(
|
||||||
|
_reconnect_forever,
|
||||||
|
url,
|
||||||
|
snd,
|
||||||
|
nobsws,
|
||||||
|
fixture=fixture,
|
||||||
|
reset_after=reset_after,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
await nobsws._connected.wait()
|
||||||
|
assert nobsws._cs
|
||||||
|
assert nobsws.connected()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
yield ws
|
yield nobsws
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
await stack.aclose()
|
n.cancel_scope.cancel()
|
||||||
|
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
@ -195,7 +366,7 @@ class JSONRPCResult(Struct):
|
||||||
error: Optional[dict] = None
|
error: Optional[dict] = None
|
||||||
|
|
||||||
|
|
||||||
@asynccontextmanager
|
@acm
|
||||||
async def open_jsonrpc_session(
|
async def open_jsonrpc_session(
|
||||||
url: str,
|
url: str,
|
||||||
start_id: int = 0,
|
start_id: int = 0,
|
||||||
|
|
|
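A minimal usage sketch of the reworked API above, assuming `open_autorecon_ws` is imported from the patched module (exact import path not shown in this diff; the URL and subscribe payload are hypothetical): the `fixture` acm is re-entered by `_reconnect_forever()` on every (re)connect, so subscriptions get re-requested without the consuming code re-running any setup.

    from contextlib import asynccontextmanager as acm
    import trio
    # `open_autorecon_ws` comes from the module patched above.

    @acm
    async def subscribe(ws):
        # hypothetical exchange-specific subscription msg
        await ws.send_msg({'op': 'subscribe', 'topic': 'trade'})
        yield  # must yield `None` per the `open_fixture()` assert

    async def main():
        async with open_autorecon_ws(
            'wss://api.example.com/ws',  # hypothetical url
            fixture=subscribe,
            msg_recv_timeout=16,  # secs of silence before a timeout
            reset_after=3,  # timeouts tolerated before reconnect
        ) as ws:
            async for msg in ws:  # decoded via the serializer's `.loads()`
                print(msg)

    trio.run(main)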
@@ -32,14 +32,11 @@ from ..service.marketstore import (
 )
 from ..cli import cli
 from .. import watchlists as wl
-from ..log import (
-    get_logger,
+from ._util import (
+    log,
 )


-log = get_logger(__name__)


 @cli.command()
 @click.option(
     '--url',
@@ -187,10 +184,10 @@ def storage(
        symbol = symbols[0]
        async with open_tsdb_client(symbol) as storage:
            if delete:
-                for fqsn in symbols:
+                for fqme in symbols:
                    syms = await storage.client.list_symbols()

-                    resp60s = await storage.delete_ts(fqsn, 60)
+                    resp60s = await storage.delete_ts(fqme, 60)

                    msgish = resp60s.ListFields()[0][1]
                    if 'error' in str(msgish):
@@ -202,15 +199,15 @@ def storage(
                        # well, if we ever can make this work we
                        # probably want to dogsplain the real reason
                        # for the delete errurz..llululu
-                        if fqsn not in syms:
-                            log.error(f'Pair {fqsn} dne in DB')
+                        if fqme not in syms:
+                            log.error(f'Pair {fqme} dne in DB')

-                        log.error(f'Deletion error: {fqsn}\n{msgish}')
+                        log.error(f'Deletion error: {fqme}\n{msgish}')

-                    resp1s = await storage.delete_ts(fqsn, 1)
+                    resp1s = await storage.delete_ts(fqme, 1)
                    msgish = resp1s.ListFields()[0][1]
                    if 'error' in str(msgish):
-                        log.error(f'Deletion error: {fqsn}\n{msgish}')
+                        log.error(f'Deletion error: {fqme}\n{msgish}')

    trio.run(main)
piker/data/feed.py — 1002 changed lines; file diff suppressed because it is too large.
@@ -15,7 +15,7 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 """
-abstractions for organizing, managing and generally operating-on
+Public abstractions for organizing, managing and generally operating-on
 real-time data processing data-structures.

 "Streams, flumes, cascades and flows.."
@@ -30,10 +30,9 @@ import tractor
 import pendulum
 import numpy as np

+from ..accounting import MktPair
+from ._util import log
 from .types import Struct
-from ._source import (
-    Symbol,
-)
 from ._sharedmem import (
     attach_shm_array,
     ShmArray,
@@ -89,7 +88,7 @@ class Flume(Struct):
     queuing properties.

     '''
-    symbol: Symbol
+    mkt: MktPair
     first_quote: dict
     _rt_shm_token: _Token

@@ -172,8 +171,9 @@ class Flume(Struct):

     # TODO: get native msgspec decoding for these workinn
     def to_msg(self) -> dict:

         msg = self.to_dict()
-        msg['symbol'] = msg['symbol'].to_dict()
+        msg['mkt'] = self.mkt.to_dict()

         # can't serialize the stream or feed objects, it's expected
         # you'll have a ref to it since this msg should be rxed on
@@ -183,12 +183,19 @@ class Flume(Struct):
         return msg

     @classmethod
-    def from_msg(cls, msg: dict) -> dict:
-        symbol = Symbol(**msg.pop('symbol'))
-        return cls(
-            symbol=symbol,
-            **msg,
-        )
+    def from_msg(
+        cls,
+        msg: dict,
+
+    ) -> dict:
+        '''
+        Load from an IPC msg presumably in either `dict` or
+        `msgspec.Struct` form.
+
+        '''
+        mkt_msg = msg.pop('mkt')
+        mkt = MktPair.from_msg(mkt_msg)
+        return cls(mkt=mkt, **msg)

     def get_index(
         self,
@@ -208,3 +215,23 @@ class Flume(Struct):
         )
         imx = times.shape[0] - 1
         return min(first, imx)
+
+    # only set by external msg or creator, never
+    # manually!
+    _has_vlm: bool = True
+
+    def has_vlm(self) -> bool:
+
+        if not self._has_vlm:
+            return False
+
+        # make sure that the instrument supports volume history
+        # (sometimes this is not the case for some commodities and
+        # derivatives)
+        vlm: np.ndarray = self.rt_shm.array['volume']
+        return not bool(
+            np.all(np.isin(vlm, -1))
+            or np.all(np.isnan(vlm))
+        )
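The new `has_vlm()` predicate treats a volume column that is all `-1` sentinels or all `NaN`s as "no volume history supported"; a self-contained sketch of just that check, with made-up sample columns:

    import numpy as np

    def col_has_vlm(vlm: np.ndarray) -> bool:
        # same boolean logic as `Flume.has_vlm()` above: a column of
        # all -1 sentinels or all NaNs means no volume for the market.
        return not bool(
            np.all(np.isin(vlm, -1))
            or np.all(np.isnan(vlm))
        )

    assert not col_has_vlm(np.array([-1., -1., -1.]))  # eg. some derivatives
    assert not col_has_vlm(np.array([np.nan, np.nan]))
    assert col_has_vlm(np.array([0., 10., 42.]))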
@@ -0,0 +1,810 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Historical data business logic for load, backfill and tsdb storage.
+
+'''
+from __future__ import annotations
+from collections import (
+    Counter,
+)
+from datetime import datetime
+from functools import partial
+import time
+from types import ModuleType
+from typing import (
+    Callable,
+    Optional,
+    TYPE_CHECKING,
+)
+
+import trio
+from trio_typing import TaskStatus
+import tractor
+import pendulum
+import numpy as np
+
+from .. import config
+from ..accounting import (
+    MktPair,
+)
+from ._util import (
+    log,
+)
+from ..service import (
+    check_for_service,
+)
+from ._sharedmem import (
+    maybe_open_shm_array,
+    ShmArray,
+    _secs_in_day,
+)
+from ._source import base_iohlc_dtype
+from ._sampling import (
+    open_sample_stream,
+)
+from ..brokers._util import (
+    DataUnavailable,
+)
+
+if TYPE_CHECKING:
+    from ..service.marketstore import Storage
+    from .feed import _FeedsBus
+
+
+def diff_history(
+    array: np.ndarray,
+    timeframe: int,
+    start_dt: datetime,
+    end_dt: datetime,
+    last_tsdb_dt: datetime | None = None
+
+) -> np.ndarray:
+
+    # no diffing with tsdb dt index possible..
+    if last_tsdb_dt is None:
+        return array
+
+    time = array['time']
+    return array[time > last_tsdb_dt.timestamp()]
+
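`diff_history()` reduces to a single boolean-mask slice; a tiny worked example with a hypothetical 4-sample frame where the tsdb already holds everything up to epoch 101:

    import numpy as np
    import pendulum

    arr = np.array(
        [(100.,), (101.,), (102.,), (103.,)],
        dtype=[('time', float)],
    )
    last_tsdb_dt = pendulum.from_timestamp(101)

    # keep only rows strictly newer than the tsdb's last datum
    out = arr[arr['time'] > last_tsdb_dt.timestamp()]
    assert list(out['time']) == [102., 103.]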
+
+async def start_backfill(
+    mod: ModuleType,
+    mkt: MktPair,
+    shm: ShmArray,
+    timeframe: float,
+    sampler_stream: tractor.MsgStream,
+    feed_is_live: trio.Event,
+
+    last_tsdb_dt: Optional[datetime] = None,
+    storage: Optional[Storage] = None,
+    write_tsdb: bool = True,
+    tsdb_is_up: bool = False,
+
+    task_status: TaskStatus[tuple] = trio.TASK_STATUS_IGNORED,
+
+) -> int:
+
+    hist: Callable[
+        [int, datetime, datetime],
+        tuple[np.ndarray, str]
+    ]
+    config: dict[str, int]
+
+    async with mod.open_history_client(
+        mkt,
+    ) as (hist, config):
+        log.info(f'{mod} history client returned backfill config: {config}')
+
+        # get latest query's worth of history all the way
+        # back to what is recorded in the tsdb
+        array, start_dt, end_dt = await hist(
+            timeframe,
+            end_dt=None,
+        )
+        times = array['time']
+
+        # sample period step size in seconds
+        step_size_s = (
+            pendulum.from_timestamp(times[-1])
+            - pendulum.from_timestamp(times[-2])
+        ).seconds
+
+        # if the market is open (aka we have a live feed) but the
+        # history sample step index seems off we report the surrounding
+        # data and drop into a bp. this case shouldn't really ever
+        # happen if we're doing history retrieval correctly.
+        if (
+            step_size_s == 60
+            and feed_is_live.is_set()
+        ):
+            inow = round(time.time())
+            diff = inow - times[-1]
+            if abs(diff) > 60:
+                surr = array[-6:]
+                diff_in_mins = round(diff/60., ndigits=2)
+                log.warning(
+                    f'STEP ERROR `{mkt.fqme}` for period {step_size_s}s:\n'
+                    f'Off by `{diff}` seconds (or `{diff_in_mins}` mins)\n'
+                    'Surrounding 6 time stamps:\n'
+                    f'{list(surr["time"])}\n'
+                    'Here is surrounding 6 samples:\n'
+                    f'{surr}\n'
+                )
+
+                # uncomment this for a hacker who wants to investigate
+                # this case manually..
+                # await tractor.breakpoint()
+
+        # frame's worth of sample-period-steps, in seconds
+        frame_size_s = len(array) * step_size_s
+
+        to_push = diff_history(
+            array,
+            timeframe,
+            start_dt,
+            end_dt,
+            last_tsdb_dt=last_tsdb_dt,
+        )
+
+        log.info(f'Pushing {to_push.size} to shm!')
+        shm.push(to_push, prepend=True)
+
+        # TODO: *** THIS IS A BUG ***
+        # we need to only broadcast to subscribers for this fqme..
+        # otherwise all fsps get reset on every chart..
+        await sampler_stream.send('broadcast_all')
+
+        # signal that backfilling to tsdb's end datum is complete
+        bf_done = trio.Event()
+
+        # let caller unblock and deliver latest history frame
+        task_status.started((
+            start_dt,
+            end_dt,
+            bf_done,
+        ))
+
+        # based on the sample step size, maybe load a certain amount history
+        if last_tsdb_dt is None:
+            if step_size_s not in (1, 60):
+                raise ValueError(
+                    '`piker` only needs to support 1m and 1s sampling '
+                    'but ur api is trying to deliver a longer '
+                    f'timeframe of {step_size_s} seconds..\n'
+                    'So yuh.. dun do dat brudder.'
+                )
+
+            # when no tsdb "last datum" is provided, we just load
+            # some near-term history.
+            periods = {
+                1: {'days': 1},
+                60: {'days': 14},
+            }
+
+            if tsdb_is_up:
+                # do a decently sized backfill and load it into storage.
+                periods = {
+                    1: {'days': 6},
+                    60: {'years': 6},
+                }
+
+            period_duration = periods[step_size_s]
+
+            # NOTE: manually set the "latest" datetime which we intend to
+            # backfill history "until" so as to adhere to the history
+            # settings above when the tsdb is detected as being empty.
+            last_tsdb_dt = start_dt.subtract(**period_duration)
+
+        # configure async query throttling
+        # rate = config.get('rate', 1)
+        # XXX: legacy from ``trimeter`` code but unsupported now.
+        # erlangs = config.get('erlangs', 1)
+
+        # avoid duplicate history frames with a set of datetime frame
+        # starts and associated counts of how many duplicates we see
+        # per time stamp.
+        starts: Counter[datetime] = Counter()
+
+        # inline sequential loop where we simply pass the
+        # last retrieved start dt to the next request as
+        # its end dt.
+        while end_dt > last_tsdb_dt:
+            log.debug(
+                f'Requesting {step_size_s}s frame ending in {start_dt}'
+            )
+
+            try:
+                array, next_start_dt, end_dt = await hist(
+                    timeframe,
+                    end_dt=start_dt,
+                )
+
+            # broker says there never was or is no more history to pull
+            except DataUnavailable:
+                log.warning(
+                    f'NO-MORE-DATA: backend {mod.name} halted history!?'
+                )
+
+                # ugh, what's a better way?
+                # TODO: fwiw, we probably want a way to signal a throttle
+                # condition (eg. with ib) so that we can halt the
+                # request loop until the condition is resolved?
+                return
+
+            if (
+                next_start_dt in starts
+                and starts[next_start_dt] <= 6
+            ):
+                start_dt = min(starts)
+                log.warning(
+                    f"{mkt.fqme}: skipping duplicate frame @ {next_start_dt}"
+                )
+                starts[start_dt] += 1
+                continue
+
+            elif starts[next_start_dt] > 6:
+                log.warning(
+                    f'NO-MORE-DATA: backend {mod.name} before {next_start_dt}?'
+                )
+                return
+
+            # only update new start point if not-yet-seen
+            start_dt = next_start_dt
+            starts[start_dt] += 1
+
+            assert array['time'][0] == start_dt.timestamp()
+
+            diff = end_dt - start_dt
+            frame_time_diff_s = diff.seconds
+            expected_frame_size_s = frame_size_s + step_size_s
+
+            if frame_time_diff_s > expected_frame_size_s:
+
+                # XXX: query result includes a start point prior to our
+                # expected "frame size" and thus is likely some kind of
+                # history gap (eg. market closed period, outage, etc.)
+                # so just report it to console for now.
+                log.warning(
+                    f'History frame ending @ {end_dt} appears to have a gap:\n'
+                    f'{diff} ~= {frame_time_diff_s} seconds'
+                )
+
+            to_push = diff_history(
+                array,
+                timeframe,
+                start_dt,
+                end_dt,
+                last_tsdb_dt=last_tsdb_dt,
+            )
+            ln = len(to_push)
+            if ln:
+                log.info(f'{ln} bars for {start_dt} -> {end_dt}')
+
+            else:
+                log.warning(
+                    f'{ln} BARS TO PUSH after diff?!: {start_dt} -> {end_dt}'
+                )
+
+            # bail gracefully on shm allocation overrun/full condition
+            try:
+                shm.push(to_push, prepend=True)
+            except ValueError:
+                log.info(
+                    f'Shm buffer overrun on: {start_dt} -> {end_dt}?'
+                )
+                # can't push the entire frame? so
+                # push only the amount that can fit..
+                break
+
+            log.info(
+                f'Shm pushed {ln} frame:\n'
+                f'{start_dt} -> {end_dt}'
+            )
+
+            if (
+                storage is not None
+                and write_tsdb
+            ):
+                log.info(
+                    f'Writing {ln} frame to storage:\n'
+                    f'{start_dt} -> {end_dt}'
+                )
+
+                if mkt.dst.atype not in {'crypto', 'crypto_currency'}:
+                    # for now, our table key schema is not including
+                    # the dst[/src] source asset token.
+                    col_sym_key: str = mkt.get_fqme(
+                        delim_char='',
+                        without_src=True,
+                    )
+                else:
+                    col_sym_key: str = mkt.get_fqme(delim_char='')
+
+                await storage.write_ohlcv(
+                    col_sym_key,
+                    to_push,
+                    timeframe,
+                )
+
+        # TODO: can we only trigger this if the respective
+        # history is "in view"?!?
+
+        # XXX: extremely important, there can be no checkpoints
+        # in the block above to avoid entering new ``frames``
+        # values while we're pipelining the current ones to
+        # memory...
+        await sampler_stream.send('broadcast_all')
+
+        # short-circuit (for now)
+        bf_done.set()
+
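The `starts: Counter` guard in the request loop above tolerates a bounded number of repeated frame-start stamps before concluding the backend has no earlier data; a simplified, self-contained rendition of that branching (ints standing in for datetimes, and without the `start_dt = min(starts)` rewind the real loop also does):

    from collections import Counter

    starts: Counter[int] = Counter()

    def classify(next_start: int) -> str:
        # simplified version of the dedup branching in `start_backfill()`
        if (
            next_start in starts
            and starts[next_start] <= 6
        ):
            starts[next_start] += 1
            return 'skip-duplicate'
        elif starts[next_start] > 6:
            return 'no-more-data'
        starts[next_start] += 1
        return 'push-frame'

    assert classify(42) == 'push-frame'
    assert all(classify(42) == 'skip-duplicate' for _ in range(6))
    assert classify(42) == 'no-more-data'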
+
+async def basic_backfill(
+    bus: _FeedsBus,
+    mod: ModuleType,
+    mkt: MktPair,
+    shms: dict[int, ShmArray],
+    sampler_stream: tractor.MsgStream,
+    feed_is_live: trio.Event,
+
+) -> None:
+
+    # do a legacy incremental backfill from the provider.
+    log.info('No TSDB (marketstored) found, doing basic backfill..')
+
+    # start history backfill task ``backfill_bars()`` is
+    # a required backend func this must block until shm is
+    # filled with first set of ohlc bars
+    for timeframe, shm in shms.items():
+        try:
+            await bus.nursery.start(
+                partial(
+                    start_backfill,
+                    mod,
+                    mkt,
+                    shm,
+                    timeframe,
+                    sampler_stream,
+                    feed_is_live,
+                )
+            )
+        except DataUnavailable:
+            # XXX: timeframe not supported for backend
+            continue
+
+
+async def tsdb_backfill(
+    mod: ModuleType,
+    marketstore: ModuleType,
+    bus: _FeedsBus,
+    storage: Storage,
+    mkt: MktPair,
+    shms: dict[int, ShmArray],
+    sampler_stream: tractor.MsgStream,
+    feed_is_live: trio.Event,
+
+    task_status: TaskStatus[
+        tuple[ShmArray, ShmArray]
+    ] = trio.TASK_STATUS_IGNORED,
+
+) -> None:
+
+    # TODO: this should be used verbatim for the pure
+    # shm backfiller approach below.
+    dts_per_tf: dict[int, datetime] = {}
+    fqme: str = mkt.fqme
+
+    # start history anal and load missing new data via backend.
+    for timeframe, shm in shms.items():
+        # loads a (large) frame of data from the tsdb depending
+        # on the db's query size limit.
+        tsdb_history, first_tsdb_dt, last_tsdb_dt = await storage.load(
+            fqme,
+            timeframe=timeframe,
+        )
+
+        try:
+            (
+                latest_start_dt,
+                latest_end_dt,
+                bf_done,
+            ) = await bus.nursery.start(
+                partial(
+                    start_backfill,
+                    mod,
+                    mkt,
+                    shm,
+                    timeframe,
+                    sampler_stream,
+                    feed_is_live,
+
+                    last_tsdb_dt=last_tsdb_dt,
+                    tsdb_is_up=True,
+                    storage=storage,
+                )
+            )
+        except DataUnavailable:
+            # XXX: timeframe not supported for backend
+            dts_per_tf[timeframe] = (
+                tsdb_history,
+                last_tsdb_dt,
+                None,
+                None,
+                None,
+            )
+            continue
+
+        # tsdb_history = series.get(timeframe)
+        dts_per_tf[timeframe] = (
+            tsdb_history,
+            last_tsdb_dt,
+            latest_start_dt,
+            latest_end_dt,
+            bf_done,
+        )
+
+        # if len(hist_shm.array) < 2:
+        # TODO: there's an edge case here to solve where if the last
+        # frame before market close (at least on ib) was pushed and
+        # there was only "1 new" row pushed from the first backfill
+        # query-iteration, then the sample step sizing calcs will
+        # break upstream from here since you can't diff on at least
+        # 2 steps... probably should also add logic to compute from
+        # the tsdb series and stash that somewhere as meta data on
+        # the shm buffer?.. no se.
+
+    # unblock the feed bus management task
+    # assert len(shms[1].array)
+    task_status.started()
+
+    async def back_load_from_tsdb(
+        timeframe: int,
+        shm: ShmArray,
+    ):
+        (
+            tsdb_history,
+            last_tsdb_dt,
+            latest_start_dt,
+            latest_end_dt,
+            bf_done,
+        ) = dts_per_tf[timeframe]
+
+        # sync to backend history task's query/load completion
+        if bf_done:
+            await bf_done.wait()
+
+        # TODO: eventually it'd be nice to not require a shm array/buffer
+        # to accomplish this.. maybe we can do some kind of tsdb direct to
+        # graphics format eventually in a child-actor?
+
+        # TODO: see if there's faster multi-field reads:
+        # https://numpy.org/doc/stable/user/basics.rec.html#accessing-multiple-fields
+        # re-index with a `time` and index field
+        prepend_start = shm._first.value
+        array = shm.array
+        if len(array):
+            shm_last_dt = pendulum.from_timestamp(shm.array[0]['time'])
+        else:
+            shm_last_dt = None
+
+        if last_tsdb_dt:
+            assert shm_last_dt >= last_tsdb_dt
+
+        # do diff against start index of last frame of history and only
+        # fill in the amount of datums from the tsdb that allows the most
+        # recent to be loaded into mem *before* older tsdb data.
+        if (
+            last_tsdb_dt
+            and latest_start_dt
+        ):
+            backfilled_size_s = (
+                latest_start_dt - last_tsdb_dt
+            ).seconds
+            # if the shm buffer len is not large enough to contain
+            # all missing data between the most recent backend-queried frame
+            # and the most recent dt-index in the db we warn that we only
+            # want to load a portion of the next tsdb query to fill that
+            # space.
+            log.info(
+                f'{backfilled_size_s} seconds worth of {timeframe}s loaded'
+            )
+
+        # Load TSDB history into shm buffer (for display) if there is
+        # remaining buffer space.
+
+        if (
+            len(tsdb_history)
+        ):
+            # load the first (smaller) bit of history originally loaded
+            # above from ``Storage.load()``.
+            to_push = tsdb_history[-prepend_start:]
+            shm.push(
+                to_push,
+
+                # insert the history pre a "days worth" of samples
+                # to leave some real-time buffer space at the end.
+                prepend=True,
+                # update_first=False,
+                # start=prepend_start,
+                field_map=marketstore.ohlc_key_map,
+            )
+
+            tsdb_last_frame_start = tsdb_history['Epoch'][0]
+
+            if timeframe == 1:
+                times = shm.array['time']
+                assert (times[1] - times[0]) == 1
+
+            # load as much from storage into shm possible (depends on
+            # user's shm size settings).
+            while shm._first.value > 0:
+
+                tsdb_history = await storage.read_ohlcv(
+                    fqme,
+                    timeframe=timeframe,
+                    end=tsdb_last_frame_start,
+                )
+
+                # empty query
+                if not len(tsdb_history):
+                    break
+
+                next_start = tsdb_history['Epoch'][0]
+                if next_start >= tsdb_last_frame_start:
+                    # no earlier data detected
+                    break
+                else:
+                    tsdb_last_frame_start = next_start
+
+                prepend_start = shm._first.value
+                to_push = tsdb_history[-prepend_start:]
+
+                # insert the history pre a "days worth" of samples
+                # to leave some real-time buffer space at the end.
+                shm.push(
+                    to_push,
+                    prepend=True,
+                    field_map=marketstore.ohlc_key_map,
+                )
+                log.info(f'Loaded {to_push.shape} datums from storage')
+
+                # manually trigger step update to update charts/fsps
+                # which need an incremental update.
+                # NOTE: the way this works is super duper
+                # un-intuitive right now:
+                # - the broadcaster fires a msg to the fsp subsystem.
+                # - fsp subsys then checks for a sample step diff and
+                #   possibly recomputes prepended history.
+                # - the fsp then sends back to the parent actor
+                #   (usually a chart showing graphics for said fsp)
+                #   which tells the chart to conduct a manual full
+                #   graphics loop cycle.
+                await sampler_stream.send('broadcast_all')
+
+        # TODO: write new data to tsdb to be ready for next read.
+
+    # backload from db (concurrently per timeframe) once backfilling of
+    # recent data is loaded from the backend provider (see
+    # ``bf_done.wait()`` call).
+    async with trio.open_nursery() as nurse:
+        for timeframe, shm in shms.items():
+            nurse.start_soon(
+                back_load_from_tsdb,
+                timeframe,
+                shm,
+            )
+
+
+async def manage_history(
+    mod: ModuleType,
+    bus: _FeedsBus,
+    mkt: MktPair,
+    some_data_ready: trio.Event,
+    feed_is_live: trio.Event,
+    timeframe: float = 60,  # in seconds
+
+    task_status: TaskStatus[
+        tuple[ShmArray, ShmArray]
+    ] = trio.TASK_STATUS_IGNORED,
+
+) -> None:
+    '''
+    Load and manage historical data including the loading of any
+    available series from `marketstore` as well as conducting real-time
+    update of both that existing db and the allocated shared memory
+    buffer.
+
+    '''
+
+    # TODO: is there a way to make each shm file key
+    # actor-tree-discovery-addr unique so we avoid collisions
+    # when doing tests which also allocate shms for certain instruments
+    # that may be in use on the system by some other running daemons?
+    # from tractor._state import _runtime_vars
+    # port = _runtime_vars['_root_mailbox'][1]
+
+    uid = tractor.current_actor().uid
+    name, uuid = uid
+    service = name.rstrip(f'.{mod.name}')
+
+    fqme: str = mkt.get_fqme(delim_char='')
+
+    # (maybe) allocate shm array for this broker/symbol which will
+    # be used for fast near-term history capture and processing.
+    hist_shm, opened = maybe_open_shm_array(
+        key=f'piker.{service}[{uuid[:16]}].{fqme}.hist',
+
+        # use any broker defined ohlc dtype:
+        dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype),
+
+        # we expect the sub-actor to write
+        readonly=False,
+    )
+    hist_zero_index = hist_shm.index - 1
+
+    # TODO: history validation
+    if not opened:
+        raise RuntimeError(
+            "Persistent shm for sym was already open?!"
+        )
+
+    rt_shm, opened = maybe_open_shm_array(
+        key=f'piker.{service}[{uuid[:16]}].{fqme}.rt',
+
+        # use any broker defined ohlc dtype:
+        dtype=getattr(mod, '_ohlc_dtype', base_iohlc_dtype),
+
+        # we expect the sub-actor to write
+        readonly=False,
+        size=3*_secs_in_day,
+    )
+
+    # (for now) set the rt (hft) shm array with space to prepend
+    # only a few days worth of 1s history.
+    days = 2
+    start_index = days*_secs_in_day
+    rt_shm._first.value = start_index
+    rt_shm._last.value = start_index
+    rt_zero_index = rt_shm.index - 1
+
+    if not opened:
+        raise RuntimeError(
+            "Persistent shm for sym was already open?!"
+        )
+
+    # register 1s and 1m buffers with the global incrementer task
+    async with open_sample_stream(
+        period_s=1.,
+        shms_by_period={
+            1.: rt_shm.token,
+            60.: hist_shm.token,
+        },
+
+        # NOTE: we want to only open a stream for doing broadcasts on
+        # backfill operations, not receive the sample index-stream
+        # (since there's no code in this data feed layer that needs to
+        # consume it).
+        open_index_stream=True,
+        sub_for_broadcasts=False,
+
+    ) as sample_stream:
+
+        open_history_client = getattr(
+            mod,
+            'open_history_client',
+            None,
+        )
+        assert open_history_client
+
+        tsdb_is_up: bool = False
+        try_remote_tsdb: bool = False
+
+        conf, path = config.load('conf', touch_if_dne=True)
+        net = conf.get('network')
+        if net:
+            tsdbconf = net.get('tsdb')
+
+            # lookup backend tsdb module by name and load any user service
+            # settings for connecting to the tsdb service.
+            tsdb_backend: str = tsdbconf.pop('backend')
+            tsdb_host: str = tsdbconf['host']
+
+            # TODO: import and load storagemod by name
+            # mod = get_storagemod(tsdb_backend)
+            from ..service import marketstore
+            if tsdb_host == 'localhost':
+                log.info(f'Scanning for existing `{tsdb_backend}`')
+                tsdb_is_up: bool = await check_for_service(f'{tsdb_backend}d')
+
+            else:
+                try_remote_tsdb: bool = True
+
+        if (
+            tsdb_is_up
+            or try_remote_tsdb
+            and (
+                opened
+                and open_history_client
+            )
+        ):
+            log.info('Found existing `marketstored`')
+
+            async with (
+                marketstore.open_storage_client(
+                    **tsdbconf
+                ) as storage,
+            ):
+                # TODO: drop returning the output that we pass in?
+                await bus.nursery.start(
+                    tsdb_backfill,
+                    mod,
+                    marketstore,
+                    bus,
+                    storage,
+                    mkt,
+                    {
+                        1: rt_shm,
+                        60: hist_shm,
+                    },
+                    sample_stream,
+                    feed_is_live,
+                )
+
+                # yield back after client connect with filled shm
+                task_status.started((
+                    hist_zero_index,
+                    hist_shm,
+                    rt_zero_index,
+                    rt_shm,
+                ))
+
+                # indicate to caller that feed can be delivered to
+                # remote requesting client since we've loaded history
+                # data that can be used.
+                some_data_ready.set()
+
+                # history retrieval loop depending on user interaction
+                # and thus a small RPC-prot for remotely controlling
+                # what data is loaded for viewing.
+                await trio.sleep_forever()
+
+        # load less history if no tsdb can be found
+        elif (
+            not tsdb_is_up
+            and opened
+        ):
+            await basic_backfill(
+                bus,
+                mod,
+                mkt,
+                {
+                    1: rt_shm,
+                    60: hist_shm,
+                },
+                sample_stream,
+                feed_is_live,
+            )
+            task_status.started((
+                hist_zero_index,
+                hist_shm,
+                rt_zero_index,
+                rt_shm,
+            ))
+            some_data_ready.set()
+            await trio.sleep_forever()
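`manage_history()` now sources tsdb connection settings from a `network.tsdb` conf table; a sketch of just the lookup it performs, over a hypothetical already-parsed conf dict (the real one comes from `config.load('conf', touch_if_dne=True)`, and the key values below are illustrative only):

    conf: dict = {
        'network': {
            'tsdb': {
                'backend': 'marketstore',
                'host': 'localhost',
                'port': 5995,  # hypothetical service port
            },
        },
    }

    net = conf.get('network')
    if net:
        tsdbconf = net.get('tsdb')
        tsdb_backend: str = tsdbconf.pop('backend')
        tsdb_host: str = tsdbconf['host']
        # leftover keys pass through as client kwargs, per the
        # `marketstore.open_storage_client(**tsdbconf)` call above
        print(tsdb_backend, tsdb_host, tsdbconf)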
@@ -23,7 +23,7 @@ Api layer likely in here...
 from types import ModuleType
 from importlib import import_module

-from ..log import get_logger
+from ._util import get_logger

 log = get_logger(__name__)
@@ -19,7 +19,6 @@ Built-in (extension) types.

 """
 import sys
-from typing import Optional
 from pprint import pformat

 import msgspec
@@ -59,7 +58,7 @@ class Struct(

     def copy(
         self,
-        update: Optional[dict] = None,
+        update: dict | None = None,

     ) -> msgspec.Struct:
         '''
@@ -80,9 +79,11 @@ class Struct(
             msgspec.msgpack.Encoder().encode(self)
         )

+    # NOTE XXX: this won't work on frozen types!
+    # use ``.copy()`` above in such cases.
     def typecast(
         self,
-        # fields: Optional[list[str]] = None,
+        # fields: list[str] | None = None,
     ) -> None:
         for fname, ftype in self.__annotations__.items():
             setattr(self, fname, ftype(getattr(self, fname)))
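The NOTE added above exists because `msgspec` rejects attribute assignment on `frozen=True` structs, so `.typecast()`'s in-place `setattr()` loop can't work on them; a stand-alone sketch (plain `msgspec.Struct`, not piker's subclass) of the failure mode and the copy-style rebuild that `.copy()` relies on:

    import msgspec

    class Point(msgspec.Struct, frozen=True):
        x: int

    p = Point(x=1)

    try:
        p.x = 2  # what a typecast-style mutation would attempt
    except AttributeError:
        print('frozen: in-place set rejected')

    # rebuild from current fields plus updates instead
    fields = {f: getattr(p, f) for f in p.__struct_fields__}
    p2 = Point(**{**fields, 'x': 2})
    assert p2.x == 2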
@@ -0,0 +1,264 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Data feed synchronization protocols, init msgs, and general
+data-provider-backend-agnostic schema definitions.
+
+'''
+from decimal import Decimal
+from pprint import pformat
+from types import ModuleType
+from typing import (
+    Any,
+    Callable,
+)
+
+from msgspec import field
+
+from .types import Struct
+from ..accounting import (
+    Asset,
+    MktPair,
+)
+from ._util import log
+
+
+class FeedInitializationError(ValueError):
+    '''
+    Live data feed setup failed due to API / msg incompatibility!
+
+    '''
+
+
+class FeedInit(Struct, frozen=True):
+    '''
+    A stringent data provider startup msg schema validator.
+
+    The fields defined here are matched with those absolutely required
+    from each backend broker/data provider.
+
+    '''
+    mkt_info: MktPair
+
+    # NOTE: only field we use rn in ``.data.feed``
+    # TODO: maybe make a SamplerConfig(Struct)?
+    shm_write_opts: dict[str, Any] = field(
+        default_factory=lambda: {
+            'has_vlm': True,
+            'sum_tick_vlm': True,
+        })
+
+
+# XXX: we group backend endpoints into 3
+# groups to determine "degrees" of functionality.
+_eps: dict[str, list[str]] = {
+
+    # basic API `Client` layer
+    'middleware': [
+        'get_client',
+    ],
+
+    # (live) data streaming / loading / search
+    'datad': [
+        'get_mkt_info',
+        'open_history_client',
+        'open_symbol_search',
+        'stream_quotes',
+    ],
+
+    # live order control and trading
+    'brokerd': [
+        'trades_dialogue',
+        # TODO: ledger normalizer helper?
+        # norm_trades(records: dict[str, Any]) -> TransactionLedger)
+    ],
+}
+
+
+def validate_backend(
+    mod: ModuleType,
+    syms: list[str],
+    init_msgs: list[FeedInit] | dict[str, dict[str, Any]],
+
+    # TODO: do a module method scan and report mismatches.
+    check_eps: bool = False,
+
+    api_log_msg_level: str = 'critical'
+
+) -> FeedInit:
+    '''
+    Fail on malformed live quotes feed config/init or warn on changes
+    that haven't been implemented by this backend yet.
+
+    '''
+    for daemon_name, eps in _eps.items():
+        for name in eps:
+            ep: Callable = getattr(
+                mod,
+                name,
+                None,
+            )
+            if ep is None:
+                log.warning(
+                    f'Provider backend {mod.name} is missing '
+                    f'{daemon_name} support :(\n'
+                    f'The following endpoint is missing: {name}'
+                )
+
+    inits: list[
+        FeedInit | dict[str, Any]
+    ] = init_msgs
+
+    # convert to list if from old dict-style
+    if isinstance(init_msgs, dict):
+        inits = list(init_msgs.values())
+
+    init: FeedInit | dict[str, Any]
+    for i, init in enumerate(inits):
+
+        # XXX: eventually this WILL NOT necessarily be true.
+        if i > 0:
+            assert not len(init_msgs) == 1
+            if isinstance(init_msgs, dict):
+                keys: set = set(init_msgs.keys()) - set(syms)
+                raise FeedInitializationError(
+                    'TOO MANY INIT MSGS!\n'
+                    f'Unexpected keys: {keys}\n'
+                    'ALL MSGS:\n'
+                    f'{pformat(init_msgs)}\n'
+                )
+            else:
+                raise FeedInitializationError(
+                    'TOO MANY INIT MSGS!\n'
+                    f'{pformat(init_msgs)}\n'
+                )
+
+    # TODO: once all backends are updated we can remove this branching.
+    rx_msg: bool = False
+    warn_msg: str = ''
+    if not isinstance(init, FeedInit):
+        warn_msg += (
+            '\n'
+            '--------------------------\n'
+            ':::DEPRECATED API STYLE:::\n'
+            '--------------------------\n'
+            f'`{mod.name}.stream_quotes()` should deliver '
+            '`.started(FeedInit)`\n'
+            f'|-> CURRENTLY it is using DEPRECATED `.started(dict)` style!\n'
+            f'|-> SEE `FeedInit` in `piker.data.validate`\n'
+            '--------------------------------------------\n'
+        )
+    else:
+        rx_msg = True
+
+    # verify feed init state / schema
+    bs_fqme: str  # backend specific fqme
+    mkt: MktPair
+
+    match init:
+
+        # backend is using old dict msg delivery
+        case {
+            'symbol_info': dict(symbol_info),
+            'fqsn': bs_fqme,
+        } | {
+            'mkt_info': dict(symbol_info),
+            'fqsn': bs_fqme,
+        }:
+            symbol_info: dict
+            warn_msg += (
+                'It may also be still using the legacy `Symbol` style API\n'
+                'IT SHOULD BE PORTED TO THE NEW '
+                '`.accounting._mktinfo.MktPair`\n'
+                'STATTTTT!!!\n'
+            )
+
+            # XXX use default legacy (aka discrete precision) mkt
+            # price/size_ticks if none delivered.
+            price_tick = symbol_info.get(
+                'price_tick_size',
+                Decimal('0.01'),
+            )
+            size_tick = symbol_info.get(
+                'lot_tick_size',
+                Decimal('1'),
+            )
+            bs_mktid = init.get('bs_mktid') or bs_fqme
+            mkt = MktPair.from_fqme(
+                fqme=f'{bs_fqme}.{mod.name}',
+
+                price_tick=price_tick,
+                size_tick=size_tick,
+
+                bs_mktid=str(bs_mktid),
+                _atype=symbol_info['asset_type']
+            )
+
+        # backend is using new `MktPair` but not entirely
+        case {
+            'mkt_info': MktPair(
+                dst=Asset(),
+            ) as mkt,
+            'fqsn': bs_fqme,
+        }:
+            warn_msg += (
+                f'{mod.name} in API compat transition?\n'
+                "It's half dict, half man..\n"
+                '-------------------------------------\n'
+            )
+
+        case FeedInit(
+            mkt_info=MktPair(dst=Asset()) as mkt,
+            shm_write_opts=dict(shm_opts),
+        ) as init:
+            name: str = mod.name
+            log.info(
+                f"{name}'s `MktPair` info:\n"
+                f'{pformat(mkt.to_dict())}\n'
+                f'shm conf: {pformat(shm_opts)}\n'
+            )
+
+        case _:
+            raise FeedInitializationError(init)
+
+    # build a msg if we received a dict for input.
+    if not rx_msg:
+        assert bs_fqme in mkt.fqme
+        init = FeedInit(
+            mkt_info=mkt,
+            shm_write_opts=init.get('shm_write_opts'),
+        )
+
+    # `MktPair` value audits
+    mkt = init.mkt_info
+    assert mkt.type_key
+
+    # backend is using new `MktPair` but not embedded `Asset` types
+    # for the .src/.dst..
+    if not isinstance(mkt.src, Asset):
+        warn_msg += (
+            f'ALSO, {mod.name.upper()} should try to deliver\n'
+            'the new `MktPair.src: Asset` field!\n'
+            '-----------------------------------------------\n'
+        )
+
+    # complain about any non-idealities
+    if warn_msg:
+        # TODO: would be nice to register an API_COMPAT or something in
+        # maybe cyan for this in general throughout piker no?
+        logmeth = getattr(log, api_log_msg_level)
+        logmeth(warn_msg)
+
+    return init.copy()
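For reference, what a compliant backend would now hand to `.started()`: a hedged sketch of constructing a `FeedInit`, reusing the `MktPair.from_fqme()` kwargs shown in the legacy compat branch above (the fqme, ticks and asset type are made-up values, not from any real backend):

    from decimal import Decimal

    mkt = MktPair.from_fqme(
        fqme='xbtusdt.kraken',  # hypothetical
        price_tick=Decimal('0.1'),
        size_tick=Decimal('0.00000001'),
        bs_mktid='XBTUSDT',
        _atype='crypto',
    )

    init = FeedInit(
        mkt_info=mkt,
        shm_write_opts={'has_vlm': True, 'sum_tick_vlm': False},
    )
    # ..which `stream_quotes()` then delivers via its `.started()`
    # call, per the deprecation warning text above.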
@@ -174,15 +174,6 @@ def fsp(
     return Fsp(wrapped, outputs=(wrapped.__name__,))


-def mk_fsp_shm_key(
-    sym: str,
-    target: Fsp
-
-) -> str:
-    uid = tractor.current_actor().uid
-    return f'{sym}.fsp.{target.name}.{".".join(uid)}'
-
-
 def maybe_mk_fsp_shm(
     sym: str,
     target: Fsp,
@@ -206,7 +197,10 @@ def maybe_mk_fsp_shm(
         [(field_name, float) for field_name in target.outputs]
     )

-    key = mk_fsp_shm_key(sym, target)
+    # (attempt to) uniquely key the fsp shm buffers
+    actor_name, uuid = tractor.current_actor().uid
+    uuid_snip: str = uuid[:16]
+    key: str = f'piker.{actor_name}[{uuid_snip}].{sym}.{target.name}'

     shm, opened = maybe_open_shm_array(
         key,
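The inlined key now matches the `piker.<actor>[<uuid-snip>].<key>` scheme used for the `.hist`/`.rt` buffers in the new history module; a quick sketch of the resulting string with made-up actor/symbol names:

    # stand-in for `tractor.current_actor().uid` -> (name, uuid)
    actor_name, uuid = 'fsp.kraken', '0123456789abcdef0123'
    uuid_snip: str = uuid[:16]
    sym, fsp_name = 'xbtusdt.kraken', 'rsi'  # hypothetical

    key: str = f'piker.{actor_name}[{uuid_snip}].{sym}.{fsp_name}'
    assert key == 'piker.fsp.kraken[0123456789abcdef].xbtusdt.kraken.rsi'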
@@ -45,7 +45,7 @@ from ..data._sampling import (
     _default_delay_s,
     open_sample_stream,
 )
-from ..data._source import Symbol
+from ..accounting import MktPair
 from ._api import (
     Fsp,
     _load_builtins,
@@ -85,7 +85,7 @@ async def filter_quotes_by_sym(

 async def fsp_compute(

-    symbol: Symbol,
+    mkt: MktPair,
     flume: Flume,
     quote_stream: trio.abc.ReceiveChannel,
@@ -104,14 +104,14 @@ async def fsp_compute(
         disabled=True
     )

-    fqsn = symbol.front_fqsn()
+    fqme = mkt.fqme
     out_stream = func(

         # TODO: do we even need this if we do the feed api right?
         # shouldn't a local stream do this before we get a handle
         # to the async iterable? it's that or we do some kinda
         # async itertools style?
-        filter_quotes_by_sym(fqsn, quote_stream),
+        filter_quotes_by_sym(fqme, quote_stream),

         # XXX: currently the ``ohlcv`` arg
         flume.rt_shm,
@@ -271,7 +271,7 @@ async def cascade(
     ctx: tractor.Context,

     # data feed key
-    fqsn: str,
+    fqme: str,

     src_shm_token: dict,
     dst_shm_token: tuple[str, np.dtype],
@@ -329,7 +329,7 @@ async def cascade(
     # open a data feed stream with requested broker
     feed: Feed
     async with data.feed.maybe_open_feed(
-        [fqsn],
+        [fqme],

         # TODO throttle tick outputs from *this* daemon since
         # it'll emit tons of ticks due to the throttle only
@@ -339,8 +339,8 @@ async def cascade(

     ) as feed:

-        flume = feed.flumes[fqsn]
-        symbol = flume.symbol
+        flume = feed.flumes[fqme]
+        mkt = flume.mkt
         assert src.token == flume.rt_shm.token
         profiler(f'{func}: feed up')
@@ -352,7 +352,7 @@ async def cascade(
     fsp_target = partial(

         fsp_compute,
-        symbol=symbol,
+        mkt=mkt,
         flume=flume,
         quote_stream=flume.stream,
@@ -32,8 +32,6 @@ from ._registry import (  # noqa
 )
 from ._daemon import (  # noqa
     maybe_spawn_daemon,
-    spawn_brokerd,
-    maybe_spawn_brokerd,
     spawn_emsd,
     maybe_open_emsd,
 )
@@ -43,6 +41,10 @@ from ._actor_runtime import (
     open_pikerd,
     get_tractor_runtime_kwargs,
 )
+from ..brokers._daemon import (
+    spawn_brokerd,
+    maybe_spawn_brokerd,
+)


 __all__ = [
@@ -19,8 +19,6 @@

 """
 from __future__ import annotations
-from pprint import pformat
-from functools import partial
 import os
 from typing import (
     Optional,
@@ -34,8 +32,7 @@ from contextlib import (
 import tractor
 import trio

-from ..log import (
-    get_logger,
+from ._util import (
     get_console_log,
 )
 from ._mngr import (
@@ -47,8 +44,6 @@ from ._registry import ( # noqa
     open_registry,
 )

-log = get_logger(__name__)
-

 def get_tractor_runtime_kwargs() -> dict[str, Any]:
     '''
@@ -135,8 +130,11 @@ _root_dname = 'pikerd'
 _root_modules = [
     __name__,
     'piker.service._daemon',
+    'piker.brokers._daemon',
+
     'piker.clearing._ems',
     'piker.clearing._client',
+
     'piker.data._sampling',
 ]

@@ -151,11 +149,6 @@ async def open_pikerd(
     debug_mode: bool = False,
     registry_addr: None | tuple[str, int] = None,

-    # db init flags
-    tsdb: bool = False,
-    es: bool = False,
-    drop_root_perms_for_ahab: bool = True,
-
     **kwargs,

 ) -> Services:
@@ -185,57 +178,16 @@ async def open_pikerd(
         trio.open_nursery() as service_nursery,
     ):
         if root_actor.accept_addr != reg_addr:
-            raise RuntimeError(f'Daemon failed to bind on {reg_addr}!?')
+            raise RuntimeError(
+                f'`pikerd` failed to bind on {reg_addr}!\n'
+                'Maybe you have another daemon already running?'
+            )

         # assign globally for future daemon/task creation
         Services.actor_n = actor_nursery
         Services.service_n = service_nursery
         Services.debug_mode = debug_mode

-        if tsdb:
-            from ._ahab import start_ahab
-            from .marketstore import start_marketstore
-
-            log.info('Spawning `marketstore` supervisor')
-            ctn_ready, config, (cid, pid) = await service_nursery.start(
-                partial(
-                    start_ahab,
-                    'marketstored',
-                    start_marketstore,
-                    loglevel=loglevel,
-                    drop_root_perms=drop_root_perms_for_ahab,
-                )
-
-            )
-            log.info(
-                f'`marketstored` up!\n'
-                f'pid: {pid}\n'
-                f'container id: {cid[:12]}\n'
-                f'config: {pformat(config)}'
-            )
-
-        if es:
-            from ._ahab import start_ahab
-            from .elastic import start_elasticsearch
-
-            log.info('Spawning `elasticsearch` supervisor')
-            ctn_ready, config, (cid, pid) = await service_nursery.start(
-                partial(
-                    start_ahab,
-                    'elasticsearch',
-                    start_elasticsearch,
-                    loglevel=loglevel,
-                    drop_root_perms=drop_root_perms_for_ahab,
-                )
-            )
-
-            log.info(
-                f'`elasticsearch` up!\n'
-                f'pid: {pid}\n'
-                f'container id: {cid[:12]}\n'
-                f'config: {pformat(config)}'
-            )
-
         try:
             yield Services

@@ -275,9 +227,6 @@ async def open_pikerd(
 async def maybe_open_pikerd(
     loglevel: Optional[str] = None,
     registry_addr: None | tuple = None,
-    tsdb: bool = False,
-    es: bool = False,
-    drop_root_perms_for_ahab: bool = True,

     **kwargs,

@@ -331,11 +280,6 @@ async def maybe_open_pikerd(
             loglevel=loglevel,
             registry_addr=registry_addr,

-            # ahabd (docker super) specific controls
-            tsdb=tsdb,
-            es=es,
-            drop_root_perms_for_ahab=drop_root_perms_for_ahab,
-
             # passthrough to ``tractor`` init
             **kwargs,

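With the `tsdb`/`es` init flags and the inline `start_ahab` calls dropped from `open_pikerd()`, container startup becomes the caller's job via the per-backend `start_ahab_daemon()` entrypoints added further down in `marketstore.py` and `elastic.py`. A sketch of the assumed caller-side wiring (the entrypoint names and yield shapes come from those hunks; the `main()` scaffold and `loglevel` plumbing are illustrative):

    import trio
    from piker.service import open_pikerd
    from piker.service import marketstore

    async def main():
        async with (
            # boot the root service-manager actor tree
            open_pikerd(loglevel='info') as services,

            # explicitly supervise the tsdb container ourselves,
            # instead of passing the old `tsdb=True` flag
            marketstore.start_ahab_daemon(
                services,
                loglevel='info',
            ) as (dname, conf),
        ):
            await trio.sleep_forever()

    trio.run(main)
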
@@ -19,6 +19,7 @@ Supervisor for ``docker`` with included async and SC wrapping
 to ensure a cancellable container lifetime system.

 '''
+from __future__ import annotations
 from collections import ChainMap
 from functools import partial
 import os
@@ -48,14 +49,13 @@ from requests.exceptions import (
     ReadTimeout,
 )

-from ..log import (
-    get_logger,
+from ._mngr import Services
+from ._util import (
+    log,  # sub-sys logger
     get_console_log,
 )
 from .. import config

-log = get_logger(__name__)
-

 class DockerNotStarted(Exception):
     'Prolly you dint start da daemon bruh'
@@ -189,7 +189,11 @@ class Container:
                 and entry not in seen_so_far
             ):
                 seen_so_far.add(entry)
-                getattr(log, level.lower(), log.error)(f'{msg}')
+                getattr(
+                    log,
+                    level.lower(),
+                    log.error
+                )(f'{msg}')

                 if level == 'fatal':
                     raise ApplicationLogError(msg)
@@ -265,8 +269,10 @@ class Container:
         start = time.time()
         for _ in range(6):

-            with trio.move_on_after(0.5) as cs:
-                log.cancel('polling for CNTR logs...')
+            with trio.move_on_after(1) as cs:
+                log.cancel(
+                    'polling for CNTR logs for {stop_predicate}..'
+                )

                 try:
                     await self.process_logs_until(
@@ -330,16 +336,13 @@ class Container:
 async def open_ahabd(
     ctx: tractor.Context,
     endpoint: str,  # ns-pointer str-msg-type
-    loglevel: str | None = 'cancel',
+    loglevel: str | None = None,

-    **kwargs,
+    **ep_kwargs,

 ) -> None:

-    log = get_console_log(
-        loglevel,
-        name=__name__,
-    )
+    log = get_console_log(loglevel or 'cancel')

     async with open_docker() as client:

@@ -352,7 +355,7 @@ async def open_ahabd(
             cntr_config,
             start_pred,
             stop_pred,
-        ) = ep_func(client)
+        ) = ep_func(client, **ep_kwargs)
         cntr = Container(dcntr)

         conf: ChainMap[str, Any] = ChainMap(
@@ -448,10 +451,17 @@ async def open_ahabd(
         )


-async def start_ahab(
+@acm
+async def start_ahab_service(
+    services: Services,
     service_name: str,

+    # endpoint config passed as **kwargs
     endpoint: Callable[docker.DockerClient, DockerContainer],
+    ep_kwargs: dict,
     loglevel: str | None = 'cancel',

+    # supervisor config
     drop_root_perms: bool = True,

     task_status: TaskStatus[
@@ -472,6 +482,9 @@ async def start_ahab(
     is started.

     '''
+    # global log
+    log = get_console_log(loglevel or 'cancel')
+
     cn_ready = trio.Event()
     try:
         async with tractor.open_nursery() as an:
@@ -500,21 +513,28 @@ async def start_ahab(
                 )[2]  # named user's uid
             )

-            async with portal.open_context(
-                open_ahabd,
+            cs, first = await services.start_service_task(
+                name=service_name,
+                portal=portal,
+
+                # rest: endpoint inputs
+                target=open_ahabd,
                 endpoint=str(NamespacePath.from_ref(endpoint)),
                 loglevel='cancel',
-            ) as (ctx, first):
+                **ep_kwargs,
+            )

-                cid, pid, cntr_config = first
+            cid, pid, cntr_config = first

-                task_status.started((
+            try:
+                yield (
                     cn_ready,
                     cntr_config,
                     (cid, pid),
-                ))
-
-                await trio.sleep_forever()
+                )
+            finally:
+                log.info(f'Cancelling ahab service `{service_name}`')
+                await services.cancel_service(service_name)

             # since we demoted root perms in this parent
             # we'll get a perms error on proc cleanup in

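Per the `open_ahabd()` hunk, endpoint functions are now invoked as `ep_func(client, **ep_kwargs)` and must hand back a 4-tuple that the supervisor unpacks into the container, its config, and the start/stop log predicates. A sketch of that assumed endpoint shape (the image name, predicates and config values here are placeholders; compare `start_marketstore()` further down, which receives its `user_config` via `ep_kwargs` the same way):

    import docker

    def start_myservice(
        client: docker.DockerClient,
        user_config: dict,
        **kwargs,
    ):
        # boot the container detached so the supervisor can poll logs
        dcntr = client.containers.run(
            'myorg/myservice:latest',  # placeholder image
            detach=True,
        )

        async def start_matcher(msg: str) -> bool:
            # log-line predicate: container is up
            return 'ready' in msg

        async def stop_matcher(msg: str) -> bool:
            # log-line predicate: container has shut down
            return 'exiting' in msg

        return (
            dcntr,
            user_config,    # handed back as the cntr config
            start_matcher,
            stop_matcher,
        )
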
@@ -20,7 +20,6 @@ Daemon-actor spawning "endpoint-hooks".
 """
 from __future__ import annotations
 from typing import (
-    Optional,
     Callable,
     Any,
 )
@@ -30,41 +29,28 @@ from contextlib import (

 import tractor

-from ..log import (
-    get_logger,
-    get_console_log,
+from ._util import (
+    log,  # sub-sys logger
 )
-from ..brokers import get_brokermod
 from ._mngr import (
     Services,
 )
 from ._actor_runtime import maybe_open_pikerd
 from ._registry import find_service

-log = get_logger(__name__)
-
-# `brokerd` enabled modules
-# NOTE: keeping this list as small as possible is part of our caps-sec
-# model and should be treated with utmost care!
-_data_mods = [
-    'piker.brokers.core',
-    'piker.brokers.data',
-    'piker.data',
-    'piker.data.feed',
-    'piker.data._sampling'
-]
-

 @acm
 async def maybe_spawn_daemon(

     service_name: str,
     service_task_target: Callable,
-    spawn_args: dict[str, Any],
-    loglevel: Optional[str] = None,

+    spawn_args: dict[str, Any],
+
+    loglevel: str | None = None,
     singleton: bool = False,
-    **kwargs,
+
+    **pikerd_kwargs,

 ) -> tractor.Portal:
     '''
@@ -79,9 +65,6 @@ async def maybe_spawn_daemon(
     clients.

     '''
-    if loglevel:
-        get_console_log(loglevel)
-
     # serialize access to this section to avoid
     # 2 or more tasks racing to create a daemon
     lock = Services.locks[service_name]
@@ -93,18 +76,17 @@ async def maybe_spawn_daemon(
         yield portal
         return

-    log.warning(f"Couldn't find any existing {service_name}")
-
-    # TODO: really shouldn't the actor spawning be part of the service
-    # starting method `Services.start_service()` ?
+    log.warning(
+        f"Couldn't find any existing {service_name}\n"
+        'Attempting to spawn new daemon-service..'
+    )

     # ask root ``pikerd`` daemon to spawn the daemon we need if
     # pikerd is not live we now become the root of the
     # process tree
     async with maybe_open_pikerd(

         loglevel=loglevel,
-        **kwargs,
+        **pikerd_kwargs,

     ) as pikerd_portal:

@@ -117,108 +99,42 @@ async def maybe_spawn_daemon(
         # service task for that actor.
         started: bool
         if pikerd_portal is None:
-            started = await service_task_target(**spawn_args)
+            started = await service_task_target(
+                loglevel=loglevel,
+                **spawn_args,
+            )

         else:
-            # tell the remote `pikerd` to start the target,
-            # the target can't return a non-serializable value
-            # since it is expected that service startingn is
-            # non-blocking and the target task will persist running
-            # on `pikerd` after the client requesting it's start
-            # disconnects.
+            # request a remote `pikerd` (service manager) to start the
+            # target daemon-task, the target can't return
+            # a non-serializable value since it is expected that service
+            # starting is non-blocking and the target task will persist
+            # running "under" or "within" the `pikerd` actor tree after
+            # the questing client disconnects. in other words this
+            # spawns a persistent daemon actor that continues to live
+            # for the lifespan of whatever the service manager inside
+            # `pikerd` says it should.
             started = await pikerd_portal.run(
                 service_task_target,
+                loglevel=loglevel,
                 **spawn_args,
             )

         if started:
             log.info(f'Service {service_name} started!')

+        # block until we can discover (by IPC connection) to the newly
+        # spawned daemon-actor and then deliver the portal to the
+        # caller.
         async with tractor.wait_for_actor(service_name) as portal:
             lock.release()
             yield portal
             await portal.cancel_actor()


-async def spawn_brokerd(
-
-    brokername: str,
-    loglevel: Optional[str] = None,
-    **tractor_kwargs,
-
-) -> bool:
-
-    log.info(f'Spawning {brokername} broker daemon')
-
-    brokermod = get_brokermod(brokername)
-    dname = f'brokerd.{brokername}'
-
-    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
-    tractor_kwargs.update(extra_tractor_kwargs)
-
-    # ask `pikerd` to spawn a new sub-actor and manage it under its
-    # actor nursery
-    modpath = brokermod.__name__
-    broker_enable = [modpath]
-    for submodname in getattr(
-        brokermod,
-        '__enable_modules__',
-        [],
-    ):
-        subpath = f'{modpath}.{submodname}'
-        broker_enable.append(subpath)
-
-    portal = await Services.actor_n.start_actor(
-        dname,
-        enable_modules=_data_mods + broker_enable,
-        loglevel=loglevel,
-        debug_mode=Services.debug_mode,
-        **tractor_kwargs
-    )
-
-    # non-blocking setup of brokerd service nursery
-    from ..data import _setup_persistent_brokerd
-
-    await Services.start_service_task(
-        dname,
-        portal,
-        _setup_persistent_brokerd,
-        brokername=brokername,
-    )
-    return True
-
-
-@acm
-async def maybe_spawn_brokerd(
-
-    brokername: str,
-    loglevel: Optional[str] = None,
-    **kwargs,
-
-) -> tractor.Portal:
-    '''
-    Helper to spawn a brokerd service *from* a client
-    who wishes to use the sub-actor-daemon.
-
-    '''
-    async with maybe_spawn_daemon(
-
-        f'brokerd.{brokername}',
-        service_task_target=spawn_brokerd,
-        spawn_args={
-            'brokername': brokername,
-            'loglevel': loglevel,
-        },
-        loglevel=loglevel,
-        **kwargs,
-
-    ) as portal:
-        yield portal
-
-
 async def spawn_emsd(

-    loglevel: Optional[str] = None,
+    loglevel: str | None = None,
     **extra_tractor_kwargs

 ) -> bool:
@@ -245,7 +161,10 @@ async def spawn_emsd(
     await Services.start_service_task(
         'emsd',
         portal,
+
+        # signature of target root-task endpoint
         _setup_persistent_emsd,
+        loglevel=loglevel,
     )
     return True

@@ -254,18 +173,18 @@ async def spawn_emsd(
 async def maybe_open_emsd(

     brokername: str,
-    loglevel: Optional[str] = None,
-    **kwargs,
+    loglevel: str | None = None,

-) -> tractor._portal.Portal:  # noqa
+    **pikerd_kwargs,
+
+) -> tractor.Portal:  # noqa

     async with maybe_spawn_daemon(

         'emsd',
         service_task_target=spawn_emsd,
-        spawn_args={'loglevel': loglevel},
+        spawn_args={},
         loglevel=loglevel,
-        **kwargs,
+        **pikerd_kwargs,

     ) as portal:
         yield portal

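`spawn_brokerd()`/`maybe_spawn_brokerd()` (and the `_data_mods` caps-sec list) move out to `piker.brokers._daemon`, matching the new `__init__.py` imports earlier in this diff, while `maybe_spawn_daemon()` now forwards `loglevel` into the target itself and passes only residual `**pikerd_kwargs` down to `maybe_open_pikerd()`. A usage sketch under those assumptions, mirroring the `maybe_open_emsd()` hunk above:

    from contextlib import asynccontextmanager as acm

    from piker.service._daemon import (
        maybe_spawn_daemon,
        spawn_emsd,
    )

    @acm
    async def open_my_ems(loglevel: str | None = None):
        async with maybe_spawn_daemon(
            'emsd',
            service_task_target=spawn_emsd,
            spawn_args={},      # loglevel is now forwarded implicitly
            loglevel=loglevel,
        ) as portal:
            yield portal
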
@@ -28,15 +28,16 @@ import trio
 from trio_typing import TaskStatus
 import tractor

-from ..log import (
-    get_logger,
+from ._util import (
+    log,  # sub-sys logger
 )

-log = get_logger(__name__)
-
-# TODO: factor this into a ``tractor.highlevel`` extension
-# pack for the library.
+# TODO: we need remote wrapping and a general soln:
+# - factor this into a ``tractor.highlevel`` extension # pack for the
+#   library.
+# - wrap a "remote api" wherein you can get a method proxy
+#   to the pikerd actor for starting services remotely!
 class Services:

     actor_n: tractor._supervise.ActorNursery
@@ -58,7 +59,8 @@ class Services:
         name: str,
         portal: tractor.Portal,
         target: Callable,
-        **kwargs,
+        allow_overruns: bool = False,
+        **ctx_kwargs,

     ) -> (trio.CancelScope, tractor.Context):
         '''
@@ -81,9 +83,11 @@ class Services:
         ) -> Any:

             with trio.CancelScope() as cs:
+
                 async with portal.open_context(
                     target,
-                    **kwargs,
+                    allow_overruns=allow_overruns,
+                    **ctx_kwargs,

                 ) as (ctx, first):

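`Services.start_service_task()` now exposes `allow_overruns` explicitly and forwards it, plus any residual `**ctx_kwargs`, into `portal.open_context()`. A sketch of a call under those assumptions (`my_ctx_endpoint` is hypothetical; any real target must be a tractor context endpoint taking `ctx` as its first arg, per the `open_context()` usage above):

    import tractor
    from piker.service import Services

    @tractor.context
    async def my_ctx_endpoint(
        ctx: tractor.Context,
        loglevel: str,
    ):
        # hypothetical endpoint: report first-value then serve forever
        await ctx.started('up')

    async def start_my_service(portal: tractor.Portal):
        cs, first = await Services.start_service_task(
            name='my-service',
            portal=portal,
            target=my_ctx_endpoint,
            allow_overruns=True,   # forwarded to open_context()
            loglevel='info',       # residual **ctx_kwargs -> endpoint
        )
        return cs, first
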
@@ -28,13 +28,10 @@ from typing import (

 import tractor

-from ..log import (
-    get_logger,
+from ._util import (
+    log,  # sub-sys logger
 )

-log = get_logger(__name__)
-
 _default_registry_host: str = '127.0.0.1'
 _default_registry_port: int = 6116
 _default_reg_addr: tuple[str, int] = (

@@ -0,0 +1,33 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Sub-sys module commons.
+
+"""
+from functools import partial
+
+from ..log import (
+    get_logger,
+    get_console_log,
+)
+subsys: str = 'piker.service'
+
+log = get_logger(subsys)
+
+get_console_log = partial(
+    get_console_log,
+    name=subsys,
+)

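The new `_util.py` centralizes one sub-system logger named `piker.service` plus a `get_console_log` partial pre-bound to that name; the surrounding hunks then delete each module-local `log = get_logger(__name__)` in favor of importing the shared instance. A sketch of the consuming side, mirroring the `_daemon.py` hunk (the `loglevel` plumbing shown here is an assumption):

    # inside some `piker.service` sub-module
    from ._util import (
        log,  # sub-sys logger, named 'piker.service'
        get_console_log,
    )

    def some_service_fn(loglevel: str | None = None):
        if loglevel:
            # console handler is already bound to the subsys name
            get_console_log(loglevel)
        log.info('service subsystem says hi')
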
@@ -15,26 +15,23 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 from __future__ import annotations
+from contextlib import asynccontextmanager as acm
 from typing import (
     Any,
     TYPE_CHECKING,
 )

+import asks
+
 if TYPE_CHECKING:
     import docker
     from ._ahab import DockerContainer

-from piker.log import (
-    get_logger,
-    get_console_log
+from ._util import log  # sub-sys logger
+from ._util import (
+    get_console_log,
 )

-import asks
-
-
-log = get_logger(__name__)
-

 # container level config
 _config = {
@@ -92,7 +89,7 @@ def start_elasticsearch(
         'http://localhost:19200/_cat/health',
         params={'format': 'json'}
     )).json()
-    kog.info(
+    log.info(
         'ElasticSearch cntr health:\n'
         f'{health}'
     )
@@ -126,3 +123,47 @@ def start_elasticsearch(
         health_query,
         chk_for_closed_msg,
     )
+
+
+@acm
+async def start_ahab_daemon(
+    service_mngr: Services,
+    user_config: dict | None = None,
+    loglevel: str | None = None,
+
+) -> tuple[str, dict]:
+    '''
+    Task entrypoint to start the elasticsearch docker container using
+    the service manager.
+
+    '''
+    from ._ahab import start_ahab_service
+
+    # dict-merge any user settings
+    conf: dict = _config.copy()
+    if user_config:
+        conf = conf | user_config
+
+    dname: str = 'esd'
+    log.info(f'Spawning `{dname}` supervisor')
+    async with start_ahab_service(
+        service_mngr,
+        dname,
+
+        # NOTE: docker-py client is passed at runtime
+        start_elasticsearch,
+        ep_kwargs={'user_config': conf},
+        loglevel=loglevel,
+
+    ) as (
+        ctn_ready,
+        config,
+        (cid, pid),
+    ):
+        log.info(
+            f'`{dname}` up!\n'
+            f'pid: {pid}\n'
+            f'container id: {cid[:12]}\n'
+            f'config: {pformat(config)}'
+        )
+        yield dname, conf

@@ -26,9 +26,9 @@
 from __future__ import annotations
 from contextlib import asynccontextmanager as acm
 from datetime import datetime
+from pprint import pformat
 from typing import (
     Any,
-    Optional,
     Union,
     TYPE_CHECKING,
 )
@@ -54,12 +54,14 @@ if TYPE_CHECKING:
     import docker
     from ._ahab import DockerContainer

+from ._util import (
+    log,  # sub-sys logger
+    get_console_log,
+)
+from . import Services
 from ..data.feed import maybe_open_feed
-from ..log import get_logger, get_console_log
 from .._profile import Profiler
+from .. import config

-log = get_logger(__name__)
-

 # ahabd-supervisor and container level config
@@ -70,7 +72,7 @@ _config = {
     'startup_timeout': 2,
 }

-_yaml_config = '''
+_yaml_config_str: str = '''
 # piker's ``marketstore`` config.

 # mount this config using:
@@ -89,6 +91,12 @@ stale_threshold: 5
 enable_add: true
 enable_remove: false

+# SUPER DUPER CRITICAL to address a super weird issue:
+# https://github.com/pikers/piker/issues/443
+# seems like "variable compression" is possibly borked
+# or snappy compression somehow breaks easily?
+disable_variable_compression: true
+
 triggers:
   - module: ondiskagg.so
     on: "*/1Sec/OHLCV"
@@ -106,18 +114,18 @@ triggers:
 #     config:
 #       filter: "nasdaq"

-'''.format(**_config)
+'''


 def start_marketstore(
     client: docker.DockerClient,
+    user_config: dict,
     **kwargs,

 ) -> tuple[DockerContainer, dict[str, Any]]:
     '''
-    Start and supervise a marketstore instance with its config bind-mounted
-    in from the piker config directory on the system.
+    Start and supervise a marketstore instance with its config
+    bind-mounted in from the piker config directory on the system.

     The equivalent cli cmd to this code is:

@@ -141,14 +149,16 @@ def start_marketstore(
         os.mkdir(mktsdir)

     yml_file = os.path.join(mktsdir, 'mkts.yml')
+    yaml_config = _yaml_config_str.format(**user_config)
+
     if not os.path.isfile(yml_file):
         log.warning(
             f'No `marketstore` config exists?: {yml_file}\n'
             'Generating new file from template:\n'
-            f'{_yaml_config}\n'
+            f'{yaml_config}\n'
         )
         with open(yml_file, 'w') as yf:
-            yf.write(_yaml_config)
+            yf.write(yaml_config)

     # create a mount from user's local piker config dir into container
     config_dir_mnt = docker.types.Mount(
@@ -171,6 +181,9 @@ def start_marketstore(
         type='bind',
     )

+    grpc_listen_port = int(user_config['grpc_listen_port'])
+    ws_listen_port = int(user_config['ws_listen_port'])
+
     dcntr: DockerContainer = client.containers.run(
         'alpacamarkets/marketstore:latest',
         # do we need this for cmds?
@@ -178,8 +191,8 @@ def start_marketstore(

         # '-p 5993:5993',
         ports={
-            '5993/tcp': 5993,  # jsonrpc / ws?
-            '5995/tcp': 5995,  # grpc
+            f'{ws_listen_port}/tcp': ws_listen_port,
+            f'{grpc_listen_port}/tcp': grpc_listen_port,
         },
         mounts=[
             config_dir_mnt,
@@ -199,7 +212,13 @@ def start_marketstore(
         return "launching tcp listener for all services..." in msg

     async def stop_matcher(msg: str):
-        return "exiting..." in msg
+        return (
+            # not sure when this happens, some kinda stop condition
+            "exiting..." in msg
+
+            # after we send SIGINT..
+            or "initiating graceful shutdown due to 'interrupt' request" in msg
+        )

     return (
         dcntr,
@@ -211,6 +230,49 @@ def start_marketstore(
     )


+@acm
+async def start_ahab_daemon(
+    service_mngr: Services,
+    user_config: dict | None = None,
+    loglevel: str | None = None,
+
+) -> tuple[str, dict]:
+    '''
+    Task entrypoint to start the marketstore docker container using the
+    service manager.
+
+    '''
+    from ._ahab import start_ahab_service
+
+    # dict-merge any user settings
+    conf: dict = _config.copy()
+    if user_config:
+        conf: dict = conf | user_config
+
+    dname: str = 'marketstored'
+    log.info(f'Spawning `{dname}` supervisor')
+    async with start_ahab_service(
+        service_mngr,
+        dname,
+
+        # NOTE: docker-py client is passed at runtime
+        start_marketstore,
+        ep_kwargs={'user_config': conf},
+        loglevel=loglevel,
+    ) as (
+        _,
+        config,
+        (cid, pid),
+    ):
+        log.info(
+            f'`{dname}` up!\n'
+            f'pid: {pid}\n'
+            f'container id: {cid[:12]}\n'
+            f'config: {pformat(config)}'
+        )
+        yield dname, conf
+
+
 _tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK')
 _tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids)

@@ -286,7 +348,7 @@ def mk_tbk(keys: tuple[str, str, str]) -> str:

 def quote_to_marketstore_structarray(
     quote: dict[str, Any],
-    last_fill: Optional[float]
+    last_fill: float | None,

 ) -> np.array:
     '''
@@ -327,8 +389,8 @@ def quote_to_marketstore_structarray(

 @acm
 async def get_client(
-    host: str = 'localhost',
-    port: int = _config['grpc_listen_port'],
+    host: str | None,
+    port: int | None,

 ) -> MarketstoreClient:
     '''
@@ -337,8 +399,8 @@ async def get_client(

     '''
     async with open_marketstore_client(
-        host,
-        port
+        host or 'localhost',
+        port or _config['grpc_listen_port'],
     ) as client:
         yield client

@@ -402,18 +464,18 @@ class Storage:

     async def load(
         self,
-        fqsn: str,
+        fqme: str,
         timeframe: int,

     ) -> tuple[
         np.ndarray,  # timeframe sampled array-series
-        Optional[datetime],  # first dt
-        Optional[datetime],  # last dt
+        datetime | None,  # first dt
+        datetime | None,  # last dt
     ]:

         first_tsdb_dt, last_tsdb_dt = None, None
         hist = await self.read_ohlcv(
-            fqsn,
+            fqme,
             # on first load we don't need to pull the max
             # history per request size worth.
             limit=3000,
@@ -436,9 +498,9 @@ class Storage:

     async def read_ohlcv(
         self,
-        fqsn: str,
+        fqme: str,
         timeframe: int | str,
-        end: Optional[int] = None,
+        end: int | None = None,
         limit: int = int(800e3),

     ) -> np.ndarray:
@@ -446,14 +508,14 @@ class Storage:
         client = self.client
         syms = await client.list_symbols()

-        if fqsn not in syms:
+        if fqme not in syms:
             return {}

         # use the provided timeframe or 1s by default
         tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])

         params = Params(
-            symbols=fqsn,
+            symbols=fqme,
             timeframe=tfstr,
             attrgroup='OHLCV',
             end=end,
@@ -464,20 +526,26 @@ class Storage:
             limit=limit,
         )

+        for i in range(3):
             try:
                 result = await client.query(params)
+                break
             except purerpc.grpclib.exceptions.UnknownError as err:
+                if 'snappy' in err.args:
+                    await tractor.breakpoint()
+
                 # indicate there is no history for this timeframe
                 log.exception(
                     f'Unknown mkts QUERY error: {params}\n'
                     f'{err.args}'
                 )
+        else:
             return {}

         # TODO: it turns out column access on recarrays is actually slower:
         # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
         # it might make sense to make these structured arrays?
-        data_set = result.by_symbols()[fqsn]
+        data_set = result.by_symbols()[fqme]
         array = data_set.array

         # XXX: ensure sample rate is as expected
@@ -492,11 +560,11 @@ class Storage:
                 'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG'
                 f'WIPING HISTORY FOR {ts}s'
             )
-            await self.delete_ts(fqsn, timeframe)
+            await self.delete_ts(fqme, timeframe)

             # try reading again..
             return await self.read_ohlcv(
-                fqsn,
+                fqme,
                 timeframe,
                 end,
                 limit,
@@ -507,7 +575,7 @@ class Storage:
     async def delete_ts(
         self,
         key: str,
-        timeframe: Optional[Union[int, str]] = None,
+        timeframe: Union[int, str | None] = None,
         fmt: str = 'OHLCV',

     ) -> bool:
@@ -515,6 +583,7 @@ class Storage:
         client = self.client
         syms = await client.list_symbols()
         if key not in syms:
+            await tractor.breakpoint()
             raise KeyError(f'`{key}` table key not found in\n{syms}?')

         tbk = mk_tbk((
@@ -526,7 +595,7 @@ class Storage:

     async def write_ohlcv(
         self,
-        fqsn: str,
+        fqme: str,
         ohlcv: np.ndarray,
         timeframe: int,
         append_and_duplicate: bool = True,
@@ -559,7 +628,7 @@ class Storage:
             # write to db
             resp = await self.client.write(
                 to_push,
-                tbk=f'{fqsn}/{tfkey}/OHLCV',
+                tbk=f'{fqme}/{tfkey}/OHLCV',

                 # NOTE: will will append duplicates
                 # for the same timestamp-index.
@@ -582,7 +651,7 @@ class Storage:
                 # write to db
                 resp = await self.client.write(
                     to_push,
-                    tbk=f'{fqsn}/{tfkey}/OHLCV',
+                    tbk=f'{fqme}/{tfkey}/OHLCV',

                     # NOTE: will will append duplicates
                     # for the same timestamp-index.
@@ -614,8 +683,8 @@ class Storage:

 @acm
 async def open_storage_client(
-    fqsn: str,
-    period: Optional[Union[int, str]] = None,  # in seconds
+    host: str,
+    grpc_port: int,

 ) -> tuple[Storage, dict[str, np.ndarray]]:
     '''
@@ -624,7 +693,10 @@ async def open_storage_client(
     '''
     async with (
         # eventually a storage backend endpoint
-        get_client() as client,
+        get_client(
+            host=host,
+            port=grpc_port,
+        ) as client,
     ):
         # slap on our wrapper api
         yield Storage(client)
@@ -632,7 +704,7 @@ async def open_storage_client(

 @acm
 async def open_tsdb_client(
-    fqsn: str,
+    fqme: str,
 ) -> Storage:

     # TODO: real-time dedicated task for ensuring
@@ -666,25 +738,34 @@ async def open_tsdb_client(
         delayed=False,
     )

+    # load any user service settings for connecting to
+    rootconf, path = config.load(
+        'conf',
+        touch_if_dne=True,
+    )
+    tsdbconf = rootconf['network'].get('tsdb')
+    # backend = tsdbconf.pop('backend')
     async with (
-        open_storage_client(fqsn) as storage,
+        open_storage_client(
+            **tsdbconf,
+        ) as storage,

         maybe_open_feed(
-            [fqsn],
+            [fqme],
             start_stream=False,

         ) as feed,
     ):
-        profiler(f'opened feed for {fqsn}')
+        profiler(f'opened feed for {fqme}')

         # to_append = feed.hist_shm.array
         # to_prepend = None

-        if fqsn:
-            flume = feed.flumes[fqsn]
-            symbol = flume.symbol
+        if fqme:
+            flume = feed.flumes[fqme]
+            symbol = flume.mkt
             if symbol:
-                fqsn = symbol.fqsn
+                fqme = symbol.fqme

         # diff db history with shm and only write the missing portions
         # ohlcv = flume.hist_shm.array
@@ -692,7 +773,7 @@ async def open_tsdb_client(
         # TODO: use pg profiler
         # for secs in (1, 60):
         #     tsdb_array = await storage.read_ohlcv(
-        #         fqsn,
+        #         fqme,
         #         timeframe=timeframe,
         #     )
         # # hist diffing:
@@ -703,7 +784,7 @@ async def open_tsdb_client(

         # profiler('Finished db arrays diffs')

-        syms = await storage.client.list_symbols()
+        _ = await storage.client.list_symbols()
         # log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
         # profiler(f'listed symbols {syms}')
         yield storage
@@ -715,7 +796,7 @@ async def open_tsdb_client(
         # log.info(
         #     f'Writing datums {array.size} -> to tsdb from shm\n'
         # )
-        # await storage.write_ohlcv(fqsn, array)
+        # await storage.write_ohlcv(fqme, array)

         # profiler('Finished db writes')

@@ -882,3 +963,5 @@ async def stream_quotes(

     if quotes:
         yield quotes
+
+

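`open_tsdb_client()` now pulls its connection settings from the user's root config instead of hard-wired defaults: the `[network.tsdb]` table is splatted straight into `open_storage_client(host, grpc_port)`. A sketch of the assumed config shape (key names inferred from that signature; the `backend` key is only suggested by the commented-out `pop`):

    # sketch: the `conf.toml` section assumed by the new lookup,
    # eg.
    #
    #   [network.tsdb]
    #   backend = 'marketstore'
    #   host = 'localhost'
    #   grpc_port = 5995
    #
    from piker import config

    rootconf, path = config.load('conf', touch_if_dne=True)
    tsdbconf = rootconf['network'].get('tsdb')
    # keys must match `open_storage_client()`'s params since the
    # dict is passed through as `**tsdbconf`
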
@@ -28,7 +28,7 @@ from ..service import maybe_spawn_brokerd
 from . import _event
 from ._exec import run_qtractor
 from ..data.feed import install_brokerd_search
-from ..data._source import unpack_fqsn
+from ..accounting import unpack_fqme
 from . import _search
 from ._chart import GodWidget
 from ..log import get_logger
@@ -100,8 +100,8 @@ async def _async_main(
     starting_done = sbar.open_status('starting ze sexy chartz')

     needed_brokermods: dict[str, ModuleType] = {}
-    for fqsn in syms:
-        brokername, *_ = unpack_fqsn(fqsn)
+    for fqme in syms:
+        brokername, *_ = unpack_fqme(fqme)
         needed_brokermods[brokername] = brokers[brokername]

     async with (
@@ -120,7 +120,7 @@ async def _async_main(

         # this internally starts a ``display_symbol_data()`` task above
         order_mode_ready = await godwidget.load_symbols(
-            fqsns=syms,
+            fqmes=syms,
             loglevel=loglevel,
         )

@@ -29,7 +29,7 @@ from PyQt5 import QtCore, QtGui, QtWidgets
 from PyQt5.QtCore import QPointF

 from . import _pg_overrides as pgo
-from ..data._source import float_digits
+from ..accounting._mktinfo import float_digits
 from ._label import Label
 from ._style import DpiAwareFont, hcolor, _font
 from ._interaction import ChartView

@@ -68,7 +68,9 @@ from ..data.feed import (
     Feed,
     Flume,
 )
-from ..data._source import Symbol
+from ..accounting import (
+    MktPair,
+)
 from ..log import get_logger
 from ._interaction import ChartView
 from ._forms import FieldsForm
@@ -152,7 +154,7 @@ class GodWidget(QWidget):

     def set_chart_symbols(
         self,
-        group_key: tuple[str],  # of form <fqsn>.<providername>
+        group_key: tuple[str],  # of form <fqme>.<providername>
         all_linked: tuple[LinkedSplits, LinkedSplits],  # type: ignore

     ) -> None:
@@ -170,7 +172,7 @@ class GodWidget(QWidget):

     async def load_symbols(
         self,
-        fqsns: list[str],
+        fqmes: list[str],
         loglevel: str,
         reset: bool = False,

@@ -183,7 +185,7 @@ class GodWidget(QWidget):
         '''
         # NOTE: for now we use the first symbol in the set as the "key"
         # for the overlay of feeds on the chart.
-        group_key: tuple[str] = tuple(fqsns)
+        group_key: tuple[str] = tuple(fqmes)

         all_linked = self.get_chart_symbols(group_key)
         order_mode_started = trio.Event()
@@ -217,7 +219,7 @@ class GodWidget(QWidget):
             self._root_n.start_soon(
                 display_symbol_data,
                 self,
-                fqsns,
+                fqmes,
                 loglevel,
                 order_mode_started,
             )
@@ -287,11 +289,11 @@ class GodWidget(QWidget):
             pp_nav.hide()

         # set window titlebar info
-        symbol = self.rt_linked.symbol
+        symbol = self.rt_linked.mkt
         if symbol is not None:
             self.window.setWindowTitle(
-                f'{symbol.front_fqsn()} '
-                f'tick:{symbol.tick_size}'
+                f'{symbol.fqme} '
+                f'tick:{symbol.size_tick}'
             )

         return order_mode_started
@@ -452,7 +454,7 @@ class LinkedSplits(QWidget):
         # update the UI for a given "chart instance".
         self.display_state: DisplayState | None = None

-        self._symbol: Symbol = None
+        self._mkt: MktPair = None

     def on_splitter_adjust(
         self,
@@ -474,9 +476,15 @@ class LinkedSplits(QWidget):
             **kwargs,
         )

+    def set_mkt_info(
+        self,
+        mkt: MktPair,
+    ) -> None:
+        self._mkt = mkt
+
     @property
-    def symbol(self) -> Symbol:
-        return self._symbol
+    def mkt(self) -> MktPair:
+        return self._mkt

     def set_split_sizes(
         self,
@@ -521,7 +529,7 @@ class LinkedSplits(QWidget):
     def plot_ohlc_main(
         self,

-        symbol: Symbol,
+        mkt: MktPair,
         shm: ShmArray,
         flume: Flume,
         sidepane: FieldsForm,
@@ -540,7 +548,7 @@ class LinkedSplits(QWidget):
         # add crosshairs
         self.cursor = Cursor(
             linkedsplits=self,
-            digits=symbol.tick_size_digits,
+            digits=mkt.price_tick_digits,
         )

         # NOTE: atm the first (and only) OHLC price chart for the symbol
@@ -548,7 +556,7 @@ class LinkedSplits(QWidget):
         # be no distinction since we will have multiple symbols per
         # view as part of "aggregate feeds".
         self.chart = self.add_plot(
-            name=symbol.fqsn,
+            name=mkt.fqme,
             shm=shm,
             flume=flume,
             style=style,
@@ -1030,7 +1038,7 @@ class ChartPlotWidget(pg.PlotWidget):
         '''
         view = vb or self.view
         viz = self.main_viz
-        l, r = viz.view_range()
+        left, right = viz.view_range()
         x_shift = viz.index_step() * datums

         if datums >= 300:
@@ -1040,8 +1048,8 @@ class ChartPlotWidget(pg.PlotWidget):

         # should trigger broadcast on all overlays right?
         view.setXRange(
-            min=l + x_shift,
-            max=r + x_shift,
+            min=left + x_shift,
+            max=right + x_shift,

             # TODO: holy shit, wtf dude... why tf would this not be 0 by
             # default... speechless.
@@ -1222,12 +1230,12 @@ class ChartPlotWidget(pg.PlotWidget):

         # TODO: UGH! just make this not here! we should
         # be making the sticky from code which has access
-        # to the ``Symbol`` instance..
+        # to the ``MktPair`` instance..

         # if the sticky is for our symbol
         # use the tick size precision for display
         name = name or pi.name
-        sym = self.linked.symbol
+        sym = self.linked.mkt
         digits = None
         if name == sym.key:
             digits = sym.tick_size_digits

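The chart hunks read per-market precision off `MktPair`: a decimal `price_tick` step with a derived `price_tick_digits`, plus the analogous `size_tick` shown in the titlebar. A worked sketch of the snapping math used by the cursor hunk below (the `Decimal`-based digits formula is an assumption, not lifted from this diff):

    from decimal import Decimal

    def float_digits(step: Decimal) -> int:
        # number of decimal places in a tick step, eg. 0.01 -> 2
        return int(-step.as_tuple().exponent)

    price_tick = Decimal('0.01')
    assert float_digits(price_tick) == 2

    # mirrors the Cursor hunk: y-values snap to the price grid
    m = 1 / float(price_tick)    # ie. self._y_tick_mult
    y = 123.4567
    iy = round(y * m) / m        # -> 123.46
    assert iy == 123.46
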
@@ -228,7 +228,7 @@ class ContentsLabel(pg.LabelItem):
                         'bar_wap',
                     ]
                 ],
-                name=name,
+                # name=name,
                 index=ix,
             )
         )
@@ -363,7 +363,8 @@ class Cursor(pg.GraphicsObject):

         # value used for rounding y-axis discreet tick steps
         # computing once, up front, here cuz why not
-        self._y_incr_mult = 1 / self.linked._symbol.tick_size
+        mkt = self.linked.mkt
+        self._y_tick_mult = 1/float(mkt.price_tick)

         # line width in view coordinates
         self._lw = self.pixelWidth() * self.lines_pen.width()
@@ -571,9 +572,15 @@ class Cursor(pg.GraphicsObject):
         line_offset = self._lw / 2

         # round y value to nearest tick step
-        m = self._y_incr_mult
+        m = self._y_tick_mult
         iy = round(y * m) / m
         vl_y = iy - line_offset
+        # print(
+        #     f'tick: {self._y_tick}\n'
+        #     f'y: {y}\n'
+        #     f'iy: {iy}\n'
+        #     f'vl_y: {vl_y}\n'
+        # )

         # update y-range items
         if iy != last_iy:

@ -23,6 +23,8 @@ from functools import lru_cache
|
||||||
from math import (
|
from math import (
|
||||||
ceil,
|
ceil,
|
||||||
floor,
|
floor,
|
||||||
|
isnan,
|
||||||
|
log as logf,
|
||||||
)
|
)
|
||||||
from typing import (
|
from typing import (
|
||||||
Literal,
|
Literal,
|
||||||
|
@ -332,6 +334,8 @@ class Viz(Struct):
|
||||||
float,
|
float,
|
||||||
] = {}
|
] = {}
|
||||||
|
|
||||||
|
_mxmn_cache_enabled: bool = True
|
||||||
|
|
||||||
# to make lru_cache-ing work, see
|
# to make lru_cache-ing work, see
|
||||||
# https://docs.python.org/3/faq/programming.html#how-do-i-cache-method-calls
|
# https://docs.python.org/3/faq/programming.html#how-do-i-cache-method-calls
|
||||||
def __eq__(self, other):
|
def __eq__(self, other):
|
||||||
|
@ -432,12 +436,12 @@ class Viz(Struct):
|
||||||
else:
|
else:
|
||||||
if x_range is None:
|
if x_range is None:
|
||||||
(
|
(
|
||||||
l,
|
xl,
|
||||||
_,
|
_,
|
||||||
lbar,
|
lbar,
|
||||||
rbar,
|
rbar,
|
||||||
_,
|
_,
|
||||||
r,
|
xr,
|
||||||
) = self.datums_range()
|
) = self.datums_range()
|
||||||
|
|
||||||
profiler(f'{self.name} got bars range')
|
profiler(f'{self.name} got bars range')
|
||||||
|
@ -447,7 +451,10 @@ class Viz(Struct):
|
||||||
# https://stackoverflow.com/a/29980872
|
# https://stackoverflow.com/a/29980872
|
||||||
ixrng = lbar, rbar = round(x_range[0]), round(x_range[1])
|
ixrng = lbar, rbar = round(x_range[0]), round(x_range[1])
|
||||||
|
|
||||||
if use_caching:
|
if (
|
||||||
|
use_caching
|
||||||
|
and self._mxmn_cache_enabled
|
||||||
|
):
|
||||||
cached_result = self._mxmns.get(ixrng)
|
cached_result = self._mxmns.get(ixrng)
|
||||||
if cached_result:
|
if cached_result:
|
||||||
if do_print:
|
if do_print:
|
||||||
|
@ -521,8 +528,31 @@ class Viz(Struct):
|
||||||
)
|
)
|
||||||
|
|
||||||
# cache result for input range
|
# cache result for input range
|
||||||
assert mxmn
|
ylow, yhi = mxmn
|
||||||
|
|
||||||
|
try:
|
||||||
|
prolly_anomaly: bool = (
|
||||||
|
(
|
||||||
|
abs(logf(ylow, 10)) > 16
|
||||||
|
if ylow
|
||||||
|
else False
|
||||||
|
)
|
||||||
|
or (
|
||||||
|
isnan(ylow) or isnan(yhi)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
except ValueError:
|
||||||
|
prolly_anomaly = True
|
||||||
|
|
||||||
|
if prolly_anomaly:
|
||||||
|
return None
|
||||||
|
|
||||||
|
if (
|
||||||
|
not isnan(ylow)
|
||||||
|
and not prolly_anomaly
|
||||||
|
):
|
||||||
self._mxmns[ixrng] = (read_slc, mxmn)
|
self._mxmns[ixrng] = (read_slc, mxmn)
|
||||||
|
|
||||||
self.vs.yrange = mxmn
|
self.vs.yrange = mxmn
|
||||||
profiler(f'yrange mxmn cacheing: {x_range} -> {mxmn}')
|
profiler(f'yrange mxmn cacheing: {x_range} -> {mxmn}')
|
||||||
return (
|
return (
|
||||||
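
The new `prolly_anomaly` guard above drops y-range results whose bounds are NaN or whose magnitude blows past ~1e±16, i.e. almost certainly corrupt shm data rather than a real price level. The same predicate as a standalone sketch (thresholds per the diff):

    from math import isnan, log as logf

    def probably_anomalous(ylow: float, yhi: float) -> bool:
        try:
            return (
                # |log10(ylow)| > 16 => magnitude beyond ~1e+/-16
                (abs(logf(ylow, 10)) > 16 if ylow else False)
                or isnan(ylow)
                or isnan(yhi)
            )
        except ValueError:
            # eg. log of a negative bound
            return True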
@@ -555,12 +585,12 @@ class Viz(Struct):
 Return a range tuple for the datums present in view.

 '''
-l, r = view_range or self.view_range()
+xl, xr = view_range or self.view_range()

 index_field: str = index_field or self.index_field
 if index_field == 'index':
-l: int = round(l)
-r: int = round(r)
+xl: int = round(xl)
+xr: int = round(xr)

 if array is None:
 array = self.shm.array
@@ -571,12 +601,12 @@ class Viz(Struct):

 # invalid view state
 if (
-r < l
-or l < 0
-or r < 0
+xr < xl
+or xl < 0
+or xr < 0
 or (
-l > last
-and r > last
+xl > last
+and xr > last
 )
 ):
 leftmost: int = first
@@ -586,12 +616,12 @@ class Viz(Struct):
 # determine first and last datums in view determined by
 # l -> r view range.
 rightmost = max(
-min(last, ceil(r)),
+min(last, ceil(xr)),
 first,
 )

 leftmost = min(
-max(first, floor(l)),
+max(first, floor(xl)),
 last,
 rightmost - 1,
 )
@@ -602,12 +632,12 @@ class Viz(Struct):
 self.vs.xrange = leftmost, rightmost

 return (
-l, # left x-in-view
+xl, # left x-in-view
 first, # first datum
 leftmost,
 rightmost,
 last, # last_datum
-r, # right-x-in-view
+xr, # right-x-in-view
 )

 def read(
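
These hunks only rename `l`/`r` to `xl`/`xr` (avoiding the ambiguous single-letter `l`); the in-view clamping math is unchanged. A quick standalone check of that clamping (same expressions, illustrative wrapper):

    from math import ceil, floor

    def clamp_view(
        xl: float,
        xr: float,
        first: int,
        last: int,
    ) -> tuple[int, int]:
        # right edge: never past the last datum, never left of the first
        rightmost = max(min(last, ceil(xr)), first)
        # left edge: never left of the first datum and always at least
        # one step left of the right edge
        leftmost = min(max(first, floor(xl)), last, rightmost - 1)
        return leftmost, rightmost

    assert clamp_view(-3.2, 10.7, first=0, last=8) == (0, 8)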
@@ -635,12 +665,12 @@ class Viz(Struct):
 profiler('self.shm.array READ')

 (
-l,
+xl,
 ifirst,
 lbar,
 rbar,
 ilast,
-r,
+xr,
 ) = self.datums_range(
 index_field=index_field,
 array=array,
@@ -685,8 +715,8 @@ class Viz(Struct):
 # a uniform time stamp step size?
 else:
 # get read-relative indices adjusting for master shm index.
-lbar_i = max(l, ifirst) - ifirst
-rbar_i = min(r, ilast) - ifirst
+lbar_i = max(xl, ifirst) - ifirst
+rbar_i = min(xr, ilast) - ifirst

 # NOTE: the slice here does NOT include the extra ``+ 1``
 # BUT the ``in_view`` slice DOES..
@@ -1214,18 +1244,25 @@ class Viz(Struct):

 '''
 # get most recent right datum index in-view
-l, start, datum_start, datum_stop, stop, r = self.datums_range()
+(
+xl,
+start,
+datum_start,
+datum_stop,
+stop,
+xr,
+) = self.datums_range()
 lasts = self.shm.array[-1]
 i_step = lasts['index'] # last index-specific step.
 i_step_t = lasts['time'] # last time step.

-# fqsn = self.flume.symbol.fqsn
+# fqme = self.flume.mkt.fqme

 # check if "last (is) in view" -> is a real-time update necessary?
 if self.index_field == 'index':
-liv = (r >= i_step)
+liv = (xr >= i_step)
 else:
-liv = (r >= i_step_t)
+liv = (xr >= i_step_t)

 # compute the first available graphic obj's x-units-per-pixel
 # TODO: make this not loop through all vizs each time!
@@ -1274,7 +1311,7 @@ class Viz(Struct):
 varz['i_last_append'] = i_step

 # print(
-# f'DOING APPEND => {fqsn}\n'
+# f'DOING APPEND => {fqme}\n'
 # f'i_step: {i_step}\n'
 # f'i_step_t: {i_step_t}\n'
 # f'glast: {glast}\n'
@@ -37,6 +37,9 @@ import pyqtgraph as pg
 from msgspec import field

 # from .. import brokers
+from ..accounting import (
+MktPair,
+)
 from ..data.feed import (
 open_feed,
 Feed,
@@ -62,7 +65,6 @@ from ._style import hcolor
 from ._fsp import (
 update_fsp_chart,
 start_fsp_displays,
-has_vlm,
 open_vlm_displays,
 )
 from ._forms import (
@@ -163,7 +165,7 @@ class DisplayState(Struct):
 Chart-local real-time graphics state container.

 '''
-fqsn: str
+fqme: str
 godwidget: GodWidget
 quotes: dict[str, Any]

@@ -223,7 +225,7 @@ async def increment_history_view(
 async for msg in istream:

 profiler = Profiler(
-msg=f'History chart cycle for: `{ds.fqsn}`',
+msg=f'History chart cycle for: `{ds.fqme}`',
 delayed=True,
 disabled=not pg_profile_enabled(),
 ms_threshold=ms_slower_then,
@@ -232,7 +234,7 @@ async def increment_history_view(

 # l3 = ds.viz.shm.array[-3:]
 # print(
-# f'fast step for {ds.flume.symbol.fqsn}:\n'
+# f'fast step for {ds.flume.mkt.fqme}:\n'
 # f'{list(l3["time"])}\n'
 # f'{l3}\n'
 # )
@@ -317,17 +319,17 @@ async def graphics_update_loop(

 dss: dict[str, DisplayState] = {}

-for fqsn, flume in feed.flumes.items():
+for fqme, flume in feed.flumes.items():
 ohlcv = flume.rt_shm
 hist_ohlcv = flume.hist_shm
-symbol = flume.symbol
-fqsn = symbol.fqsn
+mkt = flume.mkt
+fqme = mkt.fqme

 # update last price sticky
-fast_viz = fast_chart._vizs[fqsn]
+fast_viz = fast_chart._vizs[fqme]
 index_field = fast_viz.index_field
 fast_pi = fast_viz.plot
-last_price_sticky = fast_pi.getAxis('right')._stickies[fqsn]
+last_price_sticky = fast_pi.getAxis('right')._stickies[fqme]
 last_price_sticky.update_from_data(
 *ohlcv.array[-1][[
 index_field,
@@ -336,9 +338,9 @@ async def graphics_update_loop(
 )
 last_price_sticky.show()

-hist_viz = hist_chart._vizs[fqsn]
+hist_viz = hist_chart._vizs[fqme]
 slow_pi = hist_viz.plot
-hist_last_price_sticky = slow_pi.getAxis('right')._stickies[fqsn]
+hist_last_price_sticky = slow_pi.getAxis('right')._stickies[fqme]
 hist_last_price_sticky.update_from_data(
 *hist_ohlcv.array[-1][[
 index_field,
@@ -346,7 +348,7 @@ async def graphics_update_loop(
 ]]
 )

-vlm_chart = vlm_charts[fqsn]
+vlm_chart = vlm_charts[fqme]
 vlm_viz = vlm_chart._vizs.get('volume') if vlm_chart else None

 (
@@ -361,13 +363,13 @@ async def graphics_update_loop(

 last, volume = ohlcv.array[-1][['close', 'volume']]

-symbol = flume.symbol
+mkt = flume.mkt

 l1 = L1Labels(
 fast_pi,
 # determine precision/decimal lengths
-digits=symbol.tick_size_digits,
-size_digits=symbol.lot_size_digits,
+digits=mkt.price_tick_digits,
+size_digits=mkt.size_tick_digits,
 )

 # TODO:
@@ -381,8 +383,8 @@ async def graphics_update_loop(
 fast_chart.show()
 last_quote_s = time.time()

-dss[fqsn] = ds = linked.display_state = DisplayState(**{
-'fqsn': fqsn,
+dss[fqme] = ds = linked.display_state = DisplayState(**{
+'fqme': fqme,
 'godwidget': godwidget,
 'quotes': {},

@@ -450,15 +452,15 @@ async def graphics_update_loop(
 and quote_rate >= display_rate
 ):
 pass
-# log.warning(f'High quote rate {symbol.key}: {quote_rate}')
+# log.warning(f'High quote rate {mkt.fqme}: {quote_rate}')

 last_quote_s = time.time()

-for fqsn, quote in quotes.items():
-ds = dss[fqsn]
+for fqme, quote in quotes.items():
+ds = dss[fqme]
 ds.quotes = quote

-rt_pi, hist_pi = pis[fqsn]
+rt_pi, hist_pi = pis[fqme]

 # chart isn't active/shown so skip render cycle and
 # pause feed(s)
@@ -466,14 +468,14 @@ async def graphics_update_loop(
 fast_chart.linked.isHidden()
 or not rt_pi.isVisible()
 ):
-print(f'{fqsn} skipping update for HIDDEN CHART')
+print(f'{fqme} skipping update for HIDDEN CHART')
 fast_chart.pause_all_feeds()
 continue

 ic = fast_chart.view._in_interact
 if ic:
 fast_chart.pause_all_feeds()
-print(f'{fqsn} PAUSING DURING INTERACTION')
+print(f'{fqme} PAUSING DURING INTERACTION')
 await ic.wait()
 fast_chart.resume_all_feeds()

@@ -495,7 +497,7 @@ def graphics_update_cycle(
 ) -> None:

 profiler = Profiler(
-msg=f'Graphics loop cycle for: `{ds.fqsn}`',
+msg=f'Graphics loop cycle for: `{ds.fqme}`',
 disabled=not pg_profile_enabled(),
 ms_threshold=ms_slower_then,
 delayed=True,
@@ -509,7 +511,7 @@ def graphics_update_cycle(
 # - use a streaming minmax algo and drop the use of the
 # state-tracking ``multi_maxmin()`` routine from above?

-fqsn = ds.fqsn
+fqme = ds.fqme
 chart = ds.chart
 vlm_chart = ds.vlm_chart

@@ -548,7 +550,7 @@ def graphics_update_cycle(
 # the true range? This way you can slap in orders outside the
 # current L1 (only) book range.
 main_vb: ChartView = main_viz.plot.vb
-this_viz: Viz = chart._vizs[fqsn]
+this_viz: Viz = chart._vizs[fqme]
 this_vb: ChartView = this_viz.plot.vb
 this_yr = this_vb._yrange
 if this_yr:
@@ -600,7 +602,7 @@ def graphics_update_cycle(
 profiler,
 )

-profiler(f'{fqsn} `multi_maxmin()` call')
+profiler(f'{fqme} `multi_maxmin()` call')

 # iterate frames of ticks-by-type such that we only update graphics
 # using the last update per type where possible.
@@ -828,7 +830,7 @@ def graphics_update_cycle(

 # update any overlayed fsp flows
 if (
-curve_name != fqsn
+curve_name != fqme
 ):
 update_fsp_chart(
 viz,
@@ -939,7 +941,7 @@ def graphics_update_cycle(
 liv and do_rt_update
 or do_px_step
 )
-and curve_name not in {fqsn}
+and curve_name not in {fqme}
 ):
 update_fsp_chart(
 viz,
@@ -1008,7 +1010,7 @@ async def link_views_with_region(
 hist_pi.addItem(region, ignoreBounds=True)
 region.setOpacity(6/16)

-viz = rt_chart.get_viz(flume.symbol.fqsn)
+viz = rt_chart.get_viz(flume.mkt.fqme)
 assert viz
 index_field = viz.index_field

@@ -1035,7 +1037,7 @@ async def link_views_with_region(
 # HFT/real-time chart.
 rng = mn, mx = viewRange[0]

-# hist_viz = hist_chart.get_viz(flume.symbol.fqsn)
+# hist_viz = hist_chart.get_viz(flume.mkt.fqme)
 # hist = hist_viz.shm.array[-3:]
 # print(
 # f'mn: {mn}\n'
@@ -1153,7 +1155,7 @@ _quote_throttle_rate: int = 60 - 6

 async def display_symbol_data(
 godwidget: GodWidget,
-fqsns: list[str],
+fqmes: list[str],
 loglevel: str,
 order_mode_started: trio.Event,

@@ -1176,9 +1178,9 @@ async def display_symbol_data(
 # group_key=loading_sym_key,
 # )

-for fqsn in fqsns:
+for fqme in fqmes:
 loading_sym_key = sbar.open_status(
-f'loading {fqsn} ->',
+f'loading {fqme} ->',
 group_key=True
 )

@@ -1197,7 +1199,7 @@ async def display_symbol_data(
 # TODO: we should be able to increase this if we use some
 # `mypyc` speedups elsewhere? 22ish seems to be the sweet
 # spot for single-feed chart.
-num_of_feeds = len(fqsns)
+num_of_feeds = len(fqmes)
 mx: int = 22
 if num_of_feeds > 1:
 # there will be more ctx switches with more than 1 feed so we
@@ -1213,20 +1215,19 @@ async def display_symbol_data(

 feed: Feed
 async with open_feed(
-fqsns,
+fqmes,
 loglevel=loglevel,
 tick_throttle=cycles_per_feed,

 ) as feed:

 # use expanded contract symbols passed back from feed layer.
-fqsns = list(feed.flumes.keys())
+fqmes = list(feed.flumes.keys())

 # step_size_s = 1
 # tf_key = tf_in_1s[step_size_s]
 godwidget.window.setWindowTitle(
-f'{fqsns} '
-# f'tick:{symbol.tick_size} '
+f'{fqmes} '
+# f'tick:{mkt.tick_size} '
 # f'step:{tf_key} '
 )
 # generate order mode side-pane UI
@@ -1236,8 +1237,8 @@ async def display_symbol_data(
 godwidget.pp_pane = pp_pane

 # create top history view chart above the "main rt chart".
-rt_linked = godwidget.rt_linked
-hist_linked = godwidget.hist_linked
+rt_linked: LinkedSplits = godwidget.rt_linked
+hist_linked: LinkedSplits = godwidget.hist_linked

 # NOTE: here we insert the slow-history chart set into
 # the fast chart's splitter -> so it's a splitter of charts
@@ -1277,22 +1278,21 @@ async def display_symbol_data(

 # for the "first"/selected symbol we create new chart widgets
 # and sub-charts for FSPs
-fqsn, flume = fitems[0]
+fqme, flume = fitems[0]

 # TODO NOTE: THIS CONTROLS WHAT SYMBOL IS USED FOR ORDER MODE
 # SUBMISSIONS, we need to make this switch based on selection.
-rt_linked._symbol = flume.symbol
-hist_linked._symbol = flume.symbol
+rt_linked.set_mkt_info(flume.mkt)
+hist_linked.set_mkt_info(flume.mkt)

 ohlcv: ShmArray = flume.rt_shm
 hist_ohlcv: ShmArray = flume.hist_shm

-symbol = flume.symbol
-brokername = symbol.brokers[0]
-fqsn = symbol.fqsn
+mkt: MktPair = flume.mkt
+fqme = mkt.fqme

 hist_chart = hist_linked.plot_ohlc_main(
-symbol,
+mkt,
 hist_ohlcv,
 flume,
 # in the case of history chart we explicitly set `False`
@@ -1306,15 +1306,15 @@ async def display_symbol_data(

 # ensure the last datum graphic is generated
 # for zoom-interaction purposes.
-hist_viz = hist_chart.get_viz(fqsn)
-hist_viz.draw_last(array_key=fqsn)
-pis.setdefault(fqsn, [None, None])[1] = hist_chart.plotItem
+hist_viz = hist_chart.get_viz(fqme)
+hist_viz.draw_last(array_key=fqme)
+pis.setdefault(fqme, [None, None])[1] = hist_chart.plotItem

 # don't show when not focussed
 hist_linked.cursor.always_show_xlabel = False

 rt_chart = rt_linked.plot_ohlc_main(
-symbol,
+mkt,
 ohlcv,
 flume,
 # in the case of history chart we explicitly set `False`
@@ -1324,8 +1324,8 @@ async def display_symbol_data(
 'last_step_color': 'original',
 },
 )
-rt_viz = rt_chart.get_viz(fqsn)
-pis.setdefault(fqsn, [None, None])[0] = rt_chart.plotItem
+rt_viz = rt_chart.get_viz(fqme)
+pis.setdefault(fqme, [None, None])[0] = rt_chart.plotItem

 # for pause/resume on mouse interaction
 rt_chart.feed = feed
@@ -1337,11 +1337,10 @@ async def display_symbol_data(
 None | ChartPlotWidget
 ] = {}.fromkeys(feed.flumes)
 if (
-not symbol.broker_info[brokername].get('no_vlm', False)
-and has_vlm(ohlcv)
+flume.has_vlm()
 and vlm_chart is None
 ):
-vlm_chart = vlm_charts[fqsn] = await ln.start(
+vlm_chart = vlm_charts[fqme] = await ln.start(
 open_vlm_displays,
 rt_linked,
 flume,
@@ -1375,26 +1374,26 @@ async def display_symbol_data(
 godwidget.resize_all()
 await trio.sleep(0)

-for fqsn, flume in fitems[1:]:
+for fqme, flume in fitems[1:]:
 # get a new color from the palette
 bg_chart_color, bg_last_bar_color = next(palette)

 ohlcv: ShmArray = flume.rt_shm
 hist_ohlcv: ShmArray = flume.hist_shm

-symbol = flume.symbol
-fqsn = symbol.fqsn
+mkt = flume.mkt
+fqme = mkt.fqme

 hist_pi = hist_chart.overlay_plotitem(
-name=fqsn,
-axis_title=fqsn,
+name=fqme,
+axis_title=fqme,
 )

 hist_viz = hist_chart.draw_curve(
-fqsn,
+fqme,
 hist_ohlcv,
 flume,
-array_key=fqsn,
+array_key=fqme,
 overlay=hist_pi,
 pi=hist_pi,
 is_ohlc=True,
@@ -1405,26 +1404,26 @@ async def display_symbol_data(

 # ensure the last datum graphic is generated
 # for zoom-interaction purposes.
-hist_viz.draw_last(array_key=fqsn)
+hist_viz.draw_last(array_key=fqme)

 # TODO: we need a better API to do this..
 # specially store ref to shm for lookup in display loop
 # since only a placeholder of `None` is entered in
 # ``.draw_curve()``.
-hist_viz = hist_chart._vizs[fqsn]
+hist_viz = hist_chart._vizs[fqme]
 assert hist_viz.plot is hist_pi
-pis.setdefault(fqsn, [None, None])[1] = hist_pi
+pis.setdefault(fqme, [None, None])[1] = hist_pi

 rt_pi = rt_chart.overlay_plotitem(
-name=fqsn,
-axis_title=fqsn,
+name=fqme,
+axis_title=fqme,
 )

 rt_viz = rt_chart.draw_curve(
-fqsn,
+fqme,
 ohlcv,
 flume,
-array_key=fqsn,
+array_key=fqme,
 overlay=rt_pi,
 pi=rt_pi,
 is_ohlc=True,
@@ -1437,9 +1436,9 @@ async def display_symbol_data(
 # specially store ref to shm for lookup in display loop
 # since only a placeholder of `None` is entered in
 # ``.draw_curve()``.
-rt_viz = rt_chart._vizs[fqsn]
+rt_viz = rt_chart._vizs[fqme]
 assert rt_viz.plot is rt_pi
-pis.setdefault(fqsn, [None, None])[0] = rt_pi
+pis.setdefault(fqme, [None, None])[0] = rt_pi

 rt_chart.setFocus()

@@ -1455,7 +1454,7 @@ async def display_symbol_data(

 # greedily do a view range default and pane resizing
 # on startup before loading the order-mode machinery.
-for fqsn, flume in feed.flumes.items():
+for fqme, flume in feed.flumes.items():

 # size view to data prior to order mode init
 rt_chart.main_viz.default_view(
@@ -1473,6 +1472,7 @@ async def display_symbol_data(

 hist_chart.main_viz.default_view(
 do_min_bars=True,
+do_ds=False,
 )
 hist_linked.graphics_cycle()

@@ -1497,13 +1497,13 @@ async def display_symbol_data(
 )

 # boot order-mode
-order_ctl_symbol: str = fqsns[0]
+order_ctl_fqme: str = fqmes[0]
 mode: OrderMode
 async with (
 open_order_mode(
 feed,
 godwidget,
-fqsns[0],
+order_ctl_fqme,
 order_mode_started,
 loglevel=loglevel
 ) as mode
@@ -1511,7 +1511,7 @@ async def display_symbol_data(

 rt_linked.mode = mode

-rt_viz = rt_chart.get_viz(order_ctl_symbol)
+rt_viz = rt_chart.get_viz(order_ctl_fqme)
 rt_viz.plot.setFocus()

 # default view adjuments and sidepane alignment
@@ -1524,7 +1524,7 @@ async def display_symbol_data(
 hist_chart.main_viz.default_view(
 do_min_bars=True,
 )
-hist_viz = hist_chart.get_viz(fqsn)
+hist_viz = hist_chart.get_viz(fqme)
 await trio.sleep(0)

 godwidget.resize_all()
@@ -29,7 +29,6 @@ from typing import (
 Any,
 )

-import numpy as np
 import msgspec
 import tractor
 import pyqtgraph as pg
@@ -46,7 +45,7 @@ from ..data._sharedmem import (
 try_read,
 )
 from ..data.feed import Flume
-from ..data._source import Symbol
+from ..accounting import MktPair
 from ._chart import (
 ChartPlotWidget,
 LinkedSplits,
@@ -72,14 +71,6 @@ from .._profile import Profiler
 log = get_logger(__name__)


-def has_vlm(ohlcv: ShmArray) -> bool:
-# make sure that the instrument supports volume history
-# (sometimes this is not the case for some commodities and
-# derivatives)
-vlm = ohlcv.array['volume']
-return not bool(np.all(np.isin(vlm, -1)) or np.all(np.isnan(vlm)))


 def update_fsp_chart(
 viz,
 graphics_name: str,
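
The module-level `has_vlm()` helper deleted above moves behind the feed layer as `Flume.has_vlm()` (used by the display code earlier in this diff). For reference, a sketch of the equivalent predicate, assuming the same "-1s or NaNs mean no volume" convention:

    import numpy as np

    def has_vlm(volume: np.ndarray) -> bool:
        # a feed lacking real volume history fills the column
        # with -1s or NaNs; anything else counts as usable data.
        return not bool(
            np.all(np.isin(volume, -1))
            or np.all(np.isnan(volume))
        )

    assert not has_vlm(np.array([-1., -1., -1.]))
    assert has_vlm(np.array([0., 10., 42.]))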
@@ -398,7 +389,7 @@ class FspAdmin:
 portal: tractor.Portal,
 complete: trio.Event,
 started: trio.Event,
-fqsn: str,
+fqme: str,
 dst_fsp_flume: Flume,
 conf: dict,
 target: Fsp,
@@ -418,7 +409,7 @@ class FspAdmin:
 cascade,

 # data feed key
-fqsn=fqsn,
+fqme=fqme,

 # TODO: pass `Flume.to_msg()`s here?
 # mems
@@ -436,7 +427,7 @@ class FspAdmin:
 in self._flow_registry.items()
 ],

-) as (ctx, last_index),
+) as (ctx, _),
 ctx.open_stream() as stream,
 ):

@@ -444,7 +435,7 @@ class FspAdmin:

 # register output data
 self._registry[
-(fqsn, ns_path)
+(fqme, ns_path)
 ] = (
 stream,
 dst_fsp_flume.rt_shm,
@@ -484,26 +475,42 @@ class FspAdmin:

 ) -> (Flume, trio.Event):

-fqsn = self.flume.symbol.fqsn
+src_mkt: MktPair = self.flume.mkt
+fqme: str = src_mkt.get_fqme(delim_char='')

 # allocate an output shm array
 key, dst_shm, opened = maybe_mk_fsp_shm(
-fqsn,
+fqme,
 target=target,
 readonly=True,
 )

-portal = self.cluster.get(worker_name) or self.rr_next_portal()
-provider_tag = portal.channel.uid
+portal: tractor.Portal = (
+self.cluster.get(worker_name)
+or self.rr_next_portal()
+)

-symbol = Symbol(
-key=key,
-broker_info={
-provider_tag: {'asset_type': 'fsp'},
-},
+# TODO: this should probably be turned into a
+# ``Cascade`` type which describes the routing
+# of an fsp's IO in terms of sinc -> source
+# shm/IPC endpoints?
+mkt = MktPair(

+# make this a couple addrs encapsing
+# the flume routing?
+src=src_mkt.dst,
+dst=target.name,

+# make this a precision / rounding value?
+price_tick=src_mkt.price_tick,
+size_tick=src_mkt.size_tick,

+bs_mktid=target.name,
+broker='piker',
+_atype='fsp',
 )
 dst_fsp_flume = Flume(
-symbol=symbol,
+mkt=mkt,
 _rt_shm_token=dst_shm.token,
 first_quote={},
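
The replacement hunk above retires the legacy `Symbol` shim: the FSP's output flume now gets a real `MktPair` whose `src` is the source market's destination asset and whose `dst` is the FSP's name, inheriting tick precision from the source market. A rough shape sketch with a stand-in struct (piker's actual `MktPair` carries more fields; the class below is illustrative only):

    from dataclasses import dataclass
    from decimal import Decimal

    @dataclass
    class MktPairSketch:
        src: str             # input market's dst asset, eg. 'usdt'
        dst: str             # the fsp name, eg. 'vwap'
        price_tick: Decimal  # display precision inherited from src mkt
        size_tick: Decimal
        bs_mktid: str        # "backend-specific" market id: the fsp name again
        broker: str = 'piker'  # fsp output is locally "brokered"

    mkt = MktPairSketch(
        src='usdt',
        dst='vwap',
        price_tick=Decimal('0.01'),
        size_tick=Decimal('0.001'),
        bs_mktid='vwap',
    )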
@@ -123,10 +123,10 @@ class LevelLine(pg.InfiniteLine):
 self._track_cursor: bool = False
 self.always_show_labels = always_show_labels

-self._on_drag_start = lambda l: None
-self._on_drag_end = lambda l: None
+self._on_drag_start = lambda lvln: None
+self._on_drag_end = lambda lvln: None

-self._y_incr_mult = 1 / chart.linked.symbol.tick_size
+self._y_incr_mult = float(1 / chart.linked.mkt.size_tick)
 self._right_end_sc: float = 0

 # use px caching
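
Two small things in this hunk: the lambda args are renamed away from the ambiguous single-letter `l`, and the reciprocal is wrapped in `float()` since `MktPair` tick sizes are (presumably) `Decimal`s and `1 / Decimal(...)` would otherwise leak `Decimal` arithmetic into the drag-event hot path. Illustrative values:

    from decimal import Decimal

    size_tick = Decimal('0.001')               # assumed tick size
    assert isinstance(1 / size_tick, Decimal)  # Decimal propagates
    mult = float(1 / size_tick)                # 1000.0: plain float for per-event math
    assert mult == 1000.0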
@@ -104,4 +104,6 @@ async def notify_from_ems_status_msg(
 log.runtime(result)

 except FileNotFoundError:
-log.warn('Tried to send a notification but \'notify-send\' not present')
+log.warn(
+'Tried to send a notification but \'notify-send\' not present'
+)
@@ -14,10 +14,10 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.

-"""
-Position info and display
+'''
+Position (pos) info and display to track ur PnLz B)

-"""
+'''
 from __future__ import annotations
 from copy import copy
 from dataclasses import dataclass
@@ -45,8 +45,17 @@ from ..calc import (
 pnl,
 puterize,
 )
-from ..clearing._allocate import Allocator
-from ..pp import Position
+from ..accounting import (
+Allocator,
+MktPair,
+)
+from ..accounting import (
+Position,
+)
+from ..accounting._mktinfo import (
+_derivs,
+)

 from ..data._normalize import iterticks
 from ..data.feed import (
 Feed,
@@ -85,7 +94,7 @@ async def update_pnl_from_feed(

 pp: PositionTracker = order_mode.current_pp
 live: Position = pp.live_pp
-key: str = live.symbol.front_fqsn()
+key: str = live.mkt.fqme

 log.info(f'Starting pnl display for {pp.alloc.account}')

@@ -119,7 +128,7 @@ async def update_pnl_from_feed(

 # watch out for wrong quote msg-data if you muck
 # with backend feed subs code..
-# assert sym == quote['fqsn']
+# assert sym == quote['fqme']

 for tick in iterticks(quote, types):
 # print(f'{1/period} Hz')
@@ -238,7 +247,7 @@ class SettingsPane:
 # a ``brokerd`) then error and switch back to the last
 # selection.
 if tracker is None:
-sym = old_tracker.charts[0].linked.symbol.key
+sym: str = old_tracker.charts[0].linked.mkt.fqme
 log.error(
 f'Account `{account_name}` can not be set for {sym}'
 )
@@ -409,9 +418,10 @@ class SettingsPane:

 '''
 mode = self.order_mode
-sym = mode.chart.linked.symbol
+mkt: MktPair = mode.chart.linked.mkt
 size = tracker.live_pp.size
-flume: Feed = mode.feed.flumes[sym.fqsn]
+fqme: str = mkt.fqme
+flume: Feed = mode.feed.flumes[fqme]
 pnl_value = 0

 if size:
@@ -424,9 +434,8 @@ class SettingsPane:

 # maybe start update task
 global _pnl_tasks
-fqsn = sym.front_fqsn()
-if fqsn not in _pnl_tasks:
-_pnl_tasks[fqsn] = True
+if fqme not in _pnl_tasks:
+_pnl_tasks[fqme] = True
 self.order_mode.nursery.start_soon(
 update_pnl_from_feed,
 flume,
@@ -495,14 +504,6 @@ def pp_line(
 return line


-_derivs = (
-'future',
-'continuous_future',
-'option',
-'futures_option',
-)


 # TODO: move into annoate module?
 def mk_level_marker(
 chart: ChartPlotWidget,
@@ -557,7 +558,7 @@ class Nav(Struct):

 '''
 for key, chart in self.charts.items():
-size_digits = size_digits or chart.linked.symbol.lot_size_digits
+size_digits = size_digits or chart.linked.mkt.size_tick_digits
 line = self.lines.get(key)
 level_marker = self.level_markers[key]
 pp_label = self.pp_labels[key]
@@ -864,7 +865,7 @@ class PositionTracker:
 alloc = self.alloc

 # update allocator settings
-asset_type = pp.symbol.type_key
+asset_type = pp.mkt.type_key

 # specific configs by asset class / type
 if asset_type in _derivs:
@@ -639,10 +639,10 @@ class SearchWidget(QtWidgets.QWidget):
 godw = self.godwidget

 # first entry in the cache is the current symbol(s)
-fqsns = set()
-for multi_fqsns in list(godw._chart_cache):
-for fqsn in set(multi_fqsns):
-fqsns.add(fqsn)
+fqmes = set()
+for multi_fqmes in list(godw._chart_cache):
+for fqme in set(multi_fqmes):
+fqmes.add(fqme)

 if keep_current_item_selected:
 sel = self.view.selectionModel()
@@ -650,7 +650,7 @@ class SearchWidget(QtWidgets.QWidget):

 self.view.set_section_entries(
 'cache',
-list(fqsns),
+list(fqmes),
 # remove all other completion results except for cache
 clear_all=only,
 reverse=True,
@@ -722,18 +722,18 @@ class SearchWidget(QtWidgets.QWidget):
 cidx, provider, symbol = value
 godw = self.godwidget

-fqsn = f'{symbol}.{provider}'
-log.info(f'Requesting symbol: {fqsn}')
+fqme = f'{symbol}.{provider}'
+log.info(f'Requesting symbol: {fqme}')

 # assert provider in symbol
 await godw.load_symbols(
-fqsns=[fqsn],
+fqmes=[fqme],
 loglevel='info',
 )

 # fully qualified symbol name (SNS i guess is what we're
 # making?)
-fqsn = '.'.join([symbol, provider]).lower()
+fqme = '.'.join([symbol, provider]).lower()

 if clear_to_cache:

@@ -743,7 +743,7 @@ class SearchWidget(QtWidgets.QWidget):
 # LIFO order. this is normally only done internally by
 # the chart on new symbols being loaded into memory
 godw.set_chart_symbols(
-(fqsn,), (
+(fqme,), (
 godw.hist_linked,
 godw.rt_linked,
 )
@@ -753,7 +753,7 @@ class SearchWidget(QtWidgets.QWidget):
 )

 self.bar.focus()
-return fqsn
+return fqme

 def space_dims(self) -> tuple[float, float]:
 '''
@@ -23,7 +23,10 @@ WARNING: this code likely doesn't work at all (yet)
 """
 import numpy as np
 import pyqtgraph as pg
-from PyQt5 import QtCore, QtGui, QtWidgets
+from PyQt5 import (
+QtCore,
+QtWidgets,
+)

 from .quantdom.charts import CenteredTextItem
 from .quantdom.base import Quotes
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -36,13 +36,18 @@ import trio
 from PyQt5.QtCore import Qt

 from .. import config
-from ..pp import Position
-from ..clearing._client import open_ems, OrderBook
-from ..clearing._allocate import (
+from ..accounting import (
+Allocator,
+Position,
 mk_allocator,
+MktPair,
+Symbol,
+)
+from ..clearing._client import (
+open_ems,
+OrderClient,
 )
 from ._style import _font
-from ..data._source import Symbol
 from ..data.feed import (
 Feed,
 Flume,
@@ -89,7 +94,7 @@ class Dialog(Struct):
 order: Order
 symbol: str
 lines: list[LevelLine]
-last_status_close: Callable = lambda: None
+last_status_close: Callable | None = None
 msgs: dict[str, dict] = {}
 fills: dict[str, Any] = {}

@@ -120,7 +125,7 @@ class OrderMode:
 chart: ChartPlotWidget # type: ignore # noqa
 hist_chart: ChartPlotWidget # type: ignore # noqa
 nursery: trio.Nursery # used by ``ui._position`` code?
-book: OrderBook
+client: OrderClient
 lines: LineEditor
 arrows: ArrowEditor
 multistatus: MultiStatus
@@ -284,15 +289,29 @@ class OrderMode:
 # since that's illogical / a no-op.
 return

-symbol = self.chart.linked.symbol
+mkt: MktPair = self.chart.linked.mkt

+# NOTE : we could also use instead,
+# mkt.quantize(price, quantity_type='price')
+# but it returns a Decimal and it's probably gonna
+# be slower?
+# TODO: should we be enforcing this precision
+# at a different layer in the stack? right now
+# any precision error will literally be relayed
+# all the way back from the backend.

+price = round(
+price,
+ndigits=mkt.price_tick_digits,
+)

 order = self._staged_order = Order(
 action=action,
 price=price,
 account=self.current_pp.alloc.account,
 size=0,
-symbol=symbol,
-brokers=symbol.brokers,
+symbol=mkt.fqme,
+brokers=[mkt.broker],
 oid='', # filled in on submit
 exec_mode=trigger_type, # dark or live
 )
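
The added rounding clamps a staged order's price to the market's tick precision before it ever leaves the UI; the inline comments weigh the fast float `round()` against `mkt.quantize()` returning a `Decimal`. Both paths side by side (values and the 2-digit tick are illustrative; `quantize` below is stdlib `Decimal.quantize`, not necessarily piker's method):

    from decimal import Decimal

    price_tick = Decimal('0.01')  # assumed tick size
    price_tick_digits = 2         # decimal digits implied by the tick

    raw = 4312.379
    fast = round(raw, ndigits=price_tick_digits)   # float in, float out
    slow = Decimal(str(raw)).quantize(price_tick)  # exact, but Decimal out

    assert fast == 4312.38
    assert slow == Decimal('4312.38')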
@@ -349,12 +368,17 @@ class OrderMode:
 '''
 if not order:
 staged: Order = self._staged_order

 # apply order fields for ems
 oid = str(uuid.uuid4())
-order = staged.copy()
-order.oid = oid

-order.symbol = order.symbol.front_fqsn()
+# NOTE: we have to str-ify `MktPair` first since we can't
+# cast to it without being mega explicit with
+# `msgspec.Struct`, which we're not yet..
+order: Order = staged.copy({
+'symbol': str(staged.symbol),
+'oid': oid,
+})

 lines = self.lines_from_order(
 order,
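
`staged.copy({...})` above is piker's own `Struct.copy()` helper which takes an override mapping; with vanilla `msgspec` the same copy-with-overrides can be expressed via `msgspec.structs.replace` (a sketch, not piker's actual helper):

    import uuid
    import msgspec

    class Order(msgspec.Struct):
        symbol: str
        oid: str = ''
        size: float = 0.0

    staged = Order(symbol='btcusdt.binance')
    order = msgspec.structs.replace(
        staged,
        symbol=str(staged.symbol),  # str-ify in case a MktPair snuck in
        oid=str(uuid.uuid4()),
    )
    assert order is not staged and order.oid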
@@ -401,13 +425,13 @@ class OrderMode:

 # send order cmd to ems
 if send_msg:
-self.book.send(order)
+self.client.send_nowait(order)
 else:
 # just register for control over this order
 # TODO: some kind of mini-perms system here based on
 # an out-of-band tagging/auth sub-sys for multiplayer
 # order control?
-self.book._sent_orders[order.oid] = order
+self.client._sent_orders[order.oid] = order

 return dialog

@@ -428,14 +452,23 @@ class OrderMode:
 line: LevelLine,

 ) -> None:
+'''
+Retreive the level line's end state, compute the size
+and price for the new price-level, send an update msg to
+the EMS, adjust mirrored level line on secondary chart.

-level = line.value()
+'''
+mktinfo: MktPair = self.chart.linked.mkt
+level = round(
+line.value(),
+ndigits=mktinfo.price_tick_digits,
+)
 # updated by level change callback set in ``.new_line_from_order()``
 dialog = line.dialog
 size = dialog.order.size

 # NOTE: sends modified order msg to EMS
-self.book.send_update(
+self.client.update_nowait(
 uuid=line.dialog.uuid,
 price=level,
 size=size,
@@ -465,7 +498,9 @@ class OrderMode:
 # a submission is the start of a new order dialog
 dialog = self.dialogs[uuid]
 dialog.lines = lines
-dialog.last_status_close()
+cls: Callable | None = dialog.last_status_close
+if cls:
+cls()

 for line in lines:

@@ -517,7 +552,7 @@ class OrderMode:
 # XXX: seems to fail on certain types of races?
 # assert len(lines) == 2
 if lines:
-flume: Flume = self.feed.flumes[chart.linked.symbol.fqsn]
+flume: Flume = self.feed.flumes[chart.linked.mkt.fqme]
 _, _, ratio = flume.get_ds_info()

 for chart, shm in [
@@ -551,7 +586,7 @@ class OrderMode:

 ) -> None:

-msg = self.book._sent_orders.pop(uuid, None)
+msg = self.client._sent_orders.pop(uuid, None)

 if msg is not None:
 self.lines.remove_line(uuid=uuid)
@@ -607,7 +642,7 @@ class OrderMode:
 dialog.last_status_close = cancel_status_close

 ids.append(oid)
-self.book.cancel(uuid=oid)
+self.client.cancel_nowait(uuid=oid)

 return ids

@@ -629,17 +664,21 @@ class OrderMode:
 and src not in ('dark', 'paperboi')
 and src not in symbol
 ):
-fqsn = symbol + '.' + src
+fqme = symbol + '.' + src
 brokername = src
 else:
-fqsn = symbol
-*head, brokername = fqsn.rsplit('.')
+fqme = symbol
+*head, brokername = fqme.rsplit('.')

 # fill out complex fields
 order.oid = str(order.oid)
 order.brokers = [brokername]
-order.symbol = Symbol.from_fqsn(
-fqsn=fqsn,
+# TODO: change this over to `MktPair`, but it's
+# gonna be tough since we don't have any such data
+# really in our clearing msg schema..
+order.symbol = Symbol.from_fqme(
+fqsn=fqme,
 info={},
 )
 dialog = self.submit_order(
@@ -655,7 +694,7 @@ async def open_order_mode(

 feed: Feed,
 godw: GodWidget,
-fqsn: str,
+fqme: str,
 started: trio.Event,
 loglevel: str = 'info'

@@ -674,19 +713,22 @@ async def open_order_mode(
 multistatus = chart.window().status_bar
 done = multistatus.open_status('starting order mode..')

-book: OrderBook
+client: OrderClient
 trades_stream: tractor.MsgStream

 # The keys in this dict **must** be in set our set of "normalized"
 # symbol names (i.e. the same names you'd get back in search
 # results) in order for position msgs to correctly trigger the
 # display of a position indicator on screen.
-position_msgs: dict[str, list[BrokerdPosition]]
+position_msgs: dict[str, dict[str, BrokerdPosition]]

 # spawn EMS actor-service
 async with (
-open_ems(fqsn, loglevel=loglevel) as (
-book,
+open_ems(
+fqme,
+loglevel=loglevel,
+) as (
+client,
 trades_stream,
 position_msgs,
 brokerd_accounts,
@@ -695,21 +737,21 @@ async def open_order_mode(
 trio.open_nursery() as tn,

 ):
-log.info(f'Opening order mode for {fqsn}')
+log.info(f'Opening order mode for {fqme}')

 # annotations editors
 lines = LineEditor(godw=godw)
 arrows = ArrowEditor(godw=godw)

-# symbol id
-symbol = chart.linked.symbol
+# market endpoint info
+mkt: MktPair = chart.linked.mkt

 # map of per-provider account keys to position tracker instances
 trackers: dict[str, PositionTracker] = {}

 # load account names from ``brokers.toml``
 accounts_def = config.load_accounts(
-providers=symbol.brokers
+providers=[mkt.broker],
 )

 # XXX: ``brokerd`` delivers a set of account names that it
@@ -732,17 +774,17 @@ async def open_order_mode(

 # net-zero pp
 startup_pp = Position(
-symbol=symbol,
+mkt=mkt,
 size=0,
 ppu=0,

 # XXX: BLEH, do we care about this on the client side?
-bsuid=symbol,
+bs_mktid=mkt.key,
 )

 # allocator config
-alloc = mk_allocator(
-symbol=symbol,
+alloc: Allocator = mk_allocator(
+mkt=mkt,
 account=account_name,

 # if this startup size is greater the allocator limit,
@@ -813,7 +855,7 @@ async def open_order_mode(
 chart,
 hist_chart,
 tn,
-book,
+client,
 lines,
 arrows,
 multistatus,
@@ -861,12 +903,14 @@ async def open_order_mode(
 # Pack position messages by account, should only be one-to-one.
 # NOTE: requires the backend exactly specifies
 # the expected symbol key in its positions msg.
-for (broker, acctid), msgs in position_msgs.items():
-for msg in msgs:
-log.info(f'Loading pp for {acctid}@{broker}:\n{pformat(msg)}')
+for (
+(broker, acctid),
+pps_by_fqme
+) in position_msgs.items():
+for msg in pps_by_fqme.values():
 await process_trade_msg(
 mode,
-book,
+client,
 msg,
 )

|
@ -900,7 +944,7 @@ async def open_order_mode(
|
||||||
|
|
||||||
await process_trade_msg(
|
await process_trade_msg(
|
||||||
mode,
|
mode,
|
||||||
book,
|
client,
|
||||||
msg,
|
msg,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -908,7 +952,7 @@ async def open_order_mode(
|
||||||
process_trades_and_update_ui,
|
process_trades_and_update_ui,
|
||||||
trades_stream,
|
trades_stream,
|
||||||
mode,
|
mode,
|
||||||
book,
|
client,
|
||||||
)
|
)
|
||||||
|
|
||||||
yield mode
|
yield mode
|
||||||
|
@ -918,7 +962,7 @@ async def process_trades_and_update_ui(
|
||||||
|
|
||||||
trades_stream: tractor.MsgStream,
|
trades_stream: tractor.MsgStream,
|
||||||
mode: OrderMode,
|
mode: OrderMode,
|
||||||
book: OrderBook,
|
client: OrderClient,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
|
||||||
|
@ -927,16 +971,22 @@ async def process_trades_and_update_ui(
|
||||||
async for msg in trades_stream:
|
async for msg in trades_stream:
|
||||||
await process_trade_msg(
|
await process_trade_msg(
|
||||||
mode,
|
mode,
|
||||||
book,
|
client,
|
||||||
msg,
|
msg,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
async def process_trade_msg(
|
async def process_trade_msg(
|
||||||
mode: OrderMode,
|
mode: OrderMode,
|
||||||
book: OrderBook,
|
client: OrderClient,
|
||||||
msg: dict,
|
msg: dict,
|
||||||
|
|
||||||
|
# emit linux DE notification?
|
||||||
|
# XXX: currently my experience with `dunst` is that this
|
||||||
|
# is horrible slow and clunky and invasive and noisy so i'm
|
||||||
|
# disabling it for now until we find a better UX solution..
|
||||||
|
do_notify: bool = False,
|
||||||
|
|
||||||
) -> tuple[Dialog, Status]:
|
) -> tuple[Dialog, Status]:
|
||||||
|
|
||||||
fmsg = pformat(msg)
|
fmsg = pformat(msg)
|
||||||
|
@ -946,18 +996,24 @@ async def process_trade_msg(
|
||||||
if name in (
|
if name in (
|
||||||
'position',
|
'position',
|
||||||
):
|
):
|
||||||
sym = mode.chart.linked.symbol
|
sym: MktPair = mode.chart.linked.mkt
|
||||||
pp_msg_symbol = msg['symbol'].lower()
|
pp_msg_symbol = msg['symbol'].lower()
|
||||||
fqsn = sym.front_fqsn()
|
fqme = sym.fqme
|
||||||
broker, key = sym.front_feed()
|
broker = sym.broker
|
||||||
if (
|
if (
|
||||||
pp_msg_symbol == fqsn
|
pp_msg_symbol == fqme
|
||||||
or pp_msg_symbol == fqsn.removesuffix(f'.{broker}')
|
or pp_msg_symbol == fqme.removesuffix(f'.{broker}')
|
||||||
):
|
):
|
||||||
log.info(f'{fqsn} matched pp msg: {fmsg}')
|
log.info(
|
||||||
|
f'Loading position for `{fqme}`:\n'
|
||||||
|
f'{fmsg}'
|
||||||
|
)
|
||||||
tracker = mode.trackers[msg['account']]
|
tracker = mode.trackers[msg['account']]
|
||||||
tracker.live_pp.update_from_msg(msg)
|
tracker.live_pp.update_from_msg(msg)
|
||||||
tracker.update_from_pp(set_as_startup=True) # status/pane UI
|
tracker.update_from_pp(
|
||||||
|
set_as_startup=True,
|
||||||
|
)
|
||||||
|
# status/pane UI
|
||||||
mode.pane.update_status_ui(tracker)
|
mode.pane.update_status_ui(tracker)
|
||||||
|
|
||||||
if tracker.live_pp.size:
|
if tracker.live_pp.size:
|
||||||
|
@ -974,7 +1030,7 @@ async def process_trade_msg(
|
||||||
dialog: Dialog = mode.dialogs.get(oid)
|
dialog: Dialog = mode.dialogs.get(oid)
|
||||||
|
|
||||||
if dialog:
|
if dialog:
|
||||||
fqsn = dialog.symbol
|
fqme = dialog.symbol
|
||||||
|
|
||||||
match msg:
|
match msg:
|
||||||
case Status(
|
case Status(
|
||||||
|
@ -996,17 +1052,17 @@ async def process_trade_msg(
|
||||||
)
|
)
|
||||||
assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}'
|
assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}'
|
||||||
|
|
||||||
sym = mode.chart.linked.symbol
|
sym: MktPair = mode.chart.linked.mkt
|
||||||
fqsn = sym.front_fqsn()
|
fqme = sym.fqme
|
||||||
if (
|
if (
|
||||||
((order.symbol + f'.{msg.src}') == fqsn)
|
((order.symbol + f'.{msg.src}') == fqme)
|
||||||
|
|
||||||
# a existing dark order for the same symbol
|
# a existing dark order for the same symbol
|
||||||
or (
|
or (
|
||||||
order.symbol == fqsn
|
order.symbol == fqme
|
||||||
and (
|
and (
|
||||||
msg.src in ('dark', 'paperboi')
|
msg.src in ('dark', 'paperboi')
|
||||||
or (msg.src in fqsn)
|
or (msg.src in fqme)
|
||||||
|
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
@ -1053,6 +1109,7 @@ async def process_trade_msg(
|
||||||
)
|
)
|
||||||
mode.lines.remove_line(uuid=oid)
|
mode.lines.remove_line(uuid=oid)
|
||||||
msg.req = req
|
msg.req = req
|
||||||
|
if do_notify:
|
||||||
await notify_from_ems_status_msg(msg)
|
await notify_from_ems_status_msg(msg)
|
||||||
|
|
||||||
# response to completed 'dialog' for order request
|
# response to completed 'dialog' for order request
|
||||||
|
@ -1062,6 +1119,7 @@ async def process_trade_msg(
|
||||||
req=req,
|
req=req,
|
||||||
):
|
):
|
||||||
msg.req = Order(**req)
|
msg.req = Order(**req)
|
||||||
|
if do_notify:
|
||||||
await notify_from_ems_status_msg(msg)
|
await notify_from_ems_status_msg(msg)
|
||||||
mode.lines.remove_line(uuid=oid)
|
mode.lines.remove_line(uuid=oid)
|
||||||
|
|
||||||
|
@ -1069,7 +1127,7 @@ async def process_trade_msg(
|
||||||
case Status(resp='fill'):
|
case Status(resp='fill'):
|
||||||
|
|
||||||
# handle out-of-piker fills reporting?
|
# handle out-of-piker fills reporting?
|
||||||
order: Order = book._sent_orders.get(oid)
|
order: Order = client._sent_orders.get(oid)
|
||||||
if not order:
|
if not order:
|
||||||
log.warning(f'order {oid} is unknown')
|
log.warning(f'order {oid} is unknown')
|
||||||
order = msg.req
|
order = msg.req
|
||||||
|
|
|
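
Aside: a minimal usage sketch of the renamed client API above, not from the commit itself. It assumes the five-value `open_ems()` unpack and the re-keyed `position_msgs` shown in this diff; the fqme literal and the printing are illustrative only.

    from piker.clearing import open_ems

    async def show_startup_pps(fqme: str = 'xbtusdt.kraken') -> None:
        async with open_ems(
            fqme,
            mode='paper',      # assumption: paper clearing as in the tests below
            loglevel='info',
        ) as (
            client,            # OrderClient (was: OrderBook `book`)
            trades_stream,     # tractor.MsgStream of EMS msgs
            position_msgs,     # now a dict-of-dicts keyed by fqme
            brokerd_accounts,
            dialogs,
        ):
            # startup positions: (broker, acctid) -> {fqme: BrokerdPosition}
            for (broker, acctid), pps_by_fqme in position_msgs.items():
                for msg in pps_by_fqme.values():
                    print(f'{acctid}@{broker} -> {msg}')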
@@ -1 +1,3 @@
 pytest
+docker
+elasticsearch
@@ -14,5 +14,7 @@
 # ``asyncvnc`` for sending interactions to ib-gw inside docker
 -e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc
 
-# ``cryptofeed`` for connecting to various crypto exchanges + custom fixes
--e git+https://github.com/pikers/cryptofeed.git@date_parsing#egg=cryptofeed
+# ``tomlkit`` for account files and configs; we've
+# added some new features that need to get upstreamed:
+-e git+https://github.com/pikers/tomlkit.git@piker_pin#egg=tomlkit
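
Aside (not part of the diff): the likely point of pinning a `tomlkit` fork is style-preserving round-trips of user config files. A tiny sketch, with made-up values:

    import tomlkit

    doc = tomlkit.parse(
        "# kraken creds\n"
        "[kraken]\n"
        "api_key = ''\n"
    )
    doc['kraken']['api_key'] = 'abc123'  # hypothetical key
    # comments and layout survive the write, unlike plain dict-based writers
    assert '# kraken creds' in tomlkit.dumps(doc)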
setup.py
@@ -40,18 +40,21 @@ setup(
         'console_scripts': [
             'piker = piker.cli:cli',
             'pikerd = piker.cli:pikerd',
+            'ledger = piker.accounting.cli:ledger',
         ]
     },
     install_requires=[
-        'toml',
-        'tomli', # fastest pure py reader
-        'click',
+        # 'tomlkit', # fork & fix for now..
+        'tomli', # for pre-3.11
+        'tomli-w', # for fast ledger writing
         'colorlog',
         'attrs',
         'pygments',
         'colorama', # numba traceback coloring
         'msgspec', # performant IPC messaging and structs
         'protobuf',
+        'typer',
+        'rich',
 
         # async
         'trio',
@@ -63,8 +66,7 @@ setup(
         # normally pinned to particular git hashes..
         # 'tractor',
         # 'asyncvnc',
-        # 'pyqtgraph',
-        # anyio-marketstore # mkts tsdb client
+        # 'anyio-marketstore', # mkts tsdb client
 
         # brokers
         'asks', # for non-ws rest apis
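
Aside (not from the commit): the reader/writer split implied by the pins above, as a runnable sketch. `tomli` parses (the stdlib `tomllib` covers 3.11+), while `tomli-w` serializes plain dicts:

    import sys

    if sys.version_info >= (3, 11):
        import tomllib as tomli
    else:
        import tomli
    import tomli_w

    data = tomli.loads("size = 0.001\n")
    print(tomli_w.dumps(data))  # round-trips the plain-dict data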
|
@ -13,7 +13,6 @@ from piker.service import (
|
||||||
Services,
|
Services,
|
||||||
)
|
)
|
||||||
from piker.log import get_console_log
|
from piker.log import get_console_log
|
||||||
from piker.clearing._client import open_ems
|
|
||||||
|
|
||||||
|
|
||||||
def pytest_addoption(parser):
|
def pytest_addoption(parser):
|
||||||
|
@ -87,8 +86,11 @@ def log(
|
||||||
@acm
|
@acm
|
||||||
async def _open_test_pikerd(
|
async def _open_test_pikerd(
|
||||||
tmpconfdir: str,
|
tmpconfdir: str,
|
||||||
|
|
||||||
reg_addr: tuple[str, int] | None = None,
|
reg_addr: tuple[str, int] | None = None,
|
||||||
loglevel: str = 'warning',
|
loglevel: str = 'warning',
|
||||||
|
debug_mode: bool = False,
|
||||||
|
|
||||||
**kwargs,
|
**kwargs,
|
||||||
|
|
||||||
) -> tuple[
|
) -> tuple[
|
||||||
|
@ -101,6 +103,9 @@ async def _open_test_pikerd(
|
||||||
a different port then the default to allow testing alongside
|
a different port then the default to allow testing alongside
|
||||||
a running stack.
|
a running stack.
|
||||||
|
|
||||||
|
Calls `.service._actor_runtime.maybe_open_pikerd()``
|
||||||
|
to boot the root actor / tractor runtime.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
import random
|
import random
|
||||||
from piker.service import maybe_open_pikerd
|
from piker.service import maybe_open_pikerd
|
||||||
|
@ -118,10 +123,7 @@ async def _open_test_pikerd(
|
||||||
'piker_test_dir': tmpconfdir,
|
'piker_test_dir': tmpconfdir,
|
||||||
},
|
},
|
||||||
|
|
||||||
# tests may need to spawn containers dynamically
|
debug_mode=debug_mode,
|
||||||
# or just in sequence per test, so we keep root.
|
|
||||||
drop_root_perms_for_ahab=False,
|
|
||||||
|
|
||||||
**kwargs,
|
**kwargs,
|
||||||
|
|
||||||
) as service_manager,
|
) as service_manager,
|
||||||
|
@ -143,14 +145,60 @@ async def _open_test_pikerd(
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def tmpconfdir(
|
||||||
|
tmp_path: Path,
|
||||||
|
) -> Path:
|
||||||
|
'''
|
||||||
|
Ensure the `brokers.toml` file for the test run exists
|
||||||
|
since we changed it to not touch files by default.
|
||||||
|
|
||||||
|
Here we override the default (in the user dir) and
|
||||||
|
set the global module var the same as we do inside
|
||||||
|
the `tmpconfdir` fixture.
|
||||||
|
|
||||||
|
'''
|
||||||
|
tmpconfdir: Path = tmp_path / '_testing'
|
||||||
|
tmpconfdir.mkdir()
|
||||||
|
|
||||||
|
# touch the `brokers.toml` file since it won't
|
||||||
|
# exist in the tmp test dir by default!
|
||||||
|
# override config dir in the root actor (aka
|
||||||
|
# this top level testing process).
|
||||||
|
from piker import config
|
||||||
|
config._config_dir: Path = tmpconfdir
|
||||||
|
|
||||||
|
conf, path = config.load(
|
||||||
|
conf_name='brokers',
|
||||||
|
touch_if_dne=True,
|
||||||
|
)
|
||||||
|
assert path.is_file(), 'WTH.. `brokers.toml` not created!?'
|
||||||
|
|
||||||
|
return tmpconfdir
|
||||||
|
|
||||||
|
# NOTE: the `tmp_dir` fixture will wipe any files older then 3 test
|
||||||
|
# sessions by default:
|
||||||
|
# https://docs.pytest.org/en/6.2.x/tmpdir.html#the-default-base-temporary-directory
|
||||||
|
# BUT, if we wanted to always wipe conf dir and all contained files,
|
||||||
|
# rmtree(str(tmp_path))
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def root_conf(tmpconfdir) -> dict:
|
||||||
|
return config.load(
|
||||||
|
'conf',
|
||||||
|
touch_if_dne=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def open_test_pikerd(
|
def open_test_pikerd(
|
||||||
request: pytest.FixtureRequest,
|
request: pytest.FixtureRequest,
|
||||||
tmp_path: Path,
|
tmp_path: Path,
|
||||||
|
tmpconfdir: Path,
|
||||||
loglevel: str,
|
loglevel: str,
|
||||||
):
|
):
|
||||||
tmpconfdir: Path = tmp_path / '_testing'
|
|
||||||
tmpconfdir.mkdir()
|
|
||||||
tmpconfdir_str: str = str(tmpconfdir)
|
tmpconfdir_str: str = str(tmpconfdir)
|
||||||
|
|
||||||
# NOTE: on linux the tmp config dir is generally located at:
|
# NOTE: on linux the tmp config dir is generally located at:
|
||||||
|
@ -160,6 +208,20 @@ def open_test_pikerd(
|
||||||
# https://docs.pytest.org/en/6.2.x/tmpdir.html#the-default-base-temporary-directory
|
# https://docs.pytest.org/en/6.2.x/tmpdir.html#the-default-base-temporary-directory
|
||||||
print(f'CURRENT TEST CONF DIR: {tmpconfdir}')
|
print(f'CURRENT TEST CONF DIR: {tmpconfdir}')
|
||||||
|
|
||||||
|
conf = request.config
|
||||||
|
debug_mode: bool = conf.option.usepdb
|
||||||
|
if (
|
||||||
|
debug_mode
|
||||||
|
and conf.option.capture != 'no'
|
||||||
|
):
|
||||||
|
# TODO: how to disable capture dynamically?
|
||||||
|
# conf._configured = False
|
||||||
|
# conf._do_configure()
|
||||||
|
pytest.fail(
|
||||||
|
'To use `--pdb` (with `tractor` subactors) you also must also '
|
||||||
|
'pass `-s`!'
|
||||||
|
)
|
||||||
|
|
||||||
yield partial(
|
yield partial(
|
||||||
_open_test_pikerd,
|
_open_test_pikerd,
|
||||||
|
|
||||||
|
@ -171,49 +233,11 @@ def open_test_pikerd(
|
||||||
# bind in level from fixture, which is itself set by
|
# bind in level from fixture, which is itself set by
|
||||||
# `--ll <value>` cli flag.
|
# `--ll <value>` cli flag.
|
||||||
loglevel=loglevel,
|
loglevel=loglevel,
|
||||||
)
|
|
||||||
|
|
||||||
# NOTE: the `tmp_dir` fixture will wipe any files older then 3 test
|
debug_mode=debug_mode,
|
||||||
# sessions by default:
|
)
|
||||||
# https://docs.pytest.org/en/6.2.x/tmpdir.html#the-default-base-temporary-directory
|
|
||||||
# BUT, if we wanted to always wipe conf dir and all contained files,
|
|
||||||
# rmtree(str(tmp_path))
|
|
||||||
|
|
||||||
# TODO: teardown checks such as,
|
# TODO: teardown checks such as,
|
||||||
# - no leaked subprocs or shm buffers
|
# - no leaked subprocs or shm buffers
|
||||||
# - all requested container service are torn down
|
# - all requested container service are torn down
|
||||||
# - certain ``tractor`` runtime state?
|
# - certain ``tractor`` runtime state?
|
||||||
|
|
||||||
|
|
||||||
@acm
|
|
||||||
async def _open_test_pikerd_and_ems(
|
|
||||||
fqsn,
|
|
||||||
mode,
|
|
||||||
loglevel,
|
|
||||||
open_test_pikerd
|
|
||||||
):
|
|
||||||
async with (
|
|
||||||
open_test_pikerd() as (_, _, _, services),
|
|
||||||
open_ems(
|
|
||||||
fqsn,
|
|
||||||
mode=mode,
|
|
||||||
loglevel=loglevel,
|
|
||||||
) as ems_services,
|
|
||||||
):
|
|
||||||
yield (services, ems_services)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def open_test_pikerd_and_ems(
|
|
||||||
open_test_pikerd,
|
|
||||||
fqsn: str = 'xbtusdt.kraken',
|
|
||||||
mode: str = 'paper',
|
|
||||||
loglevel: str = 'info',
|
|
||||||
):
|
|
||||||
yield partial(
|
|
||||||
_open_test_pikerd_and_ems,
|
|
||||||
fqsn,
|
|
||||||
mode,
|
|
||||||
loglevel,
|
|
||||||
open_test_pikerd
|
|
||||||
)
|
|
||||||
|
|
|
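
Aside (illustrative, not in the diff): how a test might compose the new fixtures. The filename assertion assumes `config.load()` writes `brokers.toml` directly under the overridden config dir, as the `tmpconfdir` fixture above implies:

    from pathlib import Path

    def test_brokers_toml_exists(
        tmpconfdir: Path,  # tmp config dir with a touched brokers.toml
    ):
        # assumed layout: <tmpconfdir>/brokers.toml per the fixture's load
        assert (tmpconfdir / 'brokers.toml').is_file()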
@@ -0,0 +1,35 @@
+'''
+`piker.accounting` mgmt calculations for
+- positioning
+- ledger updates
+- config file IO
+
+'''
+from pathlib import Path
+
+from piker import config
+
+
+def test_root_conf_networking_section(
+    root_conf: dict,
+):
+    conf, path = config.load(
+        'conf',
+        touch_if_dne=True,
+    )
+    assert conf['network']['tsdb']
+
+
+def test_account_file_default_empty(
+    tmpconfdir: Path,
+):
+    conf, path = config.load_account(
+        'kraken',
+        'paper',
+    )
+
+    # ensure the account file empty but created
+    # and in the correct place in the filesystem!
+    assert not conf
+    assert path.parent.is_dir()
+    assert path.parent.name == 'accounting'
@@ -1,11 +1,12 @@
 from typing import AsyncContextManager
 import logging
 
-import trio
+import pytest
 from elasticsearch import (
     Elasticsearch,
     ConnectionError,
 )
+import trio
 
 from piker.service import marketstore
 from piker.service import elastic
@@ -14,6 +15,7 @@ from piker.service import elastic
 def test_marketstore_startup_and_version(
     open_test_pikerd: AsyncContextManager,
     loglevel: str,
+    root_conf: dict,
 ):
     '''
     Verify marketstore tsdb starts up and we can
@@ -21,18 +23,39 @@ def test_marketstore_startup_and_version(
 
     '''
     async def main():
+        user_conf: dict = {
+            'grpc_listen_port': 5995 + 6,
+            'ws_listen_port': 5993 + 6,
+        }
+
+        dname: str # service name
+        config: dict # service name
+
         async with (
             open_test_pikerd(
                 loglevel=loglevel,
-                tsdb=True
+                # tsdb=True
             ) as (
                 _, # host
                 _, # port
                 pikerd_portal,
                 services,
             ),
 
+            marketstore.start_ahab_daemon(
+                services,
+                user_conf,
+                loglevel=loglevel,
+
+            ) as (dname, config)
         ):
+            # ensure user config was applied
+            for k, v in user_conf.items():
+                assert config[k] == v
+
+            # netconf: dict = root_conf['network']
+            # tsdbconf = netconf['tsdb']
+
             # TODO: we should probably make this connection poll
             # loop part of the `get_client()` implementation no?
 
@@ -45,7 +68,12 @@ def test_marketstore_startup_and_version(
             for _ in range(3):
 
                 # NOTE: default sockaddr is embedded within
-                async with marketstore.get_client() as client:
+                async with marketstore.get_client(
+                    host='localhost',
+                    port=user_conf['grpc_listen_port'],
+
+                ) as client:
+                    print(f'Client is up @ {user_conf}!')
 
                     with trio.move_on_after(1) as cs:
                         syms = await client.list_symbols()
@@ -64,11 +92,18 @@ def test_marketstore_startup_and_version(
                     )
                     print('VERSION CHECKED')
 
 
                 break # get out of retry-connect loop
+            else:
+                raise RuntimeError('Failed to connect to {conf}!')
+
+            # gracefully teardown docker-daemon-service
+            print(f'Cancelling docker service {dname}')
 
     trio.run(main)
 
 
+@pytest.mark.skip
 def test_elasticsearch_startup_and_version(
     open_test_pikerd: AsyncContextManager,
     loglevel: str,
@@ -80,18 +115,29 @@ def test_elasticsearch_startup_and_version(
 
     '''
     async def main():
-        port = 19200
+        port: int = 19200
+        user_conf: dict = {
+            'port': port,
+        }
+
+        dname: str # service name
+        config: dict # service name
+
         async with (
             open_test_pikerd(
                 loglevel=loglevel,
-                es=True
             ) as (
                 _, # host
                 _, # port
                 pikerd_portal,
                 services,
             ),
+            elastic.start_ahab_daemon(
+                services,
+                user_conf,
+                loglevel=loglevel,
+
+            ) as (dname, config)
         ):
             # TODO: much like the above connect loop for mkts, we should
             # probably make this sync start part of the
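
Aside (a sketch mirroring the test above, not from the commit): the new explicit daemon lifecycle replaces the old `tsdb=True` boot flag. The ports here are arbitrary and `services` is whatever the pikerd fixture yields:

    from piker.service import marketstore

    async def boot_tsdb(services, loglevel: str = 'info') -> None:
        user_conf: dict = {
            'grpc_listen_port': 6001,
            'ws_listen_port': 5999,
        }
        # the ctx mngr yields the docker service name + applied config
        async with marketstore.start_ahab_daemon(
            services,
            user_conf,
            loglevel=loglevel,
        ) as (dname, config):
            # the yielded config echoes back the applied user settings
            assert all(config[k] == v for k, v in user_conf.items())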
@@ -0,0 +1,406 @@
+'''
+Execution mgmt system (EMS) e2e testing.
+
+Most tests leverage our paper clearing engine found (currently) in
+``piker.clearing._paper_engine`.
+
+Ideally in the longer run we are able to support forms of (non-clearing)
+live order tests against certain backends that make it possible to do
+so..
+
+'''
+from contextlib import (
+    contextmanager as cm,
+)
+from typing import (
+    Awaitable,
+    Callable,
+    AsyncContextManager,
+    Literal,
+)
+
+import trio
+from exceptiongroup import BaseExceptionGroup
+
+import pytest
+import tractor
+from uuid import uuid4
+
+from piker.service import Services
+from piker.log import get_logger
+from piker.clearing._messages import (
+    Order,
+    Status,
+    # Cancel,
+    BrokerdPosition,
+)
+from piker.clearing import (
+    open_ems,
+    OrderClient,
+)
+from piker.accounting import (
+    unpack_fqme,
+)
+from piker.accounting import (
+    open_pps,
+    Position,
+)
+
+log = get_logger(__name__)
+
+
+async def order_and_and_wait_for_ppmsg(
+    client: OrderClient,
+    trades_stream: tractor.MsgStream,
+    fqme: str,
+
+    action: Literal['buy', 'sell'],
+    price: float = 100e3, # just a super high price.
+    size: float = 0.01,
+
+    exec_mode: str = 'live',
+    account: str = 'paper',
+
+) -> list[Status | BrokerdPosition]:
+    '''
+    Start piker, place a trade and assert data in
+    pps stream, ledger and position table.
+
+    '''
+    sent: list[Order] = []
+    broker, mktep, venue, suffix = unpack_fqme(fqme)
+
+    order = Order(
+        exec_mode=exec_mode,
+        action=action, # TODO: remove this from our schema?
+        oid=str(uuid4()),
+        account=account,
+        size=size,
+        symbol=fqme,
+        price=price,
+        brokers=[broker],
+    )
+    sent.append(order)
+    await client.send(order)
+
+    # TODO: i guess we should still test the old sync-API?
+    # client.send_nowait(order)
+
+    # Wait for position message before moving on to verify flow(s)
+    # for the multi-order position entry/exit.
+    msgs: list[Status | BrokerdPosition] = []
+    async for msg in trades_stream:
+        match msg:
+            case {'name': 'position'}:
+                ppmsg = BrokerdPosition(**msg)
+                msgs.append(ppmsg)
+                break
+
+            case {'name': 'status'}:
+                msgs.append(Status(**msg))
+
+    return sent, msgs
+
+
+def run_and_tollerate_cancels(
+    fn: Callable[..., Awaitable],
+
+    expect_errs: tuple[Exception] | None = None,
+    tollerate_errs: tuple[Exception] = (tractor.ContextCancelled,),
+
+):
+    '''
+    Run ``trio``-``piker`` runtime with potential tolerance for
+    inter-actor cancellation during teardown (normally just
+    `tractor.ContextCancelled`s).
+
+    '''
+    if expect_errs:
+        with pytest.raises(BaseExceptionGroup) as exc_info:
+            trio.run(fn)
+
+        for err in exc_info.value.exceptions:
+            assert type(err) in expect_errs
+    else:
+        try:
+            trio.run(fn)
+        except tollerate_errs:
+            pass
+
+
+@cm
+def load_and_check_pos(
+    order: Order,
+    ppmsg: BrokerdPosition,
+
+) -> None:
+
+    with open_pps(ppmsg.broker, ppmsg.account) as table:
+
+        if ppmsg.size == 0:
+            assert ppmsg.symbol not in table.pps
+            yield None
+            return
+
+        else:
+            # NOTE: a special case is here since the `PpTable.pps` are
+            # normally indexed by the particular broker's
+            # `Position.bs_mktid: str` (a unique market / symbol id provided
+            # by their systems/design) but for the paper engine case, this
+            # is the same the fqme.
+            pp: Position = table.pps[ppmsg.symbol]
+
+            assert ppmsg.size == pp.size
+            assert ppmsg.avg_price == pp.ppu
+
+            yield pp
+
+
+def test_ems_err_on_bad_broker(
+    open_test_pikerd: Services,
+    loglevel: str,
+):
+    async def load_bad_fqme():
+        try:
+            async with (
+                open_test_pikerd() as (_, _, _, _),
+
+                open_ems(
+                    'doggycoin.doggy',
+                    mode='paper',
+                    loglevel=loglevel,
+                ) as _
+            ):
+                pytest.fail('EMS is working on non-broker!?')
+        except ModuleNotFoundError:
+            pass
+
+    run_and_tollerate_cancels(load_bad_fqme)
+
+
+async def match_ppmsgs_on_ems_boot(
+    ppmsgs: list[BrokerdPosition],
+
+) -> None:
+    '''
+    Given a list of input position msgs, verify they match
+    what is loaded from the EMS on connect.
+
+    '''
+    by_acct: dict[tuple, list[BrokerdPosition]] = {}
+    for msg in ppmsgs:
+        by_acct.setdefault(
+            (msg.broker, msg.account),
+            [],
+        ).append(msg)
+
+    # TODO: actually support multi-mkts to `open_ems()`
+    # but for now just pass the first fqme.
+    fqme = msg.symbol
+
+    # disconnect from EMS, reconnect and ensure we get our same
+    # position relayed to us again in the startup msg.
+    async with (
+        open_ems(
+            fqme,
+            mode='paper',
+            loglevel='info',
+        ) as (
+            _, # OrderClient
+            _, # tractor.MsgStream
+            startup_pps,
+            accounts,
+            _, # dialogs,
+        )
+    ):
+        for (broker, account), ppmsgs in by_acct.items():
+            assert account in accounts
+
+            # lookup all msgs rx-ed for this account
+            rx_msgs = startup_pps[(broker, account)]
+
+            for expect_ppmsg in ppmsgs:
+                rx_msg = BrokerdPosition(**rx_msgs[expect_ppmsg.symbol])
+                assert rx_msg == expect_ppmsg
+
+
+async def submit_and_check(
+    fills: tuple[dict],
+    loglevel: str,
+
+) -> tuple[
+    BrokerdPosition,
+    Position,
+]:
+    '''
+    Enter a trade and assert entries are made in pps and ledger files.
+
+    Shutdown the ems-client and ensure on reconnect we get the expected
+    matching ``BrokerdPosition`` and pps.toml entries.
+
+    '''
+    broker: str = 'kraken'
+    mkt_key: str = 'xbtusdt'
+    fqme: str = f'{mkt_key}.{broker}'
+
+    startup_pps: dict[
+        tuple[str, str], # brokername, acctid
+        list[BrokerdPosition],
+    ]
+    async with (
+        open_ems(
+            fqme,
+            mode='paper',
+            loglevel=loglevel,
+        ) as (
+            client, # OrderClient
+            trades_stream, # tractor.MsgStream
+            startup_pps,
+            accounts,
+            _, # dialogs
+        )
+    ):
+        # no positions on startup
+        assert not startup_pps
+        assert 'paper' in accounts
+
+        od: dict
+        for od in fills:
+            print(f'Sending order {od} for fill')
+            size = od['size']
+            sent, msgs = await order_and_and_wait_for_ppmsg(
+                client,
+                trades_stream,
+                fqme,
+                action='buy' if size > 0 else 'sell',
+                price=100e3 if size > 0 else 0,
+                size=size,
+            )
+
+        last_order: Order = sent[-1]
+        last_resp = msgs[-1]
+        assert isinstance(last_resp, BrokerdPosition)
+        ppmsg = last_resp
+
+        # check that pps.toml for account has been updated
+        # and all ems position msgs match that state.
+        with load_and_check_pos(
+            last_order,
+            ppmsg,
+        ) as pos:
+            pass
+
+    return ppmsg, pos
+
+
+@pytest.mark.parametrize(
+    'fills',
+    [
+        # buy and leave
+        ({'size': 0.001},),
+
+        # sell short, then buy back to net-zero in dst
+        (
+            {'size': -0.001},
+            {'size': 0.001},
+        ),
+
+        # multi-partial entry and exits from net-zero, to short and back
+        # to net-zero.
+        (
+            # enters
+            {'size': 0.001},
+            {'size': 0.002},
+
+            # partial exit
+            {'size': -0.001},
+
+            # partial enter
+            {'size': 0.0015},
+            {'size': 0.001},
+            {'size': 0.002},
+
+            # nearly back to zero.
+            {'size': -0.001},
+
+            # switch to net-short
+            {'size': -0.025},
+            {'size': -0.0195},
+
+            # another entry
+            {'size': 0.001},
+
+            # final cover to net-zero again.
+            {'size': 0.038},
+        ),
+    ],
+    ids='fills={}'.format,
+)
+def test_multi_fill_positions(
+    open_test_pikerd: AsyncContextManager,
+    loglevel: str,
+
+    fills: tuple[dict],
+
+    check_cross_session: bool = False,
+
+) -> None:
+
+    ppmsg: BrokerdPosition
+    pos: Position
+
+    accum_size: float = 0
+    for fill in fills:
+        accum_size += fill['size']
+
+    async def atest():
+
+        # export to outer scope for audit on second runtime-boot.
+        nonlocal ppmsg, pos
+
+        async with (
+            open_test_pikerd() as (_, _, _, _),
+        ):
+            ppmsg, pos = await submit_and_check(
+                fills=fills,
+                loglevel=loglevel,
+            )
+            assert ppmsg.size == accum_size
+
+    run_and_tollerate_cancels(atest)
+
+    if (
+        check_cross_session
+        or accum_size != 0
+    ):
+        # rerun just to check that position info is persistent for the paper
+        # account (i.e. a user can expect to see paper pps persist across
+        # runtime sessions.
+        async def just_check_pp():
+            nonlocal ppmsg
+
+            async with (
+                open_test_pikerd() as (_, _, _, _),
+            ):
+                await match_ppmsgs_on_ems_boot([ppmsg])
+
+        run_and_tollerate_cancels(just_check_pp)
+
+
+# TODO: still need to implement offline storage of darks/alerts/paper
+# lives probably all the same way.. see
+# https://github.com/pikers/piker/issues/463
+def test_open_orders_reloaded(
+    open_test_pikerd: AsyncContextManager,
+    loglevel: str,
+
+    # fills: tuple[dict],
+
+    check_cross_session: bool = False,
+):
+    ...
+
+
+def test_dark_order_clearing():
+    ...
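
Aside (not part of the diff): the structural-pattern-matching demux used throughout the new module, shown standalone. Runnable on Python 3.10+; the sample msgs are fabricated:

    msgs: list[dict] = [
        {'name': 'status', 'resp': 'open'},
        {'name': 'position', 'symbol': 'xbtusdt.kraken', 'size': 0.001},
    ]
    for msg in msgs:
        match msg:
            case {'name': 'position'}:
                print('position update:', msg)
            case {'name': 'status'}:
                print('dialog status:', msg)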
@@ -7,19 +7,20 @@ from pprint import pprint
 from typing import AsyncContextManager
 
 import pytest
-# import tractor
 import trio
 
 from piker.data import (
     ShmArray,
     open_feed,
 )
-from piker.data._source import (
-    unpack_fqsn,
+from piker.data.flows import Flume
+from piker.accounting import (
+    unpack_fqme,
 )
 
 
 @pytest.mark.parametrize(
-    'fqsns',
+    'fqmes',
     [
         # binance
         (100, {'btcusdt.binance', 'ethusdt.binance'}, False),
@@ -30,20 +31,20 @@ from piker.data._source import (
         # binance + kraken
         (100, {'btcusdt.binance', 'xbtusd.kraken'}, False),
     ],
-    ids=lambda param: f'quotes={param[0]}@fqsns={param[1]}',
+    ids=lambda param: f'quotes={param[0]}@fqmes={param[1]}',
 )
 def test_multi_fqsn_feed(
     open_test_pikerd: AsyncContextManager,
-    fqsns: set[str],
+    fqmes: set[str],
     loglevel: str,
-    ci_env: bool
+    ci_env: bool,
 ):
     '''
-    Start a real-time data feed for provided fqsn and pull
+    Start a real-time data feed for provided fqme and pull
     a few quotes then simply shut down.
 
     '''
-    max_quotes, fqsns, run_in_ci = fqsns
+    max_quotes, fqmes, run_in_ci = fqmes
 
     if (
         ci_env
@@ -52,15 +53,15 @@ def test_multi_fqsn_feed(
         pytest.skip('Skipping CI disabled test due to feed restrictions')
 
     brokers = set()
-    for fqsn in fqsns:
-        brokername, key, suffix = unpack_fqsn(fqsn)
+    for fqme in fqmes:
+        brokername, *_ = unpack_fqme(fqme)
         brokers.add(brokername)
 
     async def main():
         async with (
             open_test_pikerd(),
             open_feed(
-                fqsns,
+                fqmes,
                 loglevel=loglevel,
 
                 # TODO: ensure throttle rate is applied
@@ -71,20 +72,20 @@ def test_multi_fqsn_feed(
             ) as feed
         ):
             # verify shm buffers exist
-            for fqin in fqsns:
+            for fqin in fqmes:
                 flume = feed.flumes[fqin]
                 ohlcv: ShmArray = flume.rt_shm
                 hist_ohlcv: ShmArray = flume.hist_shm
 
             async with feed.open_multi_stream(brokers) as stream:
 
-                # pull the first startup quotes, one for each fqsn, and
+                # pull the first startup quotes, one for each fqme, and
                 # ensure they match each flume's startup quote value.
-                fqsns_copy = fqsns.copy()
+                fqsns_copy = fqmes.copy()
                 with trio.fail_after(0.5):
                     for _ in range(1):
                         first_quotes = await stream.receive()
-                        for fqsn, quote in first_quotes.items():
+                        for fqme, quote in first_quotes.items():
 
                             # XXX: TODO: WTF apparently this error will get
                             # supressed and only show up in the teardown
@@ -92,18 +93,17 @@ def test_multi_fqsn_feed(
                             # <tractorbugurl>
                             # assert 0
 
-                            fqsns_copy.remove(fqsn)
-                            flume = feed.flumes[fqsn]
+                            fqsns_copy.remove(fqme)
+                            flume: Flume = feed.flumes[fqme]
                             assert quote['last'] == flume.first_quote['last']
 
                 cntr = Counter()
                 with trio.fail_after(6):
                     async for quotes in stream:
-                        for fqsn, quote in quotes.items():
-                            cntr[fqsn] += 1
+                        for fqme, quote in quotes.items():
+                            cntr[fqme] += 1
 
-                            # await tractor.breakpoint()
-                            flume = feed.flumes[fqsn]
+                            flume = feed.flumes[fqme]
                             ohlcv: ShmArray = flume.rt_shm
                             hist_ohlcv: ShmArray = flume.hist_shm
 
@@ -116,7 +116,7 @@ def test_multi_fqsn_feed(
                             # assert last == rt_row['close']
                             # assert last == hist_row['close']
                             pprint(
-                                f'{fqsn}: {quote}\n'
+                                f'{fqme}: {quote}\n'
                                 f'rt_ohlc: {rt_row}\n'
                                 f'hist_ohlc: {hist_row}\n'
                             )
@@ -124,6 +124,6 @@ def test_multi_fqsn_feed(
                             if cntr.total() >= max_quotes:
                                 break
 
-                assert set(cntr.keys()) == fqsns
+                assert set(cntr.keys()) == fqmes
 
     trio.run(main)
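
Aside (a sketch grounded in the new test modules above, not itself in the diff): the fqsn -> fqme rename implies a 4-part unpack where the broker name comes first, matching `broker, mktep, venue, suffix = unpack_fqme(fqme)` as used in the EMS tests:

    from piker.accounting import unpack_fqme

    # 'xbtusdt.kraken' is the paper-engine example used in the tests
    broker, mktep, venue, suffix = unpack_fqme('xbtusdt.kraken')
    assert broker == 'kraken'  # the remaining parts are backend specific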
@@ -1,230 +0,0 @@
-'''
-Paper-mode testing
-'''
-
-import trio
-from exceptiongroup import BaseExceptionGroup
-from typing import (
-    AsyncContextManager,
-    Literal,
-)
-
-import pytest
-from tractor._exceptions import ContextCancelled
-from uuid import uuid4
-from functools import partial
-
-from piker.log import get_logger
-from piker.clearing._messages import Order
-from piker.pp import (
-    open_pps,
-)
-
-log = get_logger(__name__)
-
-
-def get_fqsn(broker, symbol):
-    fqsn = f'{symbol}.{broker}'
-    return (fqsn, symbol, broker)
-
-
-oid = ''
-test_exec_mode = 'live'
-(fqsn, symbol, broker) = get_fqsn('kraken', 'xbtusdt')
-brokers = [broker]
-account = 'paper'
-
-
-async def _async_main(
-    open_test_pikerd_and_ems: AsyncContextManager,
-    action: Literal['buy', 'sell'] | None = None,
-    price: int = 30000,
-    executions: int = 1,
-    size: float = 0.01,
-
-    # Assert options
-    assert_entries: bool = False,
-    assert_pps: bool = False,
-    assert_zeroed_pps: bool = False,
-    assert_msg: bool = False,
-
-) -> None:
-    '''
-    Start piker, place a trade and assert data in
-    pps stream, ledger and position table.
-
-    '''
-    oid: str = ''
-    last_msg = {}
-
-    async with open_test_pikerd_and_ems() as (
-        services,
-        (book, trades_stream, pps, accounts, dialogs),
-    ):
-        if action:
-            for x in range(executions):
-                oid = str(uuid4())
-                order = Order(
-                    exec_mode=test_exec_mode,
-                    action=action,
-                    oid=oid,
-                    account=account,
-                    size=size,
-                    symbol=fqsn,
-                    price=price,
-                    brokers=brokers,
-                )
-                # This is actually a syncronous call to push a message
-                book.send(order)
-
-                async for msg in trades_stream:
-                    last_msg = msg
-                    match msg:
-                        # Wait for position message before moving on
-                        case {'name': 'position'}:
-                            break
-
-        # Teardown piker like a user would
-        raise KeyboardInterrupt
-
-    if assert_entries or assert_pps or assert_zeroed_pps or assert_msg:
-        _assert(
-            assert_entries,
-            assert_pps,
-            assert_zeroed_pps,
-            pps,
-            last_msg,
-            size,
-            executions,
-        )
-
-
-def _assert(
-    assert_entries,
-    assert_pps,
-    assert_zerod_pps,
-    pps,
-    last_msg,
-    size,
-    executions,
-):
-    with (
-        open_pps(broker, account, write_on_exit=False) as table,
-    ):
-        '''
-        Assert multiple cases including pps,
-        ledger and final position message state
-
-        '''
-        if assert_entries:
-            for key, val in [
-                ('broker', broker),
-                ('account', account),
-                ('symbol', fqsn),
-                ('size', size * executions),
-                ('currency', symbol),
-                ('avg_price', table.pps[symbol].ppu)
-            ]:
-                assert last_msg[key] == val
-
-        if assert_pps:
-            last_ppu = pps[(broker, account)][-1]
-            assert last_ppu['avg_price'] == table.pps[symbol].ppu
-
-        if assert_zerod_pps:
-            assert not bool(table.pps)
-
-
-def _run_test_and_check(fn):
-    '''
-    Close position and assert empty position in pps
-
-    '''
-    with pytest.raises(BaseExceptionGroup) as exc_info:
-        trio.run(fn)
-
-    for exception in exc_info.value.exceptions:
-        assert isinstance(exception, KeyboardInterrupt) or isinstance(
-            exception, ContextCancelled
-        )
-
-
-def test_buy(
-    open_test_pikerd_and_ems: AsyncContextManager,
-):
-    '''
-    Enter a trade and assert entries are made in pps and ledger files.
-
-    '''
-    _run_test_and_check(
-        partial(
-            _async_main,
-            open_test_pikerd_and_ems=open_test_pikerd_and_ems,
-            action='buy',
-            assert_entries=True,
-        ),
-    )
-
-    # Open ems and assert existence of pps entries
-    _run_test_and_check(
-        partial(
-            _async_main,
-            open_test_pikerd_and_ems=open_test_pikerd_and_ems,
-            assert_pps=True,
-        ),
-    )
-
-
-def test_sell(
-    open_test_pikerd_and_ems: AsyncContextManager,
-):
-    '''
-    Sell position and ensure pps are zeroed.
-
-    '''
-    _run_test_and_check(
-        partial(
-            _async_main,
-            open_test_pikerd_and_ems=open_test_pikerd_and_ems,
-            action='sell',
-            price=1,
-        ),
-    )
-
-    _run_test_and_check(
-        partial(
-            _async_main,
-            open_test_pikerd_and_ems=open_test_pikerd_and_ems,
-            assert_zeroed_pps=True,
-        ),
-    )
-
-
-def test_multi_sell(
-    open_test_pikerd_and_ems: AsyncContextManager,
-):
-    '''
-    Make 5 market limit buy orders and
-    then sell 5 slots at the same price.
-    Finally, assert cleared positions.
-
-    '''
-    _run_test_and_check(
-        partial(
-            _async_main,
-            open_test_pikerd_and_ems=open_test_pikerd_and_ems,
-            action='buy',
-            executions=5,
-        ),
-    )
-
-    _run_test_and_check(
-        partial(
-            _async_main,
-            open_test_pikerd_and_ems=open_test_pikerd_and_ems,
-            action='sell',
-            executions=5,
-            price=1,
-            assert_zeroed_pps=True,
-        ),
-    )
@@ -2,9 +2,13 @@
 Actor tree daemon sub-service verifications
 
 '''
-from typing import AsyncContextManager
+from typing import (
+    AsyncContextManager,
+    Callable,
+)
 from contextlib import asynccontextmanager as acm
 
+from exceptiongroup import BaseExceptionGroup
 import pytest
 import trio
 import tractor
@@ -24,7 +28,7 @@ from piker.clearing._messages import (
     Status,
 )
 from piker.clearing._client import (
-    OrderBook,
+    OrderClient,
 )
 
 
@@ -33,8 +37,8 @@ def test_runtime_boot(
 ):
     '''
     Verify we can boot the `pikerd` service stack using the
-    `open_test_pikerd` fixture helper and that registry address details
-    match up.
+    `open_test_pikerd()` fixture helper and that contact-registry
+    address details match up.
 
     '''
     async def main():
@@ -55,6 +59,46 @@ def test_runtime_boot(
             assert pikerd_portal.channel.raddr == daemon_addr
             assert pikerd_portal.channel.raddr == portal.channel.raddr
 
+            # no service tasks should be started
+            assert not services.service_tasks
+
+    trio.run(main)
+
+
+def test_ensure_datafeed_actors(
+    open_test_pikerd: AsyncContextManager,
+    loglevel: str,
+    # cancel_method: str,
+
+) -> None:
+    '''
+    Verify that booting a data feed starts a `brokerd`
+    actor and a singleton global `samplerd` and opening
+    an order mode in paper opens the `paperboi` service.
+
+    '''
+    actor_name: str = 'brokerd'
+    backend: str = 'kraken'
+    brokerd_name: str = f'{actor_name}.{backend}'
+
+    async def main():
+        async with (
+            open_test_pikerd(),
+
+            open_feed(
+                ['xbtusdt.kraken'],
+                loglevel=loglevel,
+            ) as feed
+        ):
+            # halt rt quote streams since we aren't testing them
+            await feed.pause()
+
+            async with (
+                ensure_service(brokerd_name),
+                ensure_service('samplerd'),
+            ):
+                await trio.sleep(0.1)
+
     trio.run(main)
 
 
@@ -73,45 +117,69 @@ async def ensure_service(
     yield portal
 
 
-def test_ensure_datafeed_actors(
-    open_test_pikerd: AsyncContextManager,
-    loglevel: str,
+def run_test_w_cancel_method(
+    cancel_method: str,
+    main: Callable,
 
 ) -> None:
     '''
-    Verify that booting a data feed starts a `brokerd`
-    actor and a singleton global `samplerd` and opening
-    an order mode in paper opens the `paperboi` service.
+    Run our runtime under trio and expect a certain type of cancel condition
+    depending on input.
 
     '''
-    actor_name: str = 'brokerd'
-    backend: str = 'kraken'
-    brokerd_name: str = f'{actor_name}.{backend}'
-
-    async def main():
-        async with (
-            open_test_pikerd(),
-            open_feed(
-                ['xbtusdt.kraken'],
-                loglevel=loglevel,
-            ) as feed
-        ):
-            # halt rt quote streams since we aren't testing them
-            await feed.pause()
-
-            async with (
-                ensure_service(brokerd_name),
-                ensure_service('samplerd'),
-            ):
-                pass
-
-    trio.run(main)
+    cancelled_msg: str = (
+        "was remotely cancelled by remote actor ('pikerd'")
+
+    if cancel_method == 'sigint':
+        with pytest.raises(
+            BaseExceptionGroup,
+        ) as exc_info:
+            trio.run(main)
+
+        multi = exc_info.value
+
+        for suberr in multi.exceptions:
+            match suberr:
+                # ensure we receive a remote cancellation error caused
+                # by the pikerd root actor since we used the
+                # `.cancel_service()` API above B)
+                case tractor.ContextCancelled():
+                    assert cancelled_msg in suberr.args[0]
+
+                case KeyboardInterrupt():
+                    pass
+
+                case _:
+                    pytest.fail(f'Unexpected error {suberr}')
+
+    elif cancel_method == 'services':
+
+        # XXX NOTE: oddly, when you pass --pdb to pytest, i think since
+        # we also use that to enable the underlying tractor debug mode,
+        # it causes this to not raise for some reason? So if you see
+        # that while changing this test.. it's prolly that.
+
+        with pytest.raises(
+            tractor.ContextCancelled
+        ) as exc_info:
+            trio.run(main)
+
+        assert cancelled_msg in exc_info.value.args[0]
+
+    else:
+        pytest.fail(f'Test is broken due to {cancel_method}')
+
+
+@pytest.mark.parametrize(
+    'cancel_method',
+    ['services', 'sigint'],
+)
 def test_ensure_ems_in_paper_actors(
     open_test_pikerd: AsyncContextManager,
     loglevel: str,
 
+    cancel_method: str,
+
 ) -> None:
 
     actor_name: str = 'brokerd'
@@ -121,8 +189,7 @@ def test_ensure_ems_in_paper_actors(
     async def main():
 
         # type declares
-        book: OrderBook
-        trades_stream: tractor.MsgStream
+        client: OrderClient
         pps: dict[str, list[BrokerdPosition]]
         accounts: list[str]
         dialogs: dict[str, Status]
@@ -139,8 +206,8 @@ def test_ensure_ems_in_paper_actors(
             mode='paper',
             loglevel=loglevel,
         ) as (
-            book,
-            trades_stream,
+            client,
+            _, # trades_stream: tractor.MsgStream
             pps,
             accounts,
             dialogs,
@@ -151,6 +218,9 @@ def test_ensure_ems_in_paper_actors(
             # local ledger and `pps.toml` state ;)
             assert not pps
             assert not dialogs
+            # XXX: should be new client with no state from other tests
+            assert not client._sent_orders
+            assert accounts
 
             pikerd_subservices = ['emsd', 'samplerd']
 
@@ -166,13 +236,13 @@ def test_ensure_ems_in_paper_actors(
             # implicitly by the ems.
             assert brokerd_name in services.service_tasks
 
-            print('ALL SERVICES STARTED, terminating..')
+            print('ALL SERVICES STARTED, cancelling runtime with:\n'
+                  f'-> {cancel_method}')
 
+            if cancel_method == 'services':
                 await services.cancel_service('emsd')
 
-    with pytest.raises(
-        tractor._exceptions.ContextCancelled,
-    ) as exc_info:
-        trio.run(main)
-
-    cancel_msg: str = '_emsd_main()` was remotely cancelled by its caller'
-    assert cancel_msg in exc_info.value.args[0]
+            elif cancel_method == 'sigint':
+                raise KeyboardInterrupt
+
+    run_test_w_cancel_method(cancel_method, main)
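
Aside (illustrative, not in the diff): reusing the new `run_test_w_cancel_method()` helper for another service test. The body is elided; the parametrization mirrors the EMS test above:

    import pytest

    @pytest.mark.parametrize(
        'cancel_method',
        ['services', 'sigint'],
    )
    def test_some_service_teardown(
        open_test_pikerd,
        loglevel: str,
        cancel_method: str,
    ):
        async def main():
            ...  # boot services then cancel via the chosen method

        run_test_w_cancel_method(cancel_method, main)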