First stage port of `.data.feed` to `MktPair`
Add a `MktPair` handling block for when a backend delivers an init msg containing a `mkt_info` field. Adjust the original `Symbol`-style `'symbol_info'` msg processing to use `Decimal` defaults and convert to `MktPair`, including slapping in a hacky `_atype: str` field XD. General initial name changes to `bs_mktid` and `_fqme` throughout!
parent da7371988a
commit 8b0aead72e
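For orientation, a rough sketch of the two init-msg shapes the feed allocator distinguishes in the hunks below; only the `fqsn`, `symbol_info`, `mkt_info`, tick-size and `asset_type` keys come from the diff, the concrete values and anything else are made up for illustration.

```python
# Hypothetical init msgs a backend could deliver; everything besides the
# 'fqsn', 'symbol_info', 'mkt_info', tick-size and 'asset_type' keys used
# in the diff below is illustrative only.

# legacy ``Symbol``-style shape: per-market metadata as a plain dict
legacy_init_msg = {
    'fqsn': 'xbtusd',  # broker-specific market id; the broker name gets
                       # appended downstream to form the fqme
    'symbol_info': {
        'price_tick_size': '0.1',       # coerced with ``Decimal`` defaults
        'lot_tick_size': '0.00000001',
        'asset_type': 'crypto',         # stuffed into the hacky ``_atype``
    },
}

# new msg-protocol shape: the backend packs the market itself
new_init_msg = {
    'fqsn': 'xbtusd',
    'mkt_info': None,  # would be an already-constructed ``MktPair``
}
```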
@@ -26,6 +26,7 @@ from collections import (
     Counter,
 )
 from contextlib import asynccontextmanager as acm
+from decimal import Decimal
 from datetime import datetime
 from functools import partial
 import time
@@ -71,8 +72,10 @@ from ._sharedmem import (
 from .ingest import get_ingestormod
 from .types import Struct
 from ..accounting._mktinfo import (
+    Asset,
+    MktPair,
+    unpack_fqme,
     Symbol,
-    unpack_fqsn,
 )
 from ._source import base_iohlc_dtype
 from ..ui import _search
@@ -565,7 +568,7 @@ async def tsdb_backfill(
         timeframe=timeframe,
     )

-    broker, symbol, expiry = unpack_fqsn(fqsn)
+    broker, symbol, expiry = unpack_fqme(fqsn)
     try:
         (
             latest_start_dt,
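The `unpack_fqsn` -> `unpack_fqme` swap is a pure rename at this call site. Purely for illustration, a minimal stand-alone unpacker under the assumption that the broker name is the trailing dotted token; the real `unpack_fqme()` in `piker.accounting._mktinfo` may parse venue/expiry fields differently:

```python
def unpack_fqme_sketch(fqme: str) -> tuple[str, str, str]:
    # split off the trailing broker name; anything between the symbol and
    # the broker is kept as an (optional) venue/expiry suffix
    symbol, *suffix, broker = fqme.lower().split('.')
    return broker, symbol, '.'.join(suffix)


assert unpack_fqme_sketch('btcusdt.binance') == ('binance', 'btcusdt', '')
assert unpack_fqme_sketch('mnq.cme.20230616.ib') == ('ib', 'mnq', 'cme.20230616')
```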
@@ -1009,17 +1012,44 @@ async def allocate_persistent_feed(

     # the broker-specific fully qualified symbol name,
     # but ensure it is lower-cased for external use.
-    bfqsn = msg['fqsn'].lower()
+    bs_mktid = msg['fqsn'].lower()

-    # true fqsn including broker/provider suffix
-    fqsn = '.'.join((bfqsn, brokername))
-    # msg['fqsn'] = bfqsn
+    # true fqme including broker/provider suffix
+    fqme = '.'.join((bs_mktid, brokername))

-    symbol = Symbol.from_fqsn(
-        fqsn=fqsn,
-        info=msg['symbol_info'],
-    )
-    assert symbol.type_key
+    mktinfo = msg.get('mkt_info')
+    if not mktinfo:
+
+        mktinfo = msg['symbol_info']
+
+        # TODO: read out renamed/new tick size fields in block below!
+        price_tick = mktinfo.get(
+            'price_tick_size',
+            Decimal('0.01'),
+        )
+        size_tick = mktinfo.get(
+            'lot_tick_size',
+            Decimal('0.0'),
+        )
+
+        log.warning(f'FQME: {fqme} -> backend needs port to `MktPair`')
+        mkt = MktPair.from_fqme(
+            fqme,
+            price_tick=price_tick,
+            size_tick=size_tick,
+            bs_mktid=bs_mktid,
+
+            _atype=mktinfo['asset_type']
+        )
+
+    else:
+        # the new msg-protocol is to expect an already packed
+        # ``Asset`` and ``MktPair`` object from the backend
+        mkt = mktinfo
+        assert isinstance(mkt, MktPair)
+        assert isinstance(mkt.dst, Asset)
+
+    assert mkt.type_key

     # HISTORY storage, run 2 tasks:
     # - a history loader / maintainer
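As a self-contained approximation of the dispatch added above: a bare dataclass stands in for `piker.accounting._mktinfo.MktPair` (the real class has a richer constructor, a `from_fqme()` classmethod, and `Asset` ends), and only the `Decimal` defaulting plus the `mkt_info`-vs-`symbol_info` branching is mirrored.

```python
from dataclasses import dataclass
from decimal import Decimal


@dataclass
class MktPairSketch:
    # bare stand-in for ``MktPair``; the real type also carries ``Asset``
    # src/dst ends, venue info, etc.
    fqme: str
    price_tick: Decimal
    size_tick: Decimal
    bs_mktid: str
    _atype: str = ''


def mkt_from_init_msg(msg: dict, brokername: str) -> MktPairSketch:
    bs_mktid = msg['fqsn'].lower()
    fqme = '.'.join((bs_mktid, brokername))

    mktinfo = msg.get('mkt_info')
    if not mktinfo:
        # legacy path: build the pair from the old ``symbol_info`` dict,
        # defaulting tick sizes when the backend doesn't provide them
        syminfo = msg['symbol_info']
        return MktPairSketch(
            fqme=fqme,
            price_tick=Decimal(str(syminfo.get('price_tick_size', '0.01'))),
            size_tick=Decimal(str(syminfo.get('lot_tick_size', '0.0'))),
            bs_mktid=bs_mktid,
            _atype=syminfo.get('asset_type', ''),
        )

    # new msg-protocol path: the backend already packed the market pair
    assert isinstance(mktinfo, MktPairSketch)
    return mktinfo


mkt = mkt_from_init_msg(
    {'fqsn': 'XBTUSD', 'symbol_info': {'price_tick_size': '0.1'}},
    brokername='kraken',
)
assert (mkt.fqme, mkt.price_tick) == ('xbtusd.kraken', Decimal('0.1'))
```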
@@ -1040,17 +1070,24 @@ async def allocate_persistent_feed(
         manage_history,
         mod,
         bus,
-        fqsn,
+        fqme,
         some_data_ready,
         feed_is_live,
     )

     # yield back control to starting nursery once we receive either
     # some history or a real-time quote.
-    log.info(f'waiting on history to load: {fqsn}')
+    log.info(f'waiting on history to load: {fqme}')
     await some_data_ready.wait()

+    symbol = Symbol.from_fqsn(
+        fqsn=fqme,
+        info=msg['symbol_info'],
+    )
     flume = Flume(
+        # TODO: we have to use this for now since currently the
+        # MktPair above doesn't render the correct output key it seems
+        # when we provide the `MktInfo` here?..?
         symbol=symbol,
         first_quote=first_quote,
         _rt_shm_token=rt_shm.token,
@@ -1061,7 +1098,7 @@ async def allocate_persistent_feed(

     # for ambiguous names we simply apply the retreived
     # feed to that name (for now).
-    bus.feeds[symstr] = bus.feeds[bfqsn] = flume
+    bus.feeds[symstr] = bus.feeds[bs_mktid] = flume

     task_status.started()

@@ -1104,7 +1141,7 @@ async def allocate_persistent_feed(
     # start sample loop and shm incrementer task for OHLC style sampling
     # at the above registered step periods.
     try:
-        log.info(f'Starting sampler task for {fqsn}')
+        log.info(f'Starting sampler task for {fqme}')
         await sample_and_broadcast(
             bus,
             rt_shm,
@@ -1114,7 +1151,7 @@ async def allocate_persistent_feed(
             sum_tick_vlm
         )
     finally:
-        log.warning(f'{fqsn} feed task terminated')
+        log.warning(f'{fqme} feed task terminated')


 @tractor.context
@@ -1197,22 +1234,22 @@ async def open_feed_bus(
         # subscriber
         flume = bus.feeds[symbol]
         sym = flume.symbol
-        bfqsn = sym.key
+        bs_mktid = sym.key
         fqsn = sym.fqsn  # true fqsn
-        assert bfqsn in fqsn and brokername in fqsn
+        assert bs_mktid in fqsn and brokername in fqsn

         if sym.suffix:
-            bfqsn = fqsn.removesuffix(f'.{brokername}')
-            log.warning(f'{brokername} expanded symbol {symbol} -> {bfqsn}')
+            bs_mktid = fqsn.removesuffix(f'.{brokername}')
+            log.warning(f'{brokername} expanded symbol {symbol} -> {bs_mktid}')

         # pack for ``.started()`` sync msg
         flumes[fqsn] = flume

-        # we use the broker-specific fqsn (bfqsn) for
-        # the sampler subscription since the backend isn't (yet)
-        # expected to append it's own name to the fqsn, so we filter
-        # on keys which *do not* include that name (e.g .ib) .
-        bus._subscribers.setdefault(bfqsn, set())
+        # we use the broker-specific market id (bs_mktid) for the
+        # sampler subscription since the backend isn't (yet) expected to
+        # append it's own name to the fqsn, so we filter on keys which
+        # *do not* include that name (e.g .ib) .
+        bus._subscribers.setdefault(bs_mktid, set())

     # sync feed subscribers with flume handles
     await ctx.started(
@@ -1276,9 +1313,9 @@ async def open_feed_bus(
             # maybe use the current task-id to key the sub list that's
             # added / removed? Or maybe we can add a general
             # pause-resume by sub-key api?
-            bfqsn = fqsn.removesuffix(f'.{brokername}')
-            local_subs.setdefault(bfqsn, set()).add(sub)
-            bus.add_subs(bfqsn, {sub})
+            bs_mktid = fqsn.removesuffix(f'.{brokername}')
+            local_subs.setdefault(bs_mktid, set()).add(sub)
+            bus.add_subs(bs_mktid, {sub})

         # sync caller with all subs registered state
         sub_registered.set()
@@ -1291,16 +1328,16 @@ async def open_feed_bus(
                 async for msg in stream:

                     if msg == 'pause':
-                        for bfqsn, subs in local_subs.items():
+                        for bs_mktid, subs in local_subs.items():
                             log.info(
-                                f'Pausing {bfqsn} feed for {uid}')
-                            bus.remove_subs(bfqsn, subs)
+                                f'Pausing {bs_mktid} feed for {uid}')
+                            bus.remove_subs(bs_mktid, subs)

                     elif msg == 'resume':
-                        for bfqsn, subs in local_subs.items():
+                        for bs_mktid, subs in local_subs.items():
                             log.info(
-                                f'Resuming {bfqsn} feed for {uid}')
-                            bus.add_subs(bfqsn, subs)
+                                f'Resuming {bs_mktid} feed for {uid}')
+                            bus.add_subs(bs_mktid, subs)

                     else:
                         raise ValueError(msg)
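The pause/resume handling above is plain set bookkeeping keyed by `bs_mktid`. A toy registry showing the same idea, with the bus' `add_subs()`/`remove_subs()` methods replaced by dict/set operations (the bus internals here are an assumption, not piker's implementation):

```python
from collections import defaultdict

# toy stand-in for the bus' subscriber table: bs_mktid -> set of sub handles
_subscribers: defaultdict[str, set] = defaultdict(set)


def add_subs(bs_mktid: str, subs: set) -> None:
    _subscribers[bs_mktid] |= subs


def remove_subs(bs_mktid: str, subs: set) -> None:
    _subscribers[bs_mktid] -= subs


def handle_ctl_msg(msg: str, local_subs: dict[str, set]) -> None:
    # mirrors the stream loop above: 'pause' drops this task's subs from
    # the sampler broadcast set, 'resume' re-adds them
    if msg not in ('pause', 'resume'):
        raise ValueError(msg)

    for bs_mktid, subs in local_subs.items():
        if msg == 'pause':
            remove_subs(bs_mktid, subs)
        else:
            add_subs(bs_mktid, subs)
```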
@@ -1314,8 +1351,8 @@ async def open_feed_bus(
             cs.cancel()

             # drop all subs for this task from the bus
-            for bfqsn, subs in local_subs.items():
-                bus.remove_subs(bfqsn, subs)
+            for bs_mktid, subs in local_subs.items():
+                bus.remove_subs(bs_mktid, subs)


 class Feed(Struct):
@@ -1512,7 +1549,7 @@ async def open_feed(
     feed = Feed()

     for fqsn in fqsns:
-        brokername, key, suffix = unpack_fqsn(fqsn)
+        brokername, key, suffix = unpack_fqme(fqsn)
         bfqsn = fqsn.replace('.' + brokername, '')

         try: