Draft a feed caching sub-system

cached_feeds
Tyler Goodlet 2021-03-25 10:26:02 -04:00
parent 0d4073dbd2
commit 65e7680cdd
1 changed files with 90 additions and 40 deletions

View File

@ -26,14 +26,22 @@ from contextlib import asynccontextmanager
from importlib import import_module from importlib import import_module
from types import ModuleType from types import ModuleType
from typing import ( from typing import (
Dict, Any, Sequence, AsyncIterator, Optional Dict, Any, Sequence,
AsyncIterator, Optional,
Callable, Awaitable
) )
import trio
import tractor import tractor
from pydantic import BaseModel
from ..brokers import get_brokermod from ..brokers import get_brokermod
from ..log import get_logger, get_console_log from ..log import get_logger, get_console_log
from .._daemon import spawn_brokerd, maybe_open_pikerd from .._daemon import (
spawn_brokerd,
maybe_open_pikerd,
maybe_spawn_brokerd,
)
from ._normalize import iterticks from ._normalize import iterticks
from ._sharedmem import ( from ._sharedmem import (
maybe_open_shm_array, maybe_open_shm_array,
@ -74,57 +82,97 @@ def get_ingestormod(name: str) -> ModuleType:
return module return module
# @dataclass
class _FeedsCache(BaseModel):
    """Data feeds manager.

    This is a brokerd side api used to manage persistent real-time
    streams that can be allocated and left alive indefinitely.

    """
    brokername: str
    nursery: trio.Nursery
    # registry of running feed tasks keyed by symbol; each value is the
    # cancel scope used to tear the feed down individually
    tasks: Dict[str, trio.CancelScope] = {}

    class Config:
        # required so pydantic accepts the non-pydantic ``trio`` types
        # (``Nursery``, ``CancelScope``) as field values
        arbitrary_types_allowed = True

    async def start_feed(
        self,
        symbol: str,
        func: Callable[[int], Awaitable[None]],
    ) -> None:
        """Start a bg feed task and register a surrounding cancel scope
        for it.

        """
        # NOTE(review): draft stub — the scope is opened but ``func`` is
        # not yet scheduled nor is the scope registered in ``tasks``.
        # ``trio.CancelCscope`` typo fixed to ``trio.CancelScope``.
        with trio.CancelScope() as cs:
            pass

    async def cancel_all(self) -> None:
        """Cancel every cached feed task registered in ``tasks``."""
        # fixed: ``.item()`` -> ``.items()``
        for name, cs in self.tasks.items():
            log.debug(f'Cancelling cached feed for {name}')
            cs.cancel()
# process-global singleton feeds manager; created once by the parent
# actor task via ``get_feeds_manager(..., nursery=...)``
_feeds: _FeedsCache = None


def get_feeds_manager(
    brokername: str,
    nursery: Optional[trio.Nursery] = None,
) -> _FeedsCache:
    """
    Retrieve the data feeds manager from process global scope.

    When ``nursery`` is provided this performs one-time setup of the
    singleton; subsequent lookups must pass the same ``brokername``
    and no nursery.
    """
    global _feeds

    if nursery is not None:
        assert _feeds is None, "Feeds manager is already setup?"

        # this is initial setup by parent actor
        _feeds = _FeedsCache(
            brokername=brokername,
            nursery=nursery,
        )
        assert not _feeds.tasks

    # guard against lookup-before-setup which would otherwise surface
    # as an opaque ``AttributeError`` on ``None``
    assert _feeds is not None, "Feeds manager was never setup?"
    assert _feeds.brokername == brokername, "Uhhh wtf"
    return _feeds
async def _setup_persistent_feeds(brokername: str) -> None:
    """Allocate an actor-wide service nursery in ``brokerd``
    such that feeds can be run in the background persistently by
    the broker backend as needed.

    """
    async with trio.open_nursery() as service_nursery:
        # registers the manager in process-global scope as a side
        # effect; the (previously unused) local binding was dropped
        get_feeds_manager(brokername, service_nursery)

        # we pin this task to keep the feeds manager active until the
        # parent actor decides to tear it down
        await trio.sleep_forever()
@tractor.stream
async def allocate_cached_feed(
    ctx: tractor.Context,
    symbol: str
):
    """Allocate (or attach to) a cached background feed for ``symbol``.

    Draft stub: the feed-manager lookup and shared mem buffer setup
    are not wired up yet.
    """
    # NOTE(review): the draft called
    # ``get_feeds_manager(brokername, service_nursery)`` but neither
    # name exists in this scope (guaranteed ``NameError``); the broker
    # name presumably must come from the actor/ctx — TODO confirm.
    # TODO: look up the feeds manager and setup shared mem buffer
    pass
@dataclass @dataclass
class Feed: class Feed:
"""A data feed for client-side interaction with far-process """A data feed for client-side interaction with far-process# }}}
real-time data sources. real-time data sources.
This is an thin abstraction on top of ``tractor``'s portals for This is an thin abstraction on top of ``tractor``'s portals for
@ -279,6 +327,8 @@ async def open_feed(
if opened: if opened:
assert data['is_shm_writer'] assert data['is_shm_writer']
log.info("Started shared mem bar writer") log.info("Started shared mem bar writer")
else:
s = attach_shm_array(shm_token)
shm_token['dtype_descr'] = list(shm_token['dtype_descr']) shm_token['dtype_descr'] = list(shm_token['dtype_descr'])
assert shm_token == shm.token # sanity assert shm_token == shm.token # sanity