Compare commits: 18e6b8beaa...2cb7b505e1 (46 commits)
Commits (newest first):

2cb7b505e1, 33a37f24c7, 022432cce7, f54cd5222c, 2cc4e70187, b2cead3539,
e5edccdada, e993001b2e, d15f515913, d6e1cd94a7, 9563a3e908, 31464143ad,
8ca8ff85ea, 477f1fa29e, 04170c7f91, 1716f84f36, 4da75b40f3, 8165c684e5,
ddf47c4555, c196188eaa, dd0d2bd07f, 3a7a983451, 7d62b9d5e7, c96e3d377a,
04e03be475, e87939e1f2, 3630c02f48, fdf34e51bb, a0b540022e, 20f6343be2,
2f30fc4fb8, 76b5547c2d, 08811f9a61, b577180773, f12c452d96, 3531c2edc1,
97dd7e766a, ab1463d942, 5314cb79d4, 0c0b7116e3, 19c343e8b2, b7883325a9,
37ca081555, 44b8c70521, e6af97c596, 95ace5acb8

README.rst (234 changed lines):

```diff
@@ -1,161 +1,162 @@
 piker
 -----
-trading gear for hackers
+trading gear for hackers.
 
 |gh_actions|
 
 .. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpikers%2Fpiker%2Fbadge&style=popout-square
     :target: https://actions-badge.atrox.dev/piker/pikers/goto
 
-``piker`` is a broker agnostic, next-gen FOSS toolset and runtime for
-real-time computational trading targeted at `hardcore Linux users
-<comp_trader>`_ .
+``piker`` is a broker agnostic, next-gen FOSS toolset for real-time
+computational trading targeted at `hardcore Linux users <comp_trader>`_ .
 
-we use much bleeding edge tech including (but not limited to):
+we use as much bleeding edge tech as possible including (but not limited to):
 
 - latest python for glue_
-- uv_ for packaging and distribution
-- trio_ & tractor_ for our distributed `structured concurrency`_ runtime
-- Qt_ for pristine low latency UIs
-- pyqtgraph_ (which we've extended) for real-time charting and graphics
-- ``polars`` ``numpy`` and ``numba`` for redic `fast numerics`_
-- `apache arrow and parquet`_ for time-series storage
+- trio_ & tractor_ for our distributed, multi-core, real-time streaming
+  `structured concurrency`_ runtime B)
+- Qt_ for pristine high performance UIs
+- pyqtgraph_ for real-time charting
+- ``polars`` ``numpy`` and ``numba`` for `fast numerics`_
+- `apache arrow and parquet`_ for time series history management
+  persistence and sharing
+- (prototyped) techtonicdb_ for L2 book storage
 
-potential projects we might integrate with soon,
-
-- (already prototyped in ) techtonicdb_ for L2 book storage
-
-.. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/
-.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
-.. _uv: https://docs.astral.sh/uv/
+.. |travis| image:: https://img.shields.io/travis/pikers/piker/master.svg
+    :target: https://travis-ci.org/pikers/piker
 .. _trio: https://github.com/python-trio/trio
 .. _tractor: https://github.com/goodboy/tractor
 .. _structured concurrency: https://trio.discourse.group/
+.. _marketstore: https://github.com/alpacahq/marketstore
+.. _techtonicdb: https://github.com/0b01/tectonicdb
 .. _Qt: https://www.qt.io/
 .. _pyqtgraph: https://github.com/pyqtgraph/pyqtgraph
+.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
 .. _apache arrow and parquet: https://arrow.apache.org/faq/
 .. _fast numerics: https://zerowithdot.com/python-numpy-and-pandas-performance/
-.. _techtonicdb: https://github.com/0b01/tectonicdb
+.. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/
 
 
-focus and feats:
-****************
-fitting with these tenets, we're always open to new
-framework/lib/service interop suggestions and ideas!
+focus and features:
+*******************
+- 100% federated: your code, your hardware, your data feeds, your broker fills.
+- zero web: low latency, native software that doesn't try to re-invent the OS
+- maximal **privacy**: prevent brokers and mms from knowing your
+  planz; smack their spreads with dark volume.
+- zero clutter: modal, context oriented UIs that echew minimalism, reduce
+  thought noise and encourage un-emotion.
+- first class parallelism: built from the ground up on next-gen structured concurrency
+  primitives.
+- traders first: broker/exchange/asset-class agnostic
+- systems grounded: real-time financial signal processing that will
+  make any queuing or DSP eng juice their shorts.
+- non-tina UX: sleek, powerful keyboard driven interaction with expected use in tiling wms
+- data collaboration: every process and protocol is multi-host scalable.
+- fight club ready: zero interest in adoption by suits; no corporate friendly license, ever.
 
-- **100% federated**:
-  your code, your hardware, your data feeds, your broker fills.
+fitting with these tenets, we're always open to new framework suggestions and ideas.
 
-- **zero web**:
-  low latency as a prime objective, native UIs and modern IPC
-  protocols without trying to re-invent the "OS-as-an-app"..
-
-- **maximal privacy**:
-  prevent brokers and mms from knowing your planz; smack their
-  spreads with dark volume from a VPN tunnel.
-
-- **zero clutter**:
-  modal, context oriented UIs that echew minimalism, reduce thought
-  noise and encourage un-emotion.
-
-- **first class parallelism**:
-  built from the ground up on a next-gen structured concurrency
-  supervision sys.
-
-- **traders first**:
-  broker/exchange/venue/asset-class/money-sys agnostic
-
-- **systems grounded**:
-  real-time financial signal processing (fsp) that will make any
-  queuing or DSP eng juice their shorts.
-
-- **non-tina UX**:
-  sleek, powerful keyboard driven interaction with expected use in
-  tiling wms (or maybe even a DDE).
-
-- **data collab at scale**:
-  every actor-process and protocol is multi-host aware.
-
-- **fight club ready**:
-  zero interest in adoption by suits; no corporate friendly license,
-  ever.
-
-building the hottest looking, fastest, most reliable, keyboard
-friendly FOSS trading platform is the dream; join the cause.
+building the best looking, most reliable, keyboard friendly trading
+platform is the dream; join the cause.
 
 
-a sane install with `uv`
-************************
-bc why install with `python` when you can faster with `rust` ::
+sane install with `poetry`
+**************************
+TODO!
 
 
-    uv lock
+
+rigorous install on ``nixos`` using ``poetry2nix``
+**************************************************
+TODO!
 
 
 hacky install on nixos
 **********************
-``NixOS`` is our core devs' distro of choice for which we offer
+`NixOS` is our core devs' distro of choice for which we offer
 a stringently defined development shell envoirment that can be loaded with::
 
-    nix-shell default.nix
+    nix-shell develop.nix
+
+this will setup the required python environment to run piker, make sure to
+run::
+
+    pip install -r requirements.txt -e .
+
+once after loading the shell
 
 
-start a chart
-*************
-run a realtime OHLCV chart stand-alone::
+install wild-west style via `pip`
+*********************************
+``piker`` is currently under heavy pre-alpha development and as such
+should be cloned from this repo and hacked on directly.
 
-    piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken
+for a development install::
 
-this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets from 2 diff cexes
-overlayed on the same graph. Use of `piker` without first starting
-a daemon (`pikerd` - see below) means there is an implicit spawning of the
-multi-actor-runtime (implemented as a `tractor` app).
-
-For additional subsystem feats available through our chart UI see the
-various sub-readmes:
-
-- order control using a mouse-n-keyboard UX B)
-- cross venue market-pair (what most call "symbol") search, select, overlay Bo
-- financial-signal-processing (`piker.fsp`) write-n-reload to sub-chart BO
-- src-asset derivatives scan for anal, like the infamous "max pain" XO
+    git clone git@github.com:pikers/piker.git
+    cd piker
+    virtualenv env
+    source ./env/bin/activate
+    pip install -r requirements.txt -e .
 
 
-spawn a daemon standalone
-*************************
-we call the root actor-process the ``pikerd``. it can be (and is
-recommended normally to be) started separately from the ``piker
-chart`` program::
+check out our charts
+********************
+bet you weren't expecting this from the foss::
+
+    piker -l info -b kraken -b binance chart btcusdt.binance --pdb
+
+
+this runs the main chart (currently with 1m sampled OHLC) in in debug
+mode and you can practice paper trading using the following
+micro-manual:
+
+``order_mode`` (
+    edge triggered activation by any of the following keys,
+    ``mouse-click`` on y-level to submit at that price
+    ):
+
+    - ``f``/ ``ctl-f`` to stage buy
+    - ``d``/ ``ctl-d`` to stage sell
+    - ``a`` to stage alert
+
+
+``search_mode`` (
+    ``ctl-l`` or ``ctl-space`` to open,
+    ``ctl-c`` or ``ctl-space`` to close
+    ) :
+
+    - begin typing to have symbol search automatically lookup
+      symbols from all loaded backend (broker) providers
+    - arrow keys and mouse click to navigate selection
+    - vi-like ``ctl-[hjkl]`` for navigation
+
+
+you can also configure your position allocation limits from the
+sidepane.
+
+
+run in distributed mode
+***********************
+start the service manager and data feed daemon in the background and
+connect to it::
 
     pikerd -l info --pdb
 
-the daemon does nothing until a ``piker``-client (like ``piker
-chart``) connects and requests some particular sub-system. for
-a connecting chart ``pikerd`` will spawn and manage at least,
 
-- a data-feed daemon: ``datad`` which does all the work of comms with
-  the backend provider (in this case the ``binance`` cex).
-- a paper-trading engine instance, ``paperboi.binance``, (if no live
-  account has been configured) which allows for auto/manual order
-  control against the live quote stream.
+connect your chart::
 
-*using* an actor-service (aka micro-daemon) manager which dynamically
-supervises various sub-subsystems-as-services throughout the ``piker``
-runtime-stack.
+    piker -l info -b kraken -b binance chart xmrusdt.binance --pdb
 
-now you can (implicitly) connect your chart::
 
-    piker chart btcusdt.spot.binance
-
-since ``pikerd`` was started separately you can now enjoy a persistent
-real-time data stream tied to the daemon-tree's lifetime. i.e. the next
-time you spawn a chart it will obviously not only load much faster
-(since the underlying ``datad.binance`` is left running with its
-in-memory IPC data structures) but also the data-feed and any order
-mgmt states should be persistent until you finally cancel ``pikerd``.
+enjoy persistent real-time data feeds tied to daemon lifetime. the next
+time you spawn a chart it will load much faster since the data feed has
+been cached and is now always running live in the background until you
+kill ``pikerd``.
 
 
 if anyone asks you what this project is about
 *********************************************
-you don't talk about it; just use it.
+you don't talk about it.
 
 
 how do i get involved?
@@ -165,15 +166,6 @@ enter the matrix.
 
 how come there ain't that many docs
 ***********************************
-i mean we want/need them but building the core right has been higher
-prio then marketting (and likely will stay that way Bp).
-
-soo, suck it up bc,
-
-- no one is trying to sell you on anything
-- learning the code base is prolly way more valuable
-- the UI/UXs are intended to be "intuitive" for any hacker..
-
-we obviously need tonz help so if you want to start somewhere and
-can't necessarily write "advanced" concurrent python/rust code, this
-helping document literally anything might be the place for you!
+suck it up, learn the code; no one is trying to sell you on anything.
+also, we need lotsa help so if you want to start somewhere and can't
+necessarily write serious code, this might be the place for you!
```

```diff
@@ -30,7 +30,8 @@ from types import ModuleType
 from typing import (
     Any,
     Iterator,
-    Generator
+    Generator,
+    TYPE_CHECKING,
 )
 
 import pendulum
@@ -59,8 +60,10 @@ from ..clearing._messages import (
     BrokerdPosition,
 )
 from piker.types import Struct
+from piker.log import get_logger
+
+if TYPE_CHECKING:
     from piker.data._symcache import SymbologyCache
-from ..log import get_logger
 
 log = get_logger(__name__)
 
@@ -493,6 +496,17 @@ class Account(Struct):
 
         _mktmap_table: dict[str, MktPair] | None = None,
 
+        only_require: list[str]|True = True,
+        # ^list of fqmes that are "required" to be processed from
+        # this ledger pass; we often don't care about others and
+        # definitely shouldn't always error in such cases.
+        # (eg. broker backend loaded that doesn't yet supsport the
+        # symcache but also, inside the paper engine we don't ad-hoc
+        # request `get_mkt_info()` for every symbol in the ledger,
+        # only the one for which we're simulating against).
+        # TODO, not sure if there's a better soln for this, ideally
+        # all backends get symcache support afap i guess..
+
     ) -> dict[str, Position]:
         '''
         Update the internal `.pps[str, Position]` table from input
@@ -535,11 +549,32 @@ class Account(Struct):
                 if _mktmap_table is None:
                     raise
 
+                required: bool = (
+                    only_require is True
+                    or (
+                        only_require is not True
+                        and
+                        fqme in only_require
+                    )
+                )
                 # XXX: caller is allowed to provide a fallback
                 # mktmap table for the case where a new position is
                 # being added and the preloaded symcache didn't
                 # have this entry prior (eg. with frickin IB..)
-                mkt = _mktmap_table[fqme]
+                if (
+                    not (mkt := _mktmap_table.get(fqme))
+                    and
+                    required
+                ):
+                    raise
+
+                elif not required:
+                    continue
+
+                else:
+                    # should be an entry retreived somewhere
+                    assert mkt
+
 
             if not (pos := pps.get(bs_mktid)):
 
@@ -656,7 +691,7 @@ class Account(Struct):
     def write_config(self) -> None:
         '''
         Write the current account state to the user's account TOML file, normally
-        something like ``pps.toml``.
+        something like `pps.toml`.
 
         '''
         # TODO: show diff output?
```
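
The new `only_require` parameter above gates which ledger entries are allowed to hard-fail market-info resolution. A minimal standalone sketch of that gating logic; `resolve_markets` and its arguments are illustrative names, not piker's API, and per the annotation in the diff `only_require` is expected to be either `True` or a list of fqme strings:

```python
def resolve_markets(
    ledger_fqmes: list[str],
    mktmap: dict[str, object],
    only_require: list[str] | bool = True,  # True => every fqme is required
) -> dict[str, object]:
    resolved: dict[str, object] = {}
    for fqme in ledger_fqmes:
        # an entry is "required" either when the caller wants all of
        # them (only_require=True) or when it's explicitly listed
        required: bool = (
            only_require is True
            or fqme in only_require
        )
        mkt = mktmap.get(fqme)
        if mkt is None and required:
            raise KeyError(f'no market info for required {fqme!r}')
        if mkt is None:
            continue  # non-required entries are silently skipped
        resolved[fqme] = mkt
    return resolved

# e.g. only 'xmrusdt.kraken' may fail silently here:
assert resolve_markets(
    ['btcusdt.binance', 'xmrusdt.kraken'],
    {'btcusdt.binance': object()},
    only_require=['btcusdt.binance'],
)
```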

```diff
@@ -23,6 +23,7 @@ from __future__ import annotations
 from contextlib import (
     asynccontextmanager as acm,
 )
+from functools import partial
 from types import ModuleType
 from typing import (
     TYPE_CHECKING,
@@ -190,14 +191,17 @@ def broker_init(
 
 
 async def spawn_brokerd(
-
     brokername: str,
     loglevel: str | None = None,
 
     **tractor_kwargs,
 
 ) -> bool:
+    '''
+    Spawn a `brokerd.<backendname>` subactor service daemon
+    using `pikerd`'s service mngr.
 
+    '''
     from piker.service._util import log  # use service mngr log
     log.info(f'Spawning {brokername} broker daemon')
 
@@ -217,27 +221,35 @@ async def spawn_brokerd(
 
     # ask `pikerd` to spawn a new sub-actor and manage it under its
     # actor nursery
-    from piker.service import Services
-
-    dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}'
-    portal = await Services.actor_n.start_actor(
-        dname,
-        enable_modules=_data_mods + tractor_kwargs.pop('enable_modules'),
-        debug_mode=Services.debug_mode,
-        **tractor_kwargs
+    from piker.service import (
+        get_service_mngr,
+        ServiceMngr,
     )
-
-    # NOTE: the service mngr expects an already spawned actor + its
-    # portal ref in order to do non-blocking setup of brokerd
-    # service nursery.
-    await Services.start_service_task(
-        dname,
-        portal,
-
+    dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}'
+    mngr: ServiceMngr = get_service_mngr()
+    ctx: tractor.Context = await mngr.start_service(
+        daemon_name=dname,
+        ctx_ep=partial(
             # signature of target root-task endpoint
             daemon_fixture_ep,
+
+            # passed to daemon_fixture_ep(**kwargs)
             brokername=brokername,
             loglevel=loglevel,
+        ),
+        debug_mode=mngr.debug_mode,
+        loglevel=loglevel,
+        enable_modules=(
+            _data_mods
+            +
+            tractor_kwargs.pop('enable_modules')
+        ),
+        **tractor_kwargs
+    )
+    assert (
+        not ctx.cancel_called
+        and ctx.portal  # parent side
+        and dname in ctx.chan.uid  # subactor is named as desired
    )
     return True
 
@@ -262,8 +274,7 @@ async def maybe_spawn_brokerd(
     from piker.service import maybe_spawn_daemon
 
     async with maybe_spawn_daemon(
-
-        f'brokerd.{brokername}',
+        service_name=f'brokerd.{brokername}',
         service_task_target=spawn_brokerd,
         spawn_args={
             'brokername': brokername,
```
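
The rewritten `spawn_brokerd` hands the service manager a context endpoint with broker-specific kwargs pre-bound via `functools.partial`, so the manager can schedule the endpoint later without knowing its arguments. A self-contained sketch of just that pre-binding pattern; the endpoint body and argument values below are made up for illustration:

```python
from functools import partial

import trio


async def daemon_fixture_ep(
    brokername: str,
    loglevel: str | None = None,
) -> None:
    # stand-in for the real daemon root-task endpoint
    print(f'running brokerd.{brokername} at level {loglevel!r}')


# the mngr receives a zero-arg async callable; the broker-specific
# kwargs are already baked in:
ctx_ep = partial(
    daemon_fixture_ep,
    brokername='binance',
    loglevel='info',
)

# awaiting `ctx_ep()` is equivalent to
# `daemon_fixture_ep(brokername='binance', loglevel='info')`:
trio.run(ctx_ep)
```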

```diff
@@ -42,6 +42,7 @@ from trio_typing import TaskStatus
 from pendulum import (
     from_timestamp,
 )
+from rapidfuzz import process as fuzzy
 import numpy as np
 import tractor
 
@@ -110,7 +111,6 @@ class AggTrade(Struct, frozen=True):
 
 async def stream_messages(
     ws: NoBsWs,
-
 ) -> AsyncGenerator[NoBsWs, dict]:
 
     # TODO: match syntax here!
@@ -221,8 +221,6 @@ def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:
     }
 
 
-# TODO, why aren't frame resp `log.info()`s showing in upstream
-# code?!
 @acm
 async def open_history_client(
     mkt: MktPair,
@@ -465,8 +463,6 @@ async def stream_quotes(
     ):
         init_msgs: list[FeedInit] = []
         for sym in symbols:
-            mkt: MktPair
-            pair: Pair
             mkt, pair = await get_mkt_info(sym)
 
             # build out init msgs according to latest spec
@@ -515,6 +511,7 @@ async def stream_quotes(
 
             # start streaming
             async for typ, quote in msg_gen:
+
                 # period = time.time() - last
                 # hz = 1/period if period else float('inf')
                 # if hz > 60:
@@ -550,7 +547,7 @@ async def open_symbol_search(
                 )
 
                 # repack in fqme-keyed table
-                byfqme: dict[str, Pair] = {}
+                byfqme: dict[start, Pair] = {}
                 for pair in pairs.values():
                     byfqme[pair.bs_fqme] = pair
 
```

```diff
@@ -1250,12 +1250,6 @@ async def load_aio_clients(
 
         for i in range(connect_retries):
             try:
-                log.info(
-                    'Trying `ib_async` connect\n'
-                    f'{host}: {port}\n'
-                    f'clientId: {client_id}\n'
-                    f'timeout: {connect_timeout}\n'
-                )
                 await ib.connectAsync(
                     host,
                     port,
@@ -1373,9 +1367,7 @@ async def load_clients_for_trio(
     a ``tractor.to_asyncio.open_channel_from()``.
 
     '''
-    async with load_aio_clients(
-        disconnect_on_exit=False,
-    ) as accts2clients:
+    async with load_aio_clients() as accts2clients:
 
         to_trio.send_nowait(accts2clients)
 
```
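
For context on the surrounding (unchanged) retry loop, a generic sketch of bounded connect-retries under trio. This is only an illustrative shape, not `ib_async`'s actual API: the real code passes its timeout to `connectAsync` rather than using `trio.fail_after`, and the backoff below is an assumption:

```python
import trio


async def connect_with_retries(
    connect,  # async callable performing one connection attempt
    retries: int = 3,  # assumed >= 1
    timeout: float = 5.0,
):
    last_err: Exception | None = None
    for i in range(retries):
        try:
            # cap each individual attempt with a deadline
            with trio.fail_after(timeout):
                return await connect()
        except (OSError, trio.TooSlowError) as err:
            last_err = err
            await trio.sleep(2 ** i)  # simple exponential backoff
    # all attempts exhausted: surface the final failure
    raise last_err
```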

```diff
@@ -62,7 +62,7 @@ from piker._cacheables import (
 )
 from piker.log import get_logger
 from piker.data.validate import FeedInit
-from piker.types import Struct  # NOTE, this is already a `tractor.msg.Struct`
+from piker.types import Struct
 from piker.data import (
     def_iohlcv_fields,
     match_from_pairs,
@@ -98,18 +98,9 @@ class KucoinMktPair(Struct, frozen=True):
     def size_tick(self) -> Decimal:
         return Decimal(str(self.quoteMinSize))
 
-    callauctionFirstStageStartTime: None|float
-    callauctionIsEnabled: bool
-    callauctionPriceCeiling: float|None
-    callauctionPriceFloor: float|None
-    callauctionSecondStageStartTime: float|None
-    callauctionThirdStageStartTime: float|None
-
     enableTrading: bool
-    feeCategory: int
     feeCurrency: str
     isMarginEnabled: bool
-    makerFeeCoefficient: float
     market: str
     minFunds: float
     name: str
@@ -119,10 +110,11 @@ class KucoinMktPair(Struct, frozen=True):
     quoteIncrement: float
     quoteMaxSize: float
     quoteMinSize: float
-    st: bool
     symbol: str  # our bs_mktid, kucoin's internal id
+    feeCategory: int
+    makerFeeCoefficient: float
     takerFeeCoefficient: float
-    tradingStartTime: float|None
+    st: bool
 
 
 class AccountTrade(Struct, frozen=True):
@@ -404,13 +396,7 @@ class Client:
         pairs: dict[str, KucoinMktPair] = {}
         fqmes2mktids: bidict[str, str] = bidict()
         for item in entries:
-            try:
-                pair = pairs[item['name']] = KucoinMktPair(**item)
-            except TypeError as te:
-                raise TypeError(
-                    '`KucoinMktPair` and reponse fields do not match ??\n'
-                    f'{KucoinMktPair.fields_diff(item)}\n'
-                ) from te
+            pair = pairs[item['name']] = KucoinMktPair(**item)
             fqmes2mktids[
                 item['name'].lower().replace('-', '')
             ] = pair.name
```
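
The dropped `try/except` above existed because constructing a struct from a raw exchange payload (`KucoinMktPair(**item)`) raises `TypeError` the moment the venue adds, removes, or renames a field. A tiny demonstration of that failure mode, with a stdlib dataclass standing in for `piker.types.Struct` (field names here are hypothetical):

```python
from dataclasses import dataclass


@dataclass(frozen=True)
class PairSketch:
    symbol: str
    enableTrading: bool


# imagine the exchange API grew a field our schema doesn't know about:
item = {'symbol': 'BTC-USDT', 'enableTrading': True, 'newApiField': 1}

try:
    PairSketch(**item)  # unexpected keyword arg -> TypeError
except TypeError as te:
    # the removed guard re-raised here with a field diff to make
    # this kind of schema drift obvious in logs
    print(f'schema drift detected: {te}')
```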

```diff
@@ -1,49 +0,0 @@
-piker.clearing
-______________
-trade execution-n-control subsys for both live and paper trading as
-well as algo-trading manual override/interaction across any backend
-broker and data provider.
-
-avail UIs
-*********
-
-order ctl
----------
-the `piker.clearing` subsys is exposed mainly though
-the `piker chart` GUI as a "chart trader" style UX and
-is automatically enabled whenever a chart is opened.
-
-.. ^TODO, more prose here!
-
-the "manual" order control features are exposed via the
-`piker.ui.order_mode` API and can pretty much always be
-used (at least) in simulated-trading mode, aka "paper"-mode, and
-the micro-manual is as follows:
-
-``order_mode`` (
-    edge triggered activation by any of the following keys,
-    ``mouse-click`` on y-level to submit at that price
-    ):
-
-    - ``f``/ ``ctl-f`` to stage buy
-    - ``d``/ ``ctl-d`` to stage sell
-    - ``a`` to stage alert
-
-
-``search_mode`` (
-    ``ctl-l`` or ``ctl-space`` to open,
-    ``ctl-c`` or ``ctl-space`` to close
-    ) :
-
-    - begin typing to have symbol search automatically lookup
-      symbols from all loaded backend (broker) providers
-    - arrow keys and mouse click to navigate selection
-    - vi-like ``ctl-[hjkl]`` for navigation
-
-
-position (pp) mgmt
-------------------
-you can also configure your position allocation limits from the
-sidepane.
-
-.. ^TODO, explain and provide tut once more refined!
```

```diff
@@ -653,7 +653,11 @@ class Router(Struct):
             flume = feed.flumes[fqme]
             first_quote: dict = flume.first_quote
             book: DarkBook = self.get_dark_book(broker)
-            book.lasts[fqme]: float = float(first_quote['last'])
+
+            if not (last := first_quote.get('last')):
+                last: float = flume.rt_shm.array[-1]['close']
+
+            book.lasts[fqme]: float = float(last)
 
             async with self.maybe_open_brokerd_dialog(
                 brokermod=brokermod,
@@ -716,7 +720,7 @@ class Router(Struct):
             subs = self.subscribers[sub_key]
 
         sent_some: bool = False
-        for client_stream in subs:
+        for client_stream in subs.copy():
             try:
                 await client_stream.send(msg)
                 sent_some = True
@@ -1010,6 +1014,10 @@ async def translate_and_relay_brokerd_events(
                 status_msg.brokerd_msg = msg
                 status_msg.src = msg.broker_details['name']
 
-                await router.client_broadcast(
-                    status_msg.req.symbol,
-                    status_msg,
+                if not status_msg.req:
+                    # likely some order change state?
+                    await tractor.pause()
+                else:
+                    await router.client_broadcast(
+                        status_msg.req.symbol,
+                        status_msg,
```
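
The switch to `subs.copy()` matters because mutating a `set` while iterating it raises `RuntimeError`, and the broadcast loop may need to drop dead client streams mid-iteration (the pruning itself happens outside the shown hunk). A self-contained demonstration of why the snapshot is safe:

```python
subs: set[str] = {'client-a', 'client-b', 'client-c'}

for client in subs.copy():  # iterate a snapshot, not the live set
    if client == 'client-b':  # e.g. a stream whose .send() failed
        subs.discard(client)  # safe: we never mutate what we iterate

assert subs == {'client-a', 'client-c'}

# by contrast, `for client in subs: subs.discard(client)` raises
# `RuntimeError: Set changed size during iteration`.
```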

```diff
@@ -653,6 +653,7 @@ async def open_trade_dialog(
                 # in) use manually constructed table from calling
                 # the `.get_mkt_info()` provider EP above.
                 _mktmap_table=mkt_by_fqme,
+                only_require=list(mkt_by_fqme),
             )
 
             pp_msgs: list[BrokerdPosition] = []
```

```diff
@@ -335,7 +335,7 @@ def services(config, tl, ports):
                 name='service_query',
                 loglevel=config['loglevel'] if tl else None,
             ),
-            tractor.get_arbiter(
+            tractor.get_registry(
                 host=host,
                 port=ports[0]
             ) as portal
```

```diff
@@ -104,15 +104,14 @@ def get_app_dir(
     # `tractor`) with the testing dir and check for it whenever we
     # detect `pytest` is being used (which it isn't under normal
     # operation).
-    # if "pytest" in sys.modules:
-    #     import tractor
-    #     actor = tractor.current_actor(err_on_no_runtime=False)
-    #     if actor:  # runtime is up
-    #         rvs = tractor._state._runtime_vars
-    #         import pdbp; pdbp.set_trace()
-    #         testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
-    #         assert testdirpath.exists(), 'piker test harness might be borked!?'
-    #         app_name = str(testdirpath)
+    if "pytest" in sys.modules:
+        import tractor
+        actor = tractor.current_actor(err_on_no_runtime=False)
+        if actor:  # runtime is up
+            rvs = tractor._state._runtime_vars
+            testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
+            assert testdirpath.exists(), 'piker test harness might be borked!?'
+            app_name = str(testdirpath)
 
     if platform.system() == 'Windows':
         key = "APPDATA" if roaming else "LOCALAPPDATA"
```
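
The newly-enabled block detects a test run purely by checking `sys.modules`, then swaps the app-config dir for the harness-provided one. The detection trick in isolation; the fallback path below is hypothetical, and the real code reads the dir from `tractor`'s runtime vars instead:

```python
import sys
from pathlib import Path


def app_dir(default: Path) -> Path:
    # `pytest` only shows up in `sys.modules` once a test session has
    # imported it, so this is a cheap "am i under test?" check that
    # needs no env vars or flags.
    if 'pytest' in sys.modules:
        testdir = Path('/tmp/piker-tests')  # hypothetical test dir
        if testdir.exists():
            return testdir
    return default
```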

```diff
@@ -25,10 +25,12 @@ from collections import (
     defaultdict,
 )
 from contextlib import asynccontextmanager as acm
+from functools import partial
 import time
 from typing import (
     Any,
     AsyncIterator,
+    Callable,
     TYPE_CHECKING,
 )
 
@@ -42,7 +44,7 @@ from tractor.trionics import (
     maybe_open_nursery,
 )
 import trio
-from trio_typing import TaskStatus
+from trio import TaskStatus
 
 from .ticktools import (
     frame_ticks,
@@ -53,6 +55,9 @@ from ._util import (
     get_console_log,
 )
 from ..service import maybe_spawn_daemon
+from piker.log import (
+    mk_repr,
+)
 
 if TYPE_CHECKING:
     from ._sharedmem import (
@@ -70,6 +75,7 @@ if TYPE_CHECKING:
 _default_delay_s: float = 1.0
 
 
+# TODO: use new `tractor.singleton_acm` API for this!
 class Sampler:
     '''
     Global sampling engine registry.
@@ -79,9 +85,9 @@ class Sampler:
 
     This non-instantiated type is meant to be a singleton within
     a `samplerd` actor-service spawned once by the user wishing to
-    time-step-sample (real-time) quote feeds, see
-    ``.service.maybe_open_samplerd()`` and the below
-    ``register_with_sampler()``.
+    time-step-sample a (real-time) quote feeds, see
+    `.service.maybe_open_samplerd()` and the below
+    `register_with_sampler()`.
 
     '''
     service_nursery: None | trio.Nursery = None
@@ -375,7 +381,10 @@ async def register_with_sampler(
                 assert Sampler.ohlcv_shms
 
             # unblock caller
-            await ctx.started(set(Sampler.ohlcv_shms.keys()))
+            await ctx.started(
+                # XXX bc msgpack only allows one array type!
+                list(Sampler.ohlcv_shms.keys())
+            )
 
             if open_index_stream:
                 try:
@@ -419,7 +428,6 @@ async def register_with_sampler(
 
 
 async def spawn_samplerd(
-
     loglevel: str | None = None,
     **extra_tractor_kwargs
 
@@ -429,7 +437,10 @@ async def spawn_samplerd(
     update and increment count write and stream broadcasting.
 
     '''
-    from piker.service import Services
+    from piker.service import (
+        get_service_mngr,
+        ServiceMngr,
+    )
 
     dname = 'samplerd'
     log.info(f'Spawning `{dname}`')
@@ -437,26 +448,33 @@ async def spawn_samplerd(
     # singleton lock creation of ``samplerd`` since we only ever want
     # one daemon per ``pikerd`` proc tree.
     # TODO: make this built-into the service api?
-    async with Services.locks[dname + '_singleton']:
+    mngr: ServiceMngr = get_service_mngr()
+    already_started: bool = dname in mngr.service_tasks
 
-        if dname not in Services.service_tasks:
+    async with mngr._locks[dname + '_singleton']:
+        ctx: Context = await mngr.start_service(
+            daemon_name=dname,
+            ctx_ep=partial(
+                register_with_sampler,
+                period_s=1,
+                sub_for_broadcasts=False,
+            ),
+            debug_mode=mngr.debug_mode,  # set by pikerd flag
 
-            portal = await Services.actor_n.start_actor(
-                dname,
+            # proxy-through to tractor
             enable_modules=[
                 'piker.data._sampling',
             ],
             loglevel=loglevel,
-                debug_mode=Services.debug_mode,  # set by pikerd flag
             **extra_tractor_kwargs
         )
-
-            await Services.start_service_task(
-                dname,
-                portal,
-                register_with_sampler,
-                period_s=1,
-                sub_for_broadcasts=False,
+        if not already_started:
+            assert (
+                ctx
+                and
+                ctx.portal
+                and
+                not ctx.cancel_called
             )
             return True
 
@@ -561,7 +579,6 @@ async def open_sample_stream(
 
 
 async def sample_and_broadcast(
-
     bus: _FeedsBus,  # noqa
     rt_shm: ShmArray,
     hist_shm: ShmArray,
@@ -582,11 +599,22 @@ async def sample_and_broadcast(
 
     overruns = Counter()
 
+    # multiline nested `dict` formatter (since rn quote-msgs are
+    # just that).
+    pfmt: Callable[[str], str] = mk_repr()
+
     # iterate stream delivered by broker
     async for quotes in quote_stream:
-        # print(quotes)
 
-        # TODO: ``numba`` this!
+        # XXX WARNING XXX only enable for debugging bc ow can cost
+        # ALOT of perf with HF-feedz!!!
+        #
+        # log.info(
+        #     'Rx live quotes:\n'
+        #     f'{pfmt(quotes)}'
+        # )
+
+        # TODO: `numba` this!
         for broker_symbol, quote in quotes.items():
             # TODO: in theory you can send the IPC msg *before* writing
             # to the sharedmem array to decrease latency, however, that
@@ -659,6 +687,18 @@ async def sample_and_broadcast(
             sub_key: str = broker_symbol.lower()
             subs: set[Sub] = bus.get_subs(sub_key)
 
+            if not subs:
+                all_bs_fqmes: list[str] = list(
+                    bus._subscribers.keys()
+                )
+                log.warning(
+                    f'No subscribers for {brokername!r} live-quote ??\n'
+                    f'broker_symbol: {broker_symbol}\n\n'
+
+                    f'Maybe the backend-sys symbol does not match one of,\n'
+                    f'{pfmt(all_bs_fqmes)}\n'
+                )
+
             # NOTE: by default the broker backend doesn't append
             # it's own "name" into the fqme schema (but maybe it
             # should?) so we have to manually generate the correct
@@ -889,6 +929,7 @@ async def uniform_rate_send(
             # to consumers which crash or lose network connection.
             # I.e. we **DO NOT** want to crash and propagate up to
             # ``pikerd`` these kinds of errors!
+            trio.EndOfChannel,
             trio.ClosedResourceError,
             trio.BrokenResourceError,
             ConnectionResetError,
```
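
`mk_repr()` is imported from `piker.log` and only its call shape is visible in this diff: it returns a single-argument formatter used as `pfmt(quotes)`. A rough stand-in with the same shape, built on stdlib `pprint`; this is an assumption-laden sketch and the real helper may differ in options and output:

```python
from functools import partial
from pprint import pformat
from typing import Any, Callable


def mk_repr_sketch(**fmt_kwargs: Any) -> Callable[[Any], str]:
    # pre-bind formatting options and hand back a one-arg formatter,
    # mirroring how `pfmt = mk_repr()` is used above
    return partial(pformat, sort_dicts=False, **fmt_kwargs)


pfmt = mk_repr_sketch(indent=2)
print(pfmt({
    'btcusdt.binance': {'last': 42000.5, 'ticks': [{'type': 'trade'}]},
}))
```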
|  | @ -273,7 +273,7 @@ async def _reconnect_forever( | ||||||
|                 nobsws._connected.set() |                 nobsws._connected.set() | ||||||
|                 await trio.sleep_forever() |                 await trio.sleep_forever() | ||||||
|         except HandshakeError: |         except HandshakeError: | ||||||
|             log.exception(f'Retrying connection') |             log.exception('Retrying connection') | ||||||
| 
 | 
 | ||||||
|         # ws & nursery block ends |         # ws & nursery block ends | ||||||
| 
 | 
 | ||||||
|  | @ -359,8 +359,8 @@ async def open_autorecon_ws( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| JSONRPC response-request style machinery for transparent multiplexing of msgs | JSONRPC response-request style machinery for transparent multiplexing | ||||||
| over a NoBsWs. | of msgs over a `NoBsWs`. | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| 
 | 
 | ||||||
|  | @ -377,44 +377,78 @@ async def open_jsonrpc_session( | ||||||
|     url: str, |     url: str, | ||||||
|     start_id: int = 0, |     start_id: int = 0, | ||||||
|     response_type: type = JSONRPCResult, |     response_type: type = JSONRPCResult, | ||||||
|     request_type: Optional[type] = None, |     msg_recv_timeout: float = float('inf'), | ||||||
|     request_hook: Optional[Callable] = None, |     # ^NOTE, since only `deribit` is using this jsonrpc stuff atm | ||||||
|     error_hook: Optional[Callable] = None, |     # and options mkts are generally "slow moving".. | ||||||
|  |     # | ||||||
|  |     # FURTHER if we break the underlying ws connection then since we | ||||||
|  |     # don't pass a `fixture` to the task that manages `NoBsWs`, i.e. | ||||||
|  |     # `_reconnect_forever()`, the jsonrpc "transport pipe" get's | ||||||
|  |     # broken and never restored with wtv init sequence is required to | ||||||
|  |     # re-establish a working req-resp session. | ||||||
|  | 
 | ||||||
|  |     # request_type: Optional[type] = None, | ||||||
|  |     # request_hook: Optional[Callable] = None, | ||||||
|  |     # error_hook: Optional[Callable] = None, | ||||||
| ) -> Callable[[str, dict], dict]: | ) -> Callable[[str, dict], dict]: | ||||||
| 
 | 
 | ||||||
|  |     # NOTE, store all request msgs so we can raise errors on the | ||||||
|  |     # caller side! | ||||||
|  |     req_msgs: dict[int, dict] = {} | ||||||
|  | 
 | ||||||
|     async with ( |     async with ( | ||||||
|         trio.open_nursery() as n, |         trio.open_nursery() as n, | ||||||
|         open_autorecon_ws(url) as ws |         open_autorecon_ws( | ||||||
|  |             url=url, | ||||||
|  |             msg_recv_timeout=msg_recv_timeout, | ||||||
|  |         ) as ws | ||||||
|     ): |     ): | ||||||
|         rpc_id: Iterable = count(start_id) |         rpc_id: Iterable[int] = count(start_id) | ||||||
|         rpc_results: dict[int, dict] = {} |         rpc_results: dict[int, dict] = {} | ||||||
| 
 | 
 | ||||||
|         async def json_rpc(method: str, params: dict) -> dict: |         async def json_rpc( | ||||||
|  |             method: str, | ||||||
|  |             params: dict, | ||||||
|  |         ) -> dict: | ||||||
|             ''' |             ''' | ||||||
|             perform a json rpc call and wait for the result, raise exception in |             perform a json rpc call and wait for the result, raise exception in | ||||||
|             case of error field present on response |             case of error field present on response | ||||||
|             ''' |             ''' | ||||||
|  |             nonlocal req_msgs | ||||||
|  | 
 | ||||||
|  |             req_id: int = next(rpc_id) | ||||||
|             msg = { |             msg = { | ||||||
|                 'jsonrpc': '2.0', |                 'jsonrpc': '2.0', | ||||||
|                 'id': next(rpc_id), |                 'id': req_id, | ||||||
|                 'method': method, |                 'method': method, | ||||||
|                 'params': params |                 'params': params | ||||||
|             } |             } | ||||||
|             _id = msg['id'] |             _id = msg['id'] | ||||||
| 
 | 
 | ||||||
|             rpc_results[_id] = { |             result = rpc_results[_id] = { | ||||||
|                 'result': None, |                 'result': None, | ||||||
|                 'event': trio.Event() |                 'error': None, | ||||||
|  |                 'event': trio.Event(),  # signal caller resp arrived | ||||||
|             } |             } | ||||||
|  |             req_msgs[_id] = msg | ||||||
| 
 | 
 | ||||||
|             await ws.send_msg(msg) |             await ws.send_msg(msg) | ||||||
| 
 | 
 | ||||||
|  |             # wait for reponse before unblocking requester code | ||||||
|             await rpc_results[_id]['event'].wait() |             await rpc_results[_id]['event'].wait() | ||||||
| 
 | 
 | ||||||
|             ret = rpc_results[_id]['result'] |             if (maybe_result := result['result']): | ||||||
| 
 |                 ret = maybe_result | ||||||
|                 del rpc_results[_id] |                 del rpc_results[_id] | ||||||
| 
 | 
 | ||||||
|  |             else: | ||||||
|  |                 err = result['error'] | ||||||
|  |                 raise Exception( | ||||||
|  |                     f'JSONRPC request failed\n' | ||||||
|  |                     f'req: {msg}\n' | ||||||
|  |                     f'resp: {err}\n' | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|             if ret.error is not None: |             if ret.error is not None: | ||||||
|                 raise Exception(json.dumps(ret.error, indent=4)) |                 raise Exception(json.dumps(ret.error, indent=4)) | ||||||
| 
 | 
 | ||||||
|  | @ -428,6 +462,7 @@ async def open_jsonrpc_session( | ||||||
|             the server side. |             the server side. | ||||||
| 
 | 
 | ||||||
|             ''' |             ''' | ||||||
|  |             nonlocal req_msgs | ||||||
|             async for msg in ws: |             async for msg in ws: | ||||||
|                 match msg: |                 match msg: | ||||||
|                     case { |                     case { | ||||||
|  | @ -451,15 +486,29 @@ async def open_jsonrpc_session( | ||||||
|                         'params': _, |                         'params': _, | ||||||
|                     }: |                     }: | ||||||
|                         log.debug(f'Received\n{msg}') |                         log.debug(f'Received\n{msg}') | ||||||
|                         if request_hook: |                         # if request_hook: | ||||||
|                             await request_hook(request_type(**msg)) |                         #     await request_hook(request_type(**msg)) | ||||||
| 
 | 
 | ||||||
|                     case { |                     case { | ||||||
|                         'error': error |                         'error': error | ||||||
|                     }: |                     }: | ||||||
|                         log.warning(f'Received\n{error}') |                         # if error_hook: | ||||||
|                         if error_hook: |                         #     await error_hook(response_type(**msg)) | ||||||
|                             await error_hook(response_type(**msg)) | 
 | ||||||
|  |                         # retrieve orig request msg, set error | ||||||
|  |                         # response in original "result" msg, | ||||||
|  |                         # THEN FINALLY set the event to signal caller | ||||||
|  |                         # to raise the error in the parent task. | ||||||
|  |                         req_id: int = error['id'] | ||||||
|  |                         req_msg: dict = req_msgs[req_id] | ||||||
|  |                         result: dict = rpc_results[req_id] | ||||||
|  |                         result['error'] = error | ||||||
|  |                         result['event'].set() | ||||||
|  |                         log.error( | ||||||
|  |                             f'JSONRPC request failed\n' | ||||||
|  |                             f'req: {req_msg}\n' | ||||||
|  |                             f'resp: {error}\n' | ||||||
|  |                         ) | ||||||
| 
 | 
 | ||||||
|                     case _: |                     case _: | ||||||
|                         log.warning(f'Unhandled JSON-RPC msg!?\n{msg}') |                         log.warning(f'Unhandled JSON-RPC msg!?\n{msg}') | ||||||
|  |  | ||||||
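The error-handling rework above correlates every outbound request ``id`` with a result slot holding a ``trio.Event``, so the receiver task can park either a result or an error for the requesting task to raise. A minimal, trio-only sketch of that correlation pattern (an in-memory channel stands in for the websocket; all names here are illustrative, not piker APIs):

.. code:: python

    from itertools import count
    import trio

    async def main():
        rpc_results: dict[int, dict] = {}
        rpc_id = count(1)
        send, recv = trio.open_memory_channel(8)  # ws stand-in

        async def receiver():
            # park each reply in its request's slot, then wake the caller
            async for msg in recv:
                entry = rpc_results[msg['id']]
                entry['result'] = {'pong': msg['method']}
                entry['event'].set()

        async def json_rpc(method: str) -> dict:
            _id = next(rpc_id)
            rpc_results[_id] = {
                'result': None,
                'error': None,
                'event': trio.Event(),
            }
            await send.send({'jsonrpc': '2.0', 'id': _id, 'method': method})
            await rpc_results[_id]['event'].wait()
            return rpc_results.pop(_id)['result']

        async with trio.open_nursery() as n:
            n.start_soon(receiver)
            assert await json_rpc('ping') == {'pong': 'ping'}
            n.cancel_scope.cancel()

    trio.run(main)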
|  | @ -540,7 +540,10 @@ async def open_feed_bus( | ||||||
|         # subscription since the backend isn't (yet) expected to |         # subscription since the backend isn't (yet) expected to | ||||||
|         # append it's own name to the fqme, so we filter on keys |         # append it's own name to the fqme, so we filter on keys | ||||||
|         # which *do not* include that name (e.g .ib) . |         # which *do not* include that name (e.g .ib) . | ||||||
|         bus._subscribers.setdefault(bs_fqme, set()) |         bus._subscribers.setdefault( | ||||||
|  |             bs_fqme, | ||||||
|  |             set(), | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|     # sync feed subscribers with flume handles |     # sync feed subscribers with flume handles | ||||||
|     await ctx.started( |     await ctx.started( | ||||||
|  |  | ||||||
28	piker/log.py
							|  | @ -18,7 +18,11 @@ | ||||||
| Log like a forester! | Log like a forester! | ||||||
| """ | """ | ||||||
| import logging | import logging | ||||||
|  | import reprlib | ||||||
| import json | import json | ||||||
|  | from typing import ( | ||||||
|  |     Callable, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| import tractor | import tractor | ||||||
| from pygments import ( | from pygments import ( | ||||||
|  | @ -84,3 +88,27 @@ def colorize_json( | ||||||
|         # likeable styles: algol_nu, tango, monokai |         # likeable styles: algol_nu, tango, monokai | ||||||
|         formatters.TerminalTrueColorFormatter(style=style) |         formatters.TerminalTrueColorFormatter(style=style) | ||||||
|     ) |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def mk_repr( | ||||||
|  |     **repr_kws, | ||||||
|  | ) -> Callable[[str], str]: | ||||||
|  |     ''' | ||||||
|  |     Allocate and deliver a `reprlib.Repr` instance with provided input | ||||||
|  |     settings using the std-lib's `reprlib` mod, | ||||||
|  |      * https://docs.python.org/3/library/reprlib.html | ||||||
|  | 
 | ||||||
|  |     ------ Ex. ------ | ||||||
|  |     An up to 6-layer-nested `dict` as multi-line: | ||||||
|  |     - https://stackoverflow.com/a/79102479 | ||||||
|  |     - https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     def_kws: dict[str, int] = dict( | ||||||
|  |         indent=2, | ||||||
|  |         maxlevel=6,  # recursion levels | ||||||
|  |         maxstring=66,  # match editor line-len limit | ||||||
|  |     ) | ||||||
|  |     def_kws |= repr_kws | ||||||
|  |     reprr = reprlib.Repr(**def_kws) | ||||||
|  |     return reprr.repr | ||||||
|  |  | ||||||
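A quick usage sketch of the new helper's defaults; note the ``indent`` kwarg on ``reprlib.Repr`` is only available on Python 3.12+ per the linked docs:

.. code:: python

    import reprlib

    # same defaults as `mk_repr()` above (py3.12+ for `indent`)
    reprr = reprlib.Repr(indent=2, maxlevel=6, maxstring=66)
    nested = {'a': {'b': {'c': {'d': list(range(30))}}}}
    print(reprr.repr(nested))  # multi-line, depth/width-clamped repr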
|  | @ -30,7 +30,11 @@ Actor runtime primitives and (distributed) service APIs for, | ||||||
|   => TODO: maybe to (re)move elsewhere? |   => TODO: maybe to (re)move elsewhere? | ||||||
| 
 | 
 | ||||||
| ''' | ''' | ||||||
| from ._mngr import Services as Services | from ._mngr import ( | ||||||
|  |     get_service_mngr as get_service_mngr, | ||||||
|  |     open_service_mngr as open_service_mngr, | ||||||
|  |     ServiceMngr as ServiceMngr, | ||||||
|  | ) | ||||||
| from ._registry import ( | from ._registry import ( | ||||||
|     _tractor_kwargs as _tractor_kwargs, |     _tractor_kwargs as _tractor_kwargs, | ||||||
|     _default_reg_addr as _default_reg_addr, |     _default_reg_addr as _default_reg_addr, | ||||||
|  |  | ||||||
|  | @ -21,7 +21,6 @@ | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
| import os | import os | ||||||
| from typing import ( | from typing import ( | ||||||
|     Optional, |  | ||||||
|     Any, |     Any, | ||||||
|     ClassVar, |     ClassVar, | ||||||
| ) | ) | ||||||
|  | @ -30,13 +29,13 @@ from contextlib import ( | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| import tractor | import tractor | ||||||
| import trio |  | ||||||
| 
 | 
 | ||||||
| from ._util import ( | from ._util import ( | ||||||
|     get_console_log, |     get_console_log, | ||||||
| ) | ) | ||||||
| from ._mngr import ( | from ._mngr import ( | ||||||
|     Services, |     open_service_mngr, | ||||||
|  |     ServiceMngr, | ||||||
| ) | ) | ||||||
| from ._registry import (  # noqa | from ._registry import (  # noqa | ||||||
|     _tractor_kwargs, |     _tractor_kwargs, | ||||||
|  | @ -59,7 +58,7 @@ async def open_piker_runtime( | ||||||
|     registry_addrs: list[tuple[str, int]] = [], |     registry_addrs: list[tuple[str, int]] = [], | ||||||
| 
 | 
 | ||||||
|     enable_modules: list[str] = [], |     enable_modules: list[str] = [], | ||||||
|     loglevel: Optional[str] = None, |     loglevel: str|None = None, | ||||||
| 
 | 
 | ||||||
|     # XXX NOTE XXX: you should pretty much never want debug mode |     # XXX NOTE XXX: you should pretty much never want debug mode | ||||||
|     # for data daemons when running in production. |     # for data daemons when running in production. | ||||||
|  | @ -119,6 +118,10 @@ async def open_piker_runtime( | ||||||
|                 # spawn other specialized daemons I think? |                 # spawn other specialized daemons I think? | ||||||
|                 enable_modules=enable_modules, |                 enable_modules=enable_modules, | ||||||
| 
 | 
 | ||||||
|  |                 # TODO: how to configure this? | ||||||
|  |                 # keep it on by default if debug mode is set? | ||||||
|  |                 # maybe_enable_greenback=debug_mode, | ||||||
|  | 
 | ||||||
|                 **tractor_kwargs, |                 **tractor_kwargs, | ||||||
|             ) as actor, |             ) as actor, | ||||||
| 
 | 
 | ||||||
|  | @ -167,12 +170,13 @@ async def open_pikerd( | ||||||
| 
 | 
 | ||||||
|     **kwargs, |     **kwargs, | ||||||
| 
 | 
 | ||||||
| ) -> Services: | ) -> ServiceMngr: | ||||||
|     ''' |     ''' | ||||||
|     Start a root piker daemon with an indefinite lifetime. |     Start a root piker daemon actor (aka `pikerd`) with an indefinite | ||||||
|  |     lifetime. | ||||||
| 
 | 
 | ||||||
|     A root actor nursery is created which can be used to create and keep |     A root actor-nursery is created which can be used to spawn and | ||||||
|     alive underling services (see below). |     supervise underling service sub-actors (see below). | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     # NOTE: for the root daemon we always enable the root |     # NOTE: for the root daemon we always enable the root | ||||||
|  | @ -199,8 +203,6 @@ async def open_pikerd( | ||||||
|             root_actor, |             root_actor, | ||||||
|             reg_addrs, |             reg_addrs, | ||||||
|         ), |         ), | ||||||
|         tractor.open_nursery() as actor_nursery, |  | ||||||
|         trio.open_nursery() as service_nursery, |  | ||||||
|     ): |     ): | ||||||
|         for addr in reg_addrs: |         for addr in reg_addrs: | ||||||
|             if addr not in root_actor.accept_addrs: |             if addr not in root_actor.accept_addrs: | ||||||
|  | @ -209,25 +211,17 @@ async def open_pikerd( | ||||||
|                     'Maybe you have another daemon already running?' |                     'Maybe you have another daemon already running?' | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|         # assign globally for future daemon/task creation |         mngr: ServiceMngr | ||||||
|         Services.actor_n = actor_nursery |         async with open_service_mngr( | ||||||
|         Services.service_n = service_nursery |             debug_mode=debug_mode, | ||||||
|         Services.debug_mode = debug_mode |         ) as mngr: | ||||||
| 
 |             yield mngr | ||||||
|         try: |  | ||||||
|             yield Services |  | ||||||
| 
 |  | ||||||
|         finally: |  | ||||||
|             # TODO: is this more clever/efficient? |  | ||||||
|             # if 'samplerd' in Services.service_tasks: |  | ||||||
|             #     await Services.cancel_service('samplerd') |  | ||||||
|             service_nursery.cancel_scope.cancel() |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO: do we even need this? | # TODO: do we even need this? | ||||||
| # @acm | # @acm | ||||||
| # async def maybe_open_runtime( | # async def maybe_open_runtime( | ||||||
| #     loglevel: Optional[str] = None, | #     loglevel: str|None = None, | ||||||
| #     **kwargs, | #     **kwargs, | ||||||
| 
 | 
 | ||||||
| # ) -> None: | # ) -> None: | ||||||
|  | @ -256,7 +250,7 @@ async def maybe_open_pikerd( | ||||||
|     loglevel: str | None = None, |     loglevel: str | None = None, | ||||||
|     **kwargs, |     **kwargs, | ||||||
| 
 | 
 | ||||||
| ) -> tractor._portal.Portal | ClassVar[Services]: | ) -> tractor._portal.Portal | ClassVar[ServiceMngr]: | ||||||
|     ''' |     ''' | ||||||
|     If no ``pikerd`` daemon-root-actor can be found start it and |     If no ``pikerd`` daemon-root-actor can be found start it and | ||||||
|     yield up (we should probably figure out returning a portal to self |     yield up (we should probably figure out returning a portal to self | ||||||
|  |  | ||||||
|  | @ -49,7 +49,7 @@ from requests.exceptions import ( | ||||||
|     ReadTimeout, |     ReadTimeout, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| from ._mngr import Services | from ._mngr import ServiceMngr | ||||||
| from ._util import ( | from ._util import ( | ||||||
|     log,  # sub-sys logger |     log,  # sub-sys logger | ||||||
|     get_console_log, |     get_console_log, | ||||||
|  | @ -453,7 +453,7 @@ async def open_ahabd( | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def start_ahab_service( | async def start_ahab_service( | ||||||
|     services: Services, |     services: ServiceMngr, | ||||||
|     service_name: str, |     service_name: str, | ||||||
| 
 | 
 | ||||||
|     # endpoint config passed as **kwargs |     # endpoint config passed as **kwargs | ||||||
|  | @ -549,7 +549,8 @@ async def start_ahab_service( | ||||||
|         log.warning('Failed to cancel root permsed container') |         log.warning('Failed to cancel root permsed container') | ||||||
| 
 | 
 | ||||||
|     except ( |     except ( | ||||||
|         trio.MultiError, |         # trio.MultiError, | ||||||
|  |         ExceptionGroup, | ||||||
|     ) as err: |     ) as err: | ||||||
|         for subexc in err.exceptions: |         for subexc in err.exceptions: | ||||||
|             if isinstance(subexc, PermissionError): |             if isinstance(subexc, PermissionError): | ||||||
|  |  | ||||||
|  | @ -26,14 +26,17 @@ from typing import ( | ||||||
| from contextlib import ( | from contextlib import ( | ||||||
|     asynccontextmanager as acm, |     asynccontextmanager as acm, | ||||||
| ) | ) | ||||||
|  | from collections import defaultdict | ||||||
| 
 | 
 | ||||||
| import tractor | import tractor | ||||||
|  | import trio | ||||||
| 
 | 
 | ||||||
| from ._util import ( | from ._util import ( | ||||||
|     log,  # sub-sys logger |     log,  # sub-sys logger | ||||||
| ) | ) | ||||||
| from ._mngr import ( | from ._mngr import ( | ||||||
|     Services, |     get_service_mngr, | ||||||
|  |     ServiceMngr, | ||||||
| ) | ) | ||||||
| from ._actor_runtime import maybe_open_pikerd | from ._actor_runtime import maybe_open_pikerd | ||||||
| from ._registry import find_service | from ._registry import find_service | ||||||
|  | @ -41,15 +44,14 @@ from ._registry import find_service | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def maybe_spawn_daemon( | async def maybe_spawn_daemon( | ||||||
| 
 |  | ||||||
|     service_name: str, |     service_name: str, | ||||||
|     service_task_target: Callable, |     service_task_target: Callable, | ||||||
| 
 |  | ||||||
|     spawn_args: dict[str, Any], |     spawn_args: dict[str, Any], | ||||||
| 
 | 
 | ||||||
|     loglevel: str | None = None, |     loglevel: str | None = None, | ||||||
|     singleton: bool = False, |     singleton: bool = False, | ||||||
| 
 | 
 | ||||||
|  |     _locks = defaultdict(trio.Lock), | ||||||
|     **pikerd_kwargs, |     **pikerd_kwargs, | ||||||
| 
 | 
 | ||||||
| ) -> tractor.Portal: | ) -> tractor.Portal: | ||||||
|  | @ -67,7 +69,7 @@ async def maybe_spawn_daemon( | ||||||
|     ''' |     ''' | ||||||
|     # serialize access to this section to avoid |     # serialize access to this section to avoid | ||||||
|     # 2 or more tasks racing to create a daemon |     # 2 or more tasks racing to create a daemon | ||||||
|     lock = Services.locks[service_name] |     lock = _locks[service_name] | ||||||
|     await lock.acquire() |     await lock.acquire() | ||||||
| 
 | 
 | ||||||
|     async with find_service( |     async with find_service( | ||||||
|  | @ -132,7 +134,65 @@ async def maybe_spawn_daemon( | ||||||
|         async with tractor.wait_for_actor(service_name) as portal: |         async with tractor.wait_for_actor(service_name) as portal: | ||||||
|             lock.release() |             lock.release() | ||||||
|             yield portal |             yield portal | ||||||
|             await portal.cancel_actor() |             # --- ---- --- | ||||||
|  |             # XXX NOTE XXX | ||||||
|  |             # --- ---- --- | ||||||
|  |             # DO NOT PUT A `portal.cancel_actor()` here (as was prior)! | ||||||
|  |             # | ||||||
|  |             # Doing so will cause an "out-of-band" ctxc | ||||||
|  |             # (`tractor.ContextCancelled`) to be raised inside the | ||||||
|  |             # `ServiceMngr.open_context_in_task()`'s call to | ||||||
|  |             # `ctx.wait_for_result()` AND the internal self-ctxc | ||||||
|  |             # "graceful capture" WILL NOT CATCH IT! | ||||||
|  |             # | ||||||
|  |             # This can cause certain types of operations to raise | ||||||
|  |             # that ctxc BEFORE THEY `return`, resulting in | ||||||
|  |             # a "false-negative" ctxc being raised when really | ||||||
|  |             # nothing actually failed, other than our semantic | ||||||
|  |             # "failure" to suppress an expected, graceful, | ||||||
|  |             # self-cancel scenario.. | ||||||
|  |             # | ||||||
|  |             # bUt wHy duZ It WorK lIKe dis.. | ||||||
|  |             # ------------------------------ | ||||||
|  |             # from the perspective of the `tractor.Context` this | ||||||
|  |             # cancel request was conducted "out of band" since | ||||||
|  |             # `Context.cancel()` was never called and thus the | ||||||
|  |             # `._cancel_called: bool` was never set. Despite the | ||||||
|  |             # remote `.canceller` being set to `pikerd` (i.e. the | ||||||
|  |             # same `Actor.uid` of the raising service-mngr task) the | ||||||
|  |             # service-task's ctx itself was never marked as having | ||||||
|  |             # requested cancellation and thus still raises the ctxc | ||||||
|  |             # bc it was unaware of any such request. | ||||||
|  |             # | ||||||
|  |             # How to make grokin these cases easier tho? | ||||||
|  |             # ------------------------------------------ | ||||||
|  |             # Because `Portal.cancel_actor()` was called it requests | ||||||
|  |             # "full-`Actor`-runtime-cancellation" of it's peer | ||||||
|  |             # process which IS NOT THE SAME as a single inter-actor | ||||||
|  |             # RPC task cancelling its local context with a remote | ||||||
|  |             # peer `Task` in that same peer process. | ||||||
|  |             # | ||||||
|  |             # ?TODO? It might be better if we do one (or all) of the | ||||||
|  |             # following: | ||||||
|  |             # | ||||||
|  |             # -[ ] at least set a special message for the | ||||||
|  |             #    `ContextCancelled` when raised locally by the | ||||||
|  |             #    unaware ctx task such that we check for the | ||||||
|  |             #    `.canceller` being *our `Actor`* and in the case | ||||||
|  |             #    where `Context._cancel_called == False` we specially | ||||||
|  |             #    note that this is likely an "out-of-band" | ||||||
|  |             #    runtime-cancel request triggered by some call to | ||||||
|  |             #    `Portal.cancel_actor()`, possibly even reporting the | ||||||
|  |             #    exact LOC of that caller by tracking it inside our | ||||||
|  |             #    portal-type? | ||||||
|  |             # -[ ] possibly add another field to `ContextCancelled` like | ||||||
|  |             #    maybe a, | ||||||
|  |             #    `.request_type: Literal['os', 'proc', 'actor', | ||||||
|  |             #    'ctx']` type thing which would allow immediately | ||||||
|  |             #    being able to tell what kind of cancellation caused | ||||||
|  |             #    the unexpected ctxc? | ||||||
|  |             # -[ ] REMOVE THIS COMMENT, once we've settled on how to | ||||||
|  |             #     better augment `tractor` to be more explicit on this! | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
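Note the spawn-race guard above now lives in a ``defaultdict(trio.Lock)`` default-arg instead of the old class-level ``Services.locks``. A trio-only sketch of that per-service serialization (the sleep is a hypothetical stand-in for the find-or-spawn work):

.. code:: python

    from collections import defaultdict
    import trio

    _locks: defaultdict[str, trio.Lock] = defaultdict(trio.Lock)

    async def maybe_spawn(name: str, order: list):
        # only one task per `name` may run the critical section at
        # a time; later entrants see the daemon already running.
        async with _locks[name]:
            order.append(name)
            await trio.sleep(0.01)  # stand-in for spawn/discovery

    async def main():
        order: list[str] = []
        async with trio.open_nursery() as n:
            for _ in range(3):
                n.start_soon(maybe_spawn, 'emsd', order)
        assert order == ['emsd'] * 3

    trio.run(main)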
| async def spawn_emsd( | async def spawn_emsd( | ||||||
|  | @ -147,21 +207,22 @@ async def spawn_emsd( | ||||||
|     """ |     """ | ||||||
|     log.info('Spawning emsd') |     log.info('Spawning emsd') | ||||||
| 
 | 
 | ||||||
|     portal = await Services.actor_n.start_actor( |     smngr: ServiceMngr = get_service_mngr() | ||||||
|  |     portal = await smngr.actor_n.start_actor( | ||||||
|         'emsd', |         'emsd', | ||||||
|         enable_modules=[ |         enable_modules=[ | ||||||
|             'piker.clearing._ems', |             'piker.clearing._ems', | ||||||
|             'piker.clearing._client', |             'piker.clearing._client', | ||||||
|         ], |         ], | ||||||
|         loglevel=loglevel, |         loglevel=loglevel, | ||||||
|         debug_mode=Services.debug_mode,  # set by pikerd flag |         debug_mode=smngr.debug_mode,  # set by pikerd flag | ||||||
|         **extra_tractor_kwargs |         **extra_tractor_kwargs | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     # non-blocking setup of clearing service |     # non-blocking setup of clearing service | ||||||
|     from ..clearing._ems import _setup_persistent_emsd |     from ..clearing._ems import _setup_persistent_emsd | ||||||
| 
 | 
 | ||||||
|     await Services.start_service_task( |     await smngr.start_service_task( | ||||||
|         'emsd', |         'emsd', | ||||||
|         portal, |         portal, | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -18,16 +18,29 @@ | ||||||
| daemon-service management API. | daemon-service management API. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
|  | from __future__ import annotations | ||||||
|  | from contextlib import ( | ||||||
|  |     asynccontextmanager as acm, | ||||||
|  |     # contextmanager as cm, | ||||||
|  | ) | ||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
|  | from dataclasses import ( | ||||||
|  |     dataclass, | ||||||
|  |     field, | ||||||
|  | ) | ||||||
|  | import functools | ||||||
|  | import inspect | ||||||
| from typing import ( | from typing import ( | ||||||
|     Callable, |     Callable, | ||||||
|     Any, |     Any, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| import trio | import msgspec | ||||||
| from trio_typing import TaskStatus |  | ||||||
| import tractor | import tractor | ||||||
|  | import trio | ||||||
|  | from trio import TaskStatus | ||||||
| from tractor import ( | from tractor import ( | ||||||
|  |     ActorNursery, | ||||||
|     current_actor, |     current_actor, | ||||||
|     ContextCancelled, |     ContextCancelled, | ||||||
|     Context, |     Context, | ||||||
|  | @ -39,6 +52,130 @@ from ._util import ( | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # TODO: implement a singleton deco-API for wrapping the below | ||||||
|  | # factory's impl for general actor-singleton use? | ||||||
|  | # | ||||||
|  | # @singleton | ||||||
|  | # async def open_service_mngr( | ||||||
|  | #     **init_kwargs, | ||||||
|  | # ) -> ServiceMngr: | ||||||
|  | #     ''' | ||||||
|  | #     Note this function body is invoked IFF no existing singleton instance already | ||||||
|  | #     exists in this proc's memory. | ||||||
|  | 
 | ||||||
|  | #     ''' | ||||||
|  | #     # setup | ||||||
|  | #     yield ServiceMngr(**init_kwargs) | ||||||
|  | #     # teardown | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO: singleton factory API instead of a class API | ||||||
|  | @acm | ||||||
|  | async def open_service_mngr( | ||||||
|  |     *, | ||||||
|  |     debug_mode: bool = False, | ||||||
|  | 
 | ||||||
|  |     # impl detail which ensures a single global instance | ||||||
|  |     _singleton: list[ServiceMngr|None] = [None], | ||||||
|  |     **init_kwargs, | ||||||
|  | 
 | ||||||
|  | ) -> ServiceMngr: | ||||||
|  |     ''' | ||||||
|  |     Open a multi-subactor-as-service-daemon tree supervisor. | ||||||
|  | 
 | ||||||
|  |     The delivered `ServiceMngr` is a singleton instance for each | ||||||
|  |     actor-process and is allocated on first open and never | ||||||
|  |     de-allocated unless explicitly deleted by a call to | ||||||
|  |     `del_service_mngr()`. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # TODO: factor this allocation into | ||||||
|  |     # a `._mngr.open_service_mngr()` and put in the | ||||||
|  |     # once-n-only-once setup/`.__aenter__()` part! | ||||||
|  |     # -[ ] how to make this only happen on the `mngr == None` case? | ||||||
|  |     #  |_ use `.trionics.maybe_open_context()` (for generic | ||||||
|  |     #     async-with-style-only-once of the factory impl, though | ||||||
|  |     #     what do we do for the allocation case? | ||||||
|  |     #    / `.maybe_open_nursery()` (since for this specific case | ||||||
|  |     #    it's simpler?) to activate | ||||||
|  |     async with ( | ||||||
|  |         tractor.open_nursery() as an, | ||||||
|  |         trio.open_nursery() as tn, | ||||||
|  |     ): | ||||||
|  |         # impl specific obvi.. | ||||||
|  |         init_kwargs.update({ | ||||||
|  |             'actor_n': an, | ||||||
|  |             'service_n': tn, | ||||||
|  |         }) | ||||||
|  | 
 | ||||||
|  |         mngr: ServiceMngr|None | ||||||
|  |         if (mngr := _singleton[0]) is None: | ||||||
|  | 
 | ||||||
|  |             log.info('Allocating a new service mngr!') | ||||||
|  |             mngr = _singleton[0] = ServiceMngr(**init_kwargs) | ||||||
|  | 
 | ||||||
|  |             # TODO: put into `.__aenter__()` section of | ||||||
|  |             # eventual `@singleton_acm` API wrapper. | ||||||
|  |             # | ||||||
|  |             # assign globally for future daemon/task creation | ||||||
|  |             mngr.actor_n = an | ||||||
|  |             mngr.service_n = tn | ||||||
|  | 
 | ||||||
|  |         else: | ||||||
|  |             assert ( | ||||||
|  |                 mngr.actor_n | ||||||
|  |                 and | ||||||
|  |                 mngr.service_n | ||||||
|  |             ) | ||||||
|  |             log.info( | ||||||
|  |                 'Using extant service mngr!\n\n' | ||||||
|  |                 f'{mngr!r}\n'  # it has a nice `.__repr__()` of services state | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |         try: | ||||||
|  |             # NOTE: this is a singleton factory impl specific detail | ||||||
|  |             # which should be supported in the condensed | ||||||
|  |             # `@singleton_acm` API? | ||||||
|  |             mngr.debug_mode = debug_mode | ||||||
|  | 
 | ||||||
|  |             yield mngr | ||||||
|  |         finally: | ||||||
|  |             # TODO: is this more clever/efficient? | ||||||
|  |             # if 'samplerd' in mngr.service_tasks: | ||||||
|  |             #     await mngr.cancel_service('samplerd') | ||||||
|  |             tn.cancel_scope.cancel() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_service_mngr() -> ServiceMngr: | ||||||
|  |     ''' | ||||||
|  |     Try to get the singleton service-mngr for this actor presuming it | ||||||
|  |     has already been allocated using, | ||||||
|  | 
 | ||||||
|  |     .. code:: python | ||||||
|  | 
 | ||||||
|  |         async with open_<@singleton_acm(func)>() as mngr: | ||||||
|  |             ... this block kept open ... | ||||||
|  | 
 | ||||||
|  |     If not yet allocated raise a `RuntimeError`. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # https://stackoverflow.com/a/12627202 | ||||||
|  |     # https://docs.python.org/3/library/inspect.html#inspect.Signature | ||||||
|  |     maybe_mngr: ServiceMngr|None = inspect.signature( | ||||||
|  |         open_service_mngr | ||||||
|  |     ).parameters['_singleton'].default[0] | ||||||
|  | 
 | ||||||
|  |     if maybe_mngr is None: | ||||||
|  |         raise RuntimeError( | ||||||
|  |             'Someone must allocate a `ServiceMngr` using\n\n' | ||||||
|  |             '`async with open_service_mngr()` beforehand!!\n' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     return maybe_mngr | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
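The ``_singleton`` default-arg plus the ``inspect.signature()`` lookup in ``get_service_mngr()`` is a mutable-default singleton trick; distilled below with a dummy payload standing in for ``ServiceMngr``:

.. code:: python

    from contextlib import asynccontextmanager as acm
    import inspect
    import trio

    @acm
    async def open_thing(_singleton: list = [None]):
        # the one-element list default persists across calls: first
        # entry allocates, every later entry reuses the same object.
        if _singleton[0] is None:
            _singleton[0] = object()
        yield _singleton[0]

    def get_thing():
        maybe = inspect.signature(
            open_thing
        ).parameters['_singleton'].default[0]
        if maybe is None:
            raise RuntimeError('enter `open_thing()` first!')
        return maybe

    async def main():
        async with open_thing() as a, open_thing() as b:
            assert a is b is get_thing()

    trio.run(main)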
| # TODO: we need remote wrapping and a general soln: | # TODO: we need remote wrapping and a general soln: | ||||||
| # - factor this into a ``tractor.highlevel`` extension # pack for the | # - factor this into a ``tractor.highlevel`` extension # pack for the | ||||||
| #   library. | #   library. | ||||||
|  | @ -46,31 +183,46 @@ from ._util import ( | ||||||
| #   to the pikerd actor for starting services remotely! | #   to the pikerd actor for starting services remotely! | ||||||
| # - prolly rename this to ActorServicesNursery since it spawns | # - prolly rename this to ActorServicesNursery since it spawns | ||||||
| #   new actors and supervises them to completion? | #   new actors and supervises them to completion? | ||||||
| class Services: | @dataclass | ||||||
|  | class ServiceMngr: | ||||||
|  | # class ServiceMngr(msgspec.Struct): | ||||||
|  |     ''' | ||||||
|  |     A multi-subactor-as-service manager. | ||||||
| 
 | 
 | ||||||
|     actor_n: tractor._supervise.ActorNursery |     Spawn, supervise and monitor service/daemon subactors in a SC | ||||||
|  |     process tree. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     actor_n: ActorNursery | ||||||
|     service_n: trio.Nursery |     service_n: trio.Nursery | ||||||
|     debug_mode: bool  # tractor sub-actor debug mode flag |     debug_mode: bool = False  # tractor sub-actor debug mode flag | ||||||
|  | 
 | ||||||
|     service_tasks: dict[ |     service_tasks: dict[ | ||||||
|         str, |         str, | ||||||
|         tuple[ |         tuple[ | ||||||
|             trio.CancelScope, |             trio.CancelScope, | ||||||
|  |             Context, | ||||||
|             Portal, |             Portal, | ||||||
|             trio.Event, |             trio.Event, | ||||||
|         ] |         ] | ||||||
|     ] = {} |     ] = field(default_factory=dict) | ||||||
|     locks = defaultdict(trio.Lock) | 
 | ||||||
|  |     # internal per-service task mutexes | ||||||
|  |     _locks = defaultdict(trio.Lock) | ||||||
| 
 | 
 | ||||||
|     @classmethod |  | ||||||
|     async def start_service_task( |     async def start_service_task( | ||||||
|         self, |         self, | ||||||
|         name: str, |         name: str, | ||||||
|         portal: Portal, |         portal: Portal, | ||||||
|  | 
 | ||||||
|  |         # TODO: typevar for the return type of the target and then | ||||||
|  |         # use it below for `ctx_res`? | ||||||
|         target: Callable, |         target: Callable, | ||||||
|  | 
 | ||||||
|         allow_overruns: bool = False, |         allow_overruns: bool = False, | ||||||
|         **ctx_kwargs, |         **ctx_kwargs, | ||||||
| 
 | 
 | ||||||
|     ) -> (trio.CancelScope, Context): |     ) -> (trio.CancelScope, Context, Any): | ||||||
|         ''' |         ''' | ||||||
|         Open a context in a service sub-actor, add to a stack |         Open a context in a service sub-actor, add to a stack | ||||||
|         that gets unwound at ``pikerd`` teardown. |         that gets unwound at ``pikerd`` teardown. | ||||||
|  | @ -83,6 +235,7 @@ class Services: | ||||||
|             task_status: TaskStatus[ |             task_status: TaskStatus[ | ||||||
|                 tuple[ |                 tuple[ | ||||||
|                     trio.CancelScope, |                     trio.CancelScope, | ||||||
|  |                     Context, | ||||||
|                     trio.Event, |                     trio.Event, | ||||||
|                     Any, |                     Any, | ||||||
|                 ] |                 ] | ||||||
|  | @ -90,64 +243,87 @@ class Services: | ||||||
| 
 | 
 | ||||||
|         ) -> Any: |         ) -> Any: | ||||||
| 
 | 
 | ||||||
|  |             # TODO: use the ctx._scope directly here instead? | ||||||
|  |             # -[ ] actually what semantics do we expect for this | ||||||
|  |             #   usage!? | ||||||
|             with trio.CancelScope() as cs: |             with trio.CancelScope() as cs: | ||||||
| 
 |                 try: | ||||||
|                     async with portal.open_context( |                     async with portal.open_context( | ||||||
|                         target, |                         target, | ||||||
|                         allow_overruns=allow_overruns, |                         allow_overruns=allow_overruns, | ||||||
|                         **ctx_kwargs, |                         **ctx_kwargs, | ||||||
| 
 | 
 | ||||||
|                 ) as (ctx, first): |                     ) as (ctx, started): | ||||||
| 
 | 
 | ||||||
|                         # unblock once the remote context has started |                         # unblock once the remote context has started | ||||||
|                         complete = trio.Event() |                         complete = trio.Event() | ||||||
|                     task_status.started((cs, complete, first)) |                         task_status.started(( | ||||||
|  |                             cs, | ||||||
|  |                             ctx, | ||||||
|  |                             complete, | ||||||
|  |                             started, | ||||||
|  |                         )) | ||||||
|                         log.info( |                         log.info( | ||||||
|                         f'`pikerd` service {name} started with value {first}' |                             f'`pikerd` service {name} started with value {started}' | ||||||
|                         ) |                         ) | ||||||
|                     try: |  | ||||||
|                         # wait on any context's return value |                         # wait on any context's return value | ||||||
|                         # and any final portal result from the |                         # and any final portal result from the | ||||||
|                         # sub-actor. |                         # sub-actor. | ||||||
|                         ctx_res: Any = await ctx.result() |                         ctx_res: Any = await ctx.wait_for_result() | ||||||
| 
 | 
 | ||||||
|                         # NOTE: blocks indefinitely until cancelled |                         # NOTE: blocks indefinitely until cancelled | ||||||
|                         # either by error from the target context |                         # either by error from the target context | ||||||
|                         # function or by being cancelled here by the |                         # function or by being cancelled here by the | ||||||
|                         # surrounding cancel scope. |                         # surrounding cancel scope. | ||||||
|                         return (await portal.result(), ctx_res) |                         return ( | ||||||
|  |                             await portal.wait_for_result(), | ||||||
|  |                             ctx_res, | ||||||
|  |                         ) | ||||||
|  | 
 | ||||||
|                 except ContextCancelled as ctxe: |                 except ContextCancelled as ctxe: | ||||||
|                     canceller: tuple[str, str] = ctxe.canceller |                     canceller: tuple[str, str] = ctxe.canceller | ||||||
|                     our_uid: tuple[str, str] = current_actor().uid |                     our_uid: tuple[str, str] = current_actor().uid | ||||||
|                     if ( |                     if ( | ||||||
|                             canceller != portal.channel.uid |                         canceller != portal.chan.uid | ||||||
|                         and |                         and | ||||||
|                         canceller != our_uid |                         canceller != our_uid | ||||||
|                     ): |                     ): | ||||||
|                         log.cancel( |                         log.cancel( | ||||||
|                                 f'Actor-service {name} was remotely cancelled?\n' |                             f'Actor-service `{name}` was remotely cancelled by a peer?\n' | ||||||
|                                 f'remote canceller: {canceller}\n' | 
 | ||||||
|                                 f'Keeping {our_uid} alive, ignoring sub-actor cancel..\n' |                             # TODO: this would be a good spot to use | ||||||
|  |                             # a respawn feature Bo | ||||||
|  |                             f'-> Keeping `pikerd` service manager alive despite this inter-peer cancel\n\n' | ||||||
|  | 
 | ||||||
|  |                             f'cancellee: {portal.chan.uid}\n' | ||||||
|  |                             f'canceller: {canceller}\n' | ||||||
|                         ) |                         ) | ||||||
|                     else: |                     else: | ||||||
|                         raise |                         raise | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
|                 finally: |                 finally: | ||||||
|  |                     # NOTE: the ctx MUST be cancelled first if we | ||||||
|  |                     # don't want the above `ctx.wait_for_result()` to | ||||||
|  |                     # raise a self-ctxc. WHY, well since from the ctx's | ||||||
|  |                     # perspective the cancel request will have | ||||||
|  |                     # arrived out-of-band at the `Actor.cancel()` | ||||||
|  |                     # level, thus `Context.cancel_called == False`, | ||||||
|  |                     # meaning `ctx._is_self_cancelled() == False`. | ||||||
|  |                     # with trio.CancelScope(shield=True): | ||||||
|  |                     # await ctx.cancel() | ||||||
|                     await portal.cancel_actor() |                     await portal.cancel_actor() | ||||||
|                     complete.set() |                     complete.set() | ||||||
|                     self.service_tasks.pop(name) |                     self.service_tasks.pop(name) | ||||||
| 
 | 
 | ||||||
|         cs, complete, first = await self.service_n.start(open_context_in_task) |         cs, sub_ctx, complete, started = await self.service_n.start( | ||||||
|  |             open_context_in_task | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|         # store the cancel scope and portal for later cancellation or |         # store the cancel scope and portal for later cancellation or | ||||||
|         # restart if needed. |         # restart if needed. | ||||||
|         self.service_tasks[name] = (cs, portal, complete) |         self.service_tasks[name] = (cs, sub_ctx, portal, complete) | ||||||
|  |         return cs, sub_ctx, started | ||||||
| 
 | 
 | ||||||
|         return cs, first |  | ||||||
| 
 |  | ||||||
|     @classmethod |  | ||||||
|     async def cancel_service( |     async def cancel_service( | ||||||
|         self, |         self, | ||||||
|         name: str, |         name: str, | ||||||
|  | @ -158,8 +334,80 @@ class Services: | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         log.info(f'Cancelling `pikerd` service {name}') |         log.info(f'Cancelling `pikerd` service {name}') | ||||||
|         cs, portal, complete = self.service_tasks[name] |         cs, sub_ctx, portal, complete = self.service_tasks[name] | ||||||
|         cs.cancel() | 
 | ||||||
|  |         # cs.cancel() | ||||||
|  |         await sub_ctx.cancel() | ||||||
|         await complete.wait() |         await complete.wait() | ||||||
|         assert name not in self.service_tasks, \ | 
 | ||||||
|  |         if name in self.service_tasks: | ||||||
|  |             # TODO: custom err? | ||||||
|  |             # raise ServiceError( | ||||||
|  |             raise RuntimeError( | ||||||
|                 f'Service task for {name} not terminated?' |                 f'Service task for {name} not terminated?' | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |         # assert name not in self.service_tasks, \ | ||||||
|  |         #     f'Service task for {name} not terminated?' | ||||||
|  | 
 | ||||||
|  |     async def start_service( | ||||||
|  |         self, | ||||||
|  |         daemon_name: str, | ||||||
|  |         ctx_ep: Callable,  # kwargs must `partial`-ed in! | ||||||
|  | 
 | ||||||
|  |         debug_mode: bool = False, | ||||||
|  |         **tractor_actor_kwargs, | ||||||
|  | 
 | ||||||
|  |     ) -> Context: | ||||||
|  |         ''' | ||||||
|  |         Start a "service" task in a new sub-actor (daemon) and manage it's lifetime | ||||||
|  |         indefinitely. | ||||||
|  | 
 | ||||||
|  |         Services can be cancelled/shutdown using `.cancel_service()`. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         entry: tuple|None = self.service_tasks.get(daemon_name) | ||||||
|  |         if entry: | ||||||
|  |             (cs, sub_ctx, portal, complete) = entry | ||||||
|  |             return sub_ctx | ||||||
|  | 
 | ||||||
|  |         if daemon_name not in self.service_tasks: | ||||||
|  |             portal = await self.actor_n.start_actor( | ||||||
|  |                 daemon_name, | ||||||
|  |                 debug_mode=(  # maybe set globally during allocate | ||||||
|  |                     debug_mode | ||||||
|  |                     or | ||||||
|  |                     self.debug_mode | ||||||
|  |                 ), | ||||||
|  |                 **tractor_actor_kwargs, | ||||||
|  |             ) | ||||||
|  |             ctx_kwargs: dict[str, Any] = {} | ||||||
|  |             if isinstance(ctx_ep, functools.partial): | ||||||
|  |                 ctx_kwargs: dict[str, Any] = ctx_ep.keywords | ||||||
|  |                 ctx_ep: Callable = ctx_ep.func | ||||||
|  | 
 | ||||||
|  |             (cs, sub_ctx, started) = await self.start_service_task( | ||||||
|  |                 daemon_name, | ||||||
|  |                 portal, | ||||||
|  |                 ctx_ep, | ||||||
|  |                 **ctx_kwargs, | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |             return sub_ctx | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO: | ||||||
|  | # -[ ] factor all the common shit from `.data._sampling` | ||||||
|  | #   and `.brokers._daemon` into here / `ServiceMngr` | ||||||
|  | #   in terms of allocating the `Portal` as part of the | ||||||
|  | #   "service-in-subactor" starting! | ||||||
|  | # -[ ] move to `tractor.hilevel._service`, import and use here! | ||||||
|  | # NOTE: purposely leaks the ref to the mod-scope Bo | ||||||
|  | # import tractor | ||||||
|  | # from tractor.hilevel import ( | ||||||
|  | #     open_service_mngr, | ||||||
|  | #     ServiceMngr, | ||||||
|  | # ) | ||||||
|  | # mngr: ServiceMngr|None = None | ||||||
|  | # with tractor.hilevel.open_service_mngr() as mngr: | ||||||
|  | #     Services = proxy(mngr) | ||||||
|  |  | ||||||
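Bookkeeping-wise the new ``ServiceMngr`` boils down to: ``nursery.start()`` a wrapper task per service, stash its cancel-scope and completion event by name, then tear down by cancelling and awaiting the event. A trio-only distillation of just that pattern (no ``tractor`` contexts or portals involved):

.. code:: python

    import trio

    class MiniMngr:
        def __init__(self, nursery: trio.Nursery):
            self.service_n = nursery
            self.service_tasks: dict[
                str, tuple[trio.CancelScope, trio.Event]
            ] = {}

        async def start_service_task(self, name: str):
            async def _task(task_status=trio.TASK_STATUS_IGNORED):
                complete = trio.Event()
                with trio.CancelScope() as cs:
                    task_status.started((cs, complete))
                    try:
                        await trio.sleep_forever()  # the "service"
                    finally:
                        complete.set()
                        self.service_tasks.pop(name, None)

            cs, complete = await self.service_n.start(_task)
            self.service_tasks[name] = (cs, complete)

        async def cancel_service(self, name: str):
            cs, complete = self.service_tasks[name]
            cs.cancel()
            await complete.wait()

    async def main():
        async with trio.open_nursery() as n:
            mngr = MiniMngr(n)
            await mngr.start_service_task('emsd')
            await mngr.cancel_service('emsd')
            assert 'emsd' not in mngr.service_tasks

    trio.run(main)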
|  | @ -21,11 +21,13 @@ from typing import ( | ||||||
|     TYPE_CHECKING, |     TYPE_CHECKING, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
|  | # TODO: oof, needs to be changed to `httpx`! | ||||||
| import asks | import asks | ||||||
| 
 | 
 | ||||||
| if TYPE_CHECKING: | if TYPE_CHECKING: | ||||||
|     import docker |     import docker | ||||||
|     from ._ahab import DockerContainer |     from ._ahab import DockerContainer | ||||||
|  |     from . import ServiceMngr | ||||||
| 
 | 
 | ||||||
| from ._util import log  # sub-sys logger | from ._util import log  # sub-sys logger | ||||||
| from ._util import ( | from ._util import ( | ||||||
|  | @ -127,7 +129,7 @@ def start_elasticsearch( | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def start_ahab_daemon( | async def start_ahab_daemon( | ||||||
|     service_mngr: Services, |     service_mngr: ServiceMngr, | ||||||
|     user_config: dict | None = None, |     user_config: dict | None = None, | ||||||
|     loglevel: str | None = None, |     loglevel: str | None = None, | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -53,7 +53,7 @@ import pendulum | ||||||
| # import purerpc | # import purerpc | ||||||
| 
 | 
 | ||||||
| from ..data.feed import maybe_open_feed | from ..data.feed import maybe_open_feed | ||||||
| from . import Services | from . import ServiceMngr | ||||||
| from ._util import ( | from ._util import ( | ||||||
|     log,  # sub-sys logger |     log,  # sub-sys logger | ||||||
|     get_console_log, |     get_console_log, | ||||||
|  | @ -233,7 +233,7 @@ def start_marketstore( | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
| async def start_ahab_daemon( | async def start_ahab_daemon( | ||||||
|     service_mngr: Services, |     service_mngr: ServiceMngr, | ||||||
|     user_config: dict | None = None, |     user_config: dict | None = None, | ||||||
|     loglevel: str | None = None, |     loglevel: str | None = None, | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -161,7 +161,12 @@ class NativeStorageClient: | ||||||
| 
 | 
 | ||||||
|     def index_files(self): |     def index_files(self): | ||||||
|         for path in self._datadir.iterdir(): |         for path in self._datadir.iterdir(): | ||||||
|             if path.name in {'borked', 'expired',}: |             if ( | ||||||
|  |                 path.name in {'borked', 'expired',} | ||||||
|  |                 or | ||||||
|  |                 '.parquet' not in str(path) | ||||||
|  |             ): | ||||||
|  |                 # ignore all non-apache files (for now) | ||||||
|                 continue |                 continue | ||||||
| 
 | 
 | ||||||
|             key: str = path.name.rstrip('.parquet') |             key: str = path.name.rstrip('.parquet') | ||||||
|  |  | ||||||
|  | @ -44,8 +44,10 @@ import trio | ||||||
| from trio_typing import TaskStatus | from trio_typing import TaskStatus | ||||||
| import tractor | import tractor | ||||||
| from pendulum import ( | from pendulum import ( | ||||||
|  |     Interval, | ||||||
|     DateTime, |     DateTime, | ||||||
|     Duration, |     Duration, | ||||||
|  |     duration as mk_duration, | ||||||
|     from_timestamp, |     from_timestamp, | ||||||
| ) | ) | ||||||
| import numpy as np | import numpy as np | ||||||
|  | @ -214,7 +216,8 @@ async def maybe_fill_null_segments( | ||||||
|         # pair, immediately stop backfilling? |         # pair, immediately stop backfilling? | ||||||
|         if ( |         if ( | ||||||
|             start_dt |             start_dt | ||||||
|             and end_dt < start_dt |             and | ||||||
|  |             end_dt < start_dt | ||||||
|         ): |         ): | ||||||
|             await tractor.pause() |             await tractor.pause() | ||||||
|             break |             break | ||||||
|  | @ -262,6 +265,7 @@ async def maybe_fill_null_segments( | ||||||
|         except tractor.ContextCancelled: |         except tractor.ContextCancelled: | ||||||
|             # log.exception |             # log.exception | ||||||
|             await tractor.pause() |             await tractor.pause() | ||||||
|  |             raise | ||||||
| 
 | 
 | ||||||
|     null_segs_detected.set() |     null_segs_detected.set() | ||||||
|     # RECHECK for more null-gaps |     # RECHECK for more null-gaps | ||||||
|  | @ -349,7 +353,7 @@ async def maybe_fill_null_segments( | ||||||
| 
 | 
 | ||||||
| async def start_backfill( | async def start_backfill( | ||||||
|     get_hist, |     get_hist, | ||||||
|     frame_types: dict[str, Duration] | None, |     def_frame_duration: Duration, | ||||||
|     mod: ModuleType, |     mod: ModuleType, | ||||||
|     mkt: MktPair, |     mkt: MktPair, | ||||||
|     shm: ShmArray, |     shm: ShmArray, | ||||||
|  | @ -379,22 +383,23 @@ async def start_backfill( | ||||||
|         update_start_on_prepend: bool = False |         update_start_on_prepend: bool = False | ||||||
|         if backfill_until_dt is None: |         if backfill_until_dt is None: | ||||||
| 
 | 
 | ||||||
|             # TODO: drop this right and just expose the backfill |             # TODO: per-provider default history-durations? | ||||||
|             # limits inside a [storage] section in conf.toml? |             # -[ ] inside the `open_history_client()` config allow | ||||||
|             # when no tsdb "last datum" is provided, we just load |             #    declaring the history duration limits instead of | ||||||
|             # some near-term history. |             #    guessing and/or applying the same limits to all? | ||||||
|             # periods = { |             # | ||||||
|             #     1: {'days': 1}, |             # -[ ] allow declaring (default) per-provider backfill | ||||||
|             #     60: {'days': 14}, |             #     limits inside a [storage] sub-section in conf.toml? | ||||||
|             # } |             # | ||||||
| 
 |             # NOTE, when no tsdb "last datum" is provided, we just | ||||||
|             # do a decently sized backfill and load it into storage. |             # load some near-term history by presuming a "decently | ||||||
|  |             # large" 60s duration limit and a much shorter 1s range. | ||||||
|             periods = { |             periods = { | ||||||
|                 1: {'days': 2}, |                 1: {'days': 2}, | ||||||
|                 60: {'years': 6}, |                 60: {'years': 6}, | ||||||
|             } |             } | ||||||
|             period_duration: int = periods[timeframe] |             period_duration: int = periods[timeframe] | ||||||
|             update_start_on_prepend = True |             update_start_on_prepend: bool = True | ||||||
| 
 | 
 | ||||||
|             # NOTE: manually set the "latest" datetime which we intend to |             # NOTE: manually set the "latest" datetime which we intend to | ||||||
|             # backfill history "until" so as to adhere to the history |             # backfill history "until" so as to adhere to the history | ||||||
|  | @ -416,7 +421,6 @@ async def start_backfill( | ||||||
|                 f'backfill_until_dt: {backfill_until_dt}\n' |                 f'backfill_until_dt: {backfill_until_dt}\n' | ||||||
|                 f'last_start_dt: {last_start_dt}\n' |                 f'last_start_dt: {last_start_dt}\n' | ||||||
|             ) |             ) | ||||||
| 
 |  | ||||||
|             try: |             try: | ||||||
|                 ( |                 ( | ||||||
|                     array, |                     array, | ||||||
|  | @ -426,71 +430,114 @@ async def start_backfill( | ||||||
|                     timeframe, |                     timeframe, | ||||||
|                     end_dt=last_start_dt, |                     end_dt=last_start_dt, | ||||||
|                 ) |                 ) | ||||||
| 
 |  | ||||||
|             except NoData as _daterr: |             except NoData as _daterr: | ||||||
|                 # 3 cases: |                 orig_last_start_dt: datetime = last_start_dt | ||||||
|                 # - frame in the middle of a legit venue gap |                 gap_report: str = ( | ||||||
|                 # - history actually began at the `last_start_dt` |                     f'EMPTY FRAME for `end_dt: {last_start_dt}`?\n' | ||||||
|                 # - some other unknown error (ib blocking the |                     f'{mod.name} -> tf@fqme: {timeframe}@{mkt.fqme}\n' | ||||||
|                 #   history bc they don't want you seeing how they |                     f'last_start_dt: {orig_last_start_dt}\n\n' | ||||||
|                 #   cucked all the tinas..) |                     f'bf_until: {backfill_until_dt}\n' | ||||||
|                 if dur := frame_types.get(timeframe): |  | ||||||
|                     # decrement by a frame's worth of duration and |  | ||||||
|                     # retry a few times. |  | ||||||
|                     last_start_dt.subtract( |  | ||||||
|                         seconds=dur.total_seconds() |  | ||||||
|                 ) |                 ) | ||||||
|                     log.warning( |                 # EMPTY FRAME signal with 3 (likely) causes: | ||||||
|                         f'{mod.name} -> EMPTY FRAME for end_dt?\n' |                 # | ||||||
|                         f'tf@fqme: {timeframe}@{mkt.fqme}\n' |                 # 1. range contains legit gap in venue history | ||||||
|                         'bf_until <- last_start_dt:\n' |                 # 2. history actually (edge case) **began** at the | ||||||
|                         f'{backfill_until_dt} <- {last_start_dt}\n' |                 #    value `last_start_dt` | ||||||
|                         f'Decrementing `end_dt` by {dur} and retry..\n' |                 # 3. some other unknown error (ib blocking the | ||||||
|  |                 #    history-query bc they don't want you seeing how | ||||||
|  |                 #    they cucked all the tinas.. like with options | ||||||
|  |                 #    hist) | ||||||
|  |                 # | ||||||
|  |                 if def_frame_duration: | ||||||
|  |                     # decrement by a duration's (frame) worth of time | ||||||
|  |                     # as maybe indicated by the backend to see if we | ||||||
|  |                     # can get older data before this possible | ||||||
|  |                     # "history gap". | ||||||
|  |                     last_start_dt: datetime = last_start_dt.subtract( | ||||||
|  |                         seconds=def_frame_duration.total_seconds() | ||||||
|                     ) |                     ) | ||||||
|  |                     gap_report += ( | ||||||
|  |                         f'Decrementing `end_dt` and retrying with,\n' | ||||||
|  |                         f'def_frame_duration: {def_frame_duration}\n' | ||||||
|  |                         f'(new) last_start_dt: {last_start_dt}\n' | ||||||
|  |                     ) | ||||||
|  |                     log.warning(gap_report) | ||||||
|  |                     # skip writing to shm/tsdb and try the next | ||||||
|  |                     # duration's worth of prior history. | ||||||
|                     continue |                     continue | ||||||
| 
 | 
 | ||||||
|             # broker says there never was or is no more history to pull |                 else: | ||||||
|             except DataUnavailable: |                     # await tractor.pause() | ||||||
|                 log.warning( |                     raise DataUnavailable(gap_report) | ||||||
|                     f'NO-MORE-DATA in range?\n' |  | ||||||
|                     f'`{mod.name}` halted history:\n' |  | ||||||
|                     f'tf@fqme: {timeframe}@{mkt.fqme}\n' |  | ||||||
|                     'bf_until <- last_start_dt:\n' |  | ||||||
|                     f'{backfill_until_dt} <- {last_start_dt}\n' |  | ||||||
|                 ) |  | ||||||
| 
 | 
 | ||||||
|                 # ugh, what's a better way? |             # broker says there never was or is no more history to pull | ||||||
|                 # TODO: fwiw, we probably want a way to signal a throttle |             except DataUnavailable as due: | ||||||
|                 # condition (eg. with ib) so that we can halt the |                 message: str = due.args[0] | ||||||
|                 # request loop until the condition is resolved? |                 log.warning( | ||||||
|                 if timeframe > 1: |                     f'Provider {mod.name!r} halted backfill due to,\n\n' | ||||||
|                     await tractor.pause() | 
 | ||||||
|  |                     f'{message}\n' | ||||||
|  | 
 | ||||||
|  |                     f'fqme: {mkt.fqme}\n' | ||||||
|  |                     f'timeframe: {timeframe}\n' | ||||||
|  |                     f'last_start_dt: {last_start_dt}\n' | ||||||
|  |                     f'bf_until: {backfill_until_dt}\n' | ||||||
|  |                 ) | ||||||
|  |                 # UGH: what's a better way? | ||||||
|  |                 # TODO: backends are responsible for being correct on | ||||||
|  |                 # this right!? | ||||||
|  |                 # -[ ] in the `ib` case we could maybe offer some way | ||||||
|  |                 #     to halt the request loop until the condition is | ||||||
|  |                 #     resolved or should the backend be entirely in | ||||||
|  |                 #     charge of solving such faults? yes, right? | ||||||
|                 return |                 return | ||||||
| 
 | 
 | ||||||
|  |             time: np.ndarray = array['time'] | ||||||
|             assert ( |             assert ( | ||||||
|                 array['time'][0] |                 time[0] | ||||||
|                 == |                 == | ||||||
|                 next_start_dt.timestamp() |                 next_start_dt.timestamp() | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             diff = last_start_dt - next_start_dt |             assert time[-1] == next_end_dt.timestamp() | ||||||
|             frame_time_diff_s = diff.seconds | 
 | ||||||
|  |             expected_dur: Interval = last_start_dt - next_start_dt | ||||||
| 
 | 
 | ||||||
|             # frame's worth of sample-period-steps, in seconds |             # frame's worth of sample-period-steps, in seconds | ||||||
|             frame_size_s: float = len(array) * timeframe |             frame_size_s: float = len(array) * timeframe | ||||||
|             expected_frame_size_s: float = frame_size_s + timeframe |             recv_frame_dur: Duration = ( | ||||||
|             if frame_time_diff_s > expected_frame_size_s: |                 from_timestamp(array[-1]['time']) | ||||||
| 
 |                 - | ||||||
|  |                 from_timestamp(array[0]['time']) | ||||||
|  |             ) | ||||||
|  |             if ( | ||||||
|  |                 (lt_frame := (recv_frame_dur < expected_dur)) | ||||||
|  |                 or | ||||||
|  |                 (null_frame := (frame_size_s == 0)) | ||||||
|  |                 # ^XXX, should NEVER hit now! | ||||||
|  |             ): | ||||||
|                 # XXX: query result includes a start point prior to our |                 # XXX: query result includes a start point prior to our | ||||||
|                 # expected "frame size" and thus is likely some kind of |                 # expected "frame size" and thus is likely some kind of | ||||||
|                 # history gap (eg. market closed period, outage, etc.) |                 # history gap (eg. market closed period, outage, etc.) | ||||||
|                 # so just report it to console for now. |                 # so just report it to console for now. | ||||||
|  |                 if lt_frame: | ||||||
|  |                     reason = 'Possible GAP (or first-datum)' | ||||||
|  |                 else: | ||||||
|  |                     assert null_frame | ||||||
|  |                     reason = 'NULL-FRAME' | ||||||
|  | 
 | ||||||
|  |                 missing_dur: Interval = expected_dur.end - recv_frame_dur.end | ||||||
|                 log.warning( |                 log.warning( | ||||||
|                     'GAP DETECTED:\n' |                     f'{timeframe}s-series {reason} detected!\n' | ||||||
|                     f'last_start_dt: {last_start_dt}\n' |                     f'fqme: {mkt.fqme}\n' | ||||||
|                     f'diff: {diff}\n' |                     f'last_start_dt: {last_start_dt}\n\n' | ||||||
|                     f'frame_time_diff_s: {frame_time_diff_s}\n' |                     f'recv interval: {recv_frame_dur}\n' | ||||||
|  |                     f'expected interval: {expected_dur}\n\n' | ||||||
|  | 
 | ||||||
|  |                     f'Missing history duration of {missing_dur.in_words()!r}\n' | ||||||
|  |                     f'{missing_dur}\n' | ||||||
|                 ) |                 ) | ||||||
|  |                 # await tractor.pause() | ||||||
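A standalone recreation of the `lt_frame` branch above using synthetic epoch timestamps (all values invented for illustration): if the span actually covered by the returned samples is shorter than the requested wall-clock window, some history is missing::

    import numpy as np
    from pendulum import from_timestamp

    timeframe = 60  # bar period in seconds

    # the requested window: 100 bars ending where the prior frame began
    window_start = from_timestamp(0)
    prev_frame_start = from_timestamp(100 * timeframe)
    expected_dur = prev_frame_start - window_start

    # what the provider returned: bars that stop short of the window end
    times = np.arange(0, 90 * timeframe, timeframe, dtype=float)
    recv_frame_dur = from_timestamp(times[-1]) - from_timestamp(times[0])

    if recv_frame_dur < expected_dur:
        missing = expected_dur - recv_frame_dur
        print(f'possible gap: {missing.total_seconds()}s of history missing')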
| 
 | 
 | ||||||
|             to_push = diff_history( |             to_push = diff_history( | ||||||
|                 array, |                 array, | ||||||
|  | @@ -565,7 +612,8 @@ async def start_backfill( | ||||||
|             # long-term storage. |             # long-term storage. | ||||||
|             if ( |             if ( | ||||||
|                 storage is not None |                 storage is not None | ||||||
|                 and write_tsdb |                 and | ||||||
|  |                 write_tsdb | ||||||
|             ): |             ): | ||||||
|                 log.info( |                 log.info( | ||||||
|                     f'Writing {ln} frame to storage:\n' |                     f'Writing {ln} frame to storage:\n' | ||||||
|  | @@ -578,6 +626,7 @@ async def start_backfill( | ||||||
|                     'crypto', |                     'crypto', | ||||||
|                     'crypto_currency', |                     'crypto_currency', | ||||||
|                     'fiat',  # a "forex pair" |                     'fiat',  # a "forex pair" | ||||||
|  |                     'perpetual_future',  # stupid "perps" from cex land | ||||||
|                 }: |                 }: | ||||||
|                     # for now, our table key schema is not including |                     # for now, our table key schema is not including | ||||||
|                     # the dst[/src] source asset token. |                     # the dst[/src] source asset token. | ||||||
|  | @@ -685,7 +734,7 @@ async def back_load_from_tsdb( | ||||||
|         last_tsdb_dt |         last_tsdb_dt | ||||||
|         and latest_start_dt |         and latest_start_dt | ||||||
|     ): |     ): | ||||||
|         backfilled_size_s = ( |         backfilled_size_s: Duration = ( | ||||||
|             latest_start_dt - last_tsdb_dt |             latest_start_dt - last_tsdb_dt | ||||||
|         ).seconds |         ).seconds | ||||||
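One editorial caution on the `.seconds` access above: on stdlib `timedelta` (and on timedelta-derived interval types) `.seconds` is only the sub-day remainder, whereas `.total_seconds()` covers the whole span. Whether the interval type used here avoids that pitfall isn't visible in this diff::

    from datetime import datetime, timedelta

    gap: timedelta = datetime(2024, 1, 10) - datetime(2024, 1, 7, 23, 0)
    print(gap.seconds)          # 3600, just the remainder within the last day
    print(gap.total_seconds())  # 176400.0, the full ~2d1h span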
|         # if the shm buffer len is not large enough to contain |         # if the shm buffer len is not large enough to contain | ||||||
|  | @@ -908,6 +957,8 @@ async def tsdb_backfill( | ||||||
|             f'{pformat(config)}\n' |             f'{pformat(config)}\n' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|  |         # concurrently load the provider's most-recent-frame AND any | ||||||
|  |         # pre-existing tsdb history already saved in `piker` storage. | ||||||
|         dt_eps: list[DateTime, DateTime] = [] |         dt_eps: list[DateTime, DateTime] = [] | ||||||
|         async with trio.open_nursery() as tn: |         async with trio.open_nursery() as tn: | ||||||
|             tn.start_soon( |             tn.start_soon( | ||||||
|  | @@ -918,7 +969,6 @@ async def tsdb_backfill( | ||||||
|                 timeframe, |                 timeframe, | ||||||
|                 config, |                 config, | ||||||
|             ) |             ) | ||||||
| 
 |  | ||||||
|             tsdb_entry: tuple = await load_tsdb_hist( |             tsdb_entry: tuple = await load_tsdb_hist( | ||||||
|                 storage, |                 storage, | ||||||
|                 mkt, |                 mkt, | ||||||
|  | @@ -947,6 +997,25 @@ async def tsdb_backfill( | ||||||
|                 mr_end_dt, |                 mr_end_dt, | ||||||
|             ) = dt_eps |             ) = dt_eps | ||||||
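The comment further above describes kicking off the provider's most-recent-frame fetch and the local tsdb read concurrently under a single nursery. A minimal runnable `trio` sketch of that fan-out pattern, with both loaders reduced to sleeping stand-ins::

    import trio

    async def load_first_frame(results: dict):
        await trio.sleep(0.1)  # pretend: provider round-trip
        results['mr_frame'] = 'most-recent frame'

    async def load_tsdb_history(results: dict):
        await trio.sleep(0.1)  # pretend: local storage read
        results['tsdb'] = 'pre-existing history'

    async def main():
        results: dict = {}
        async with trio.open_nursery() as tn:
            tn.start_soon(load_first_frame, results)
            tn.start_soon(load_tsdb_history, results)
        # nursery exit implies both tasks completed
        print(results)

    trio.run(main)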
| 
 | 
 | ||||||
|  |             first_frame_dur_s: Duration = (mr_end_dt - mr_start_dt).seconds | ||||||
|  |             calced_frame_size: Duration = mk_duration( | ||||||
|  |                 seconds=first_frame_dur_s, | ||||||
|  |             ) | ||||||
|  |             # NOTE: attempt to use the backend-declared default frame | ||||||
|  |             # sizing (as allowed by their time-series query APIs) and, | ||||||
|  |             # if not provided, fall back to a default constructed from | ||||||
|  |             # the first frame received above. | ||||||
|  |             def_frame_durs: dict[ | ||||||
|  |                 int, | ||||||
|  |                 Duration, | ||||||
|  |             ]|None = config.get('frame_types', None) | ||||||
|  |             if def_frame_durs: | ||||||
|  |                 def_frame_size: Duration = def_frame_durs[timeframe] | ||||||
|  |                 assert def_frame_size == calced_frame_size | ||||||
|  |             else: | ||||||
|  |                 # use the duration calculated from the first frame above. | ||||||
|  |                 def_frame_size = calced_frame_size | ||||||
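The selection logic just above in miniature: prefer a backend-declared per-timeframe duration from `config['frame_types']`, otherwise fall back to the duration measured from the first received frame. The `config` shape is inferred from this diff, not a documented API::

    from pendulum import Duration, duration

    def pick_frame_duration(
        config: dict,
        timeframe: int,
        first_frame_secs: float,
    ) -> Duration:
        calced: Duration = duration(seconds=first_frame_secs)
        declared: dict[int, Duration] | None = config.get('frame_types')
        if declared:
            # prefer the backend-declared sizing for this sample period
            return declared[timeframe]
        # otherwise use what the first received frame measured
        return calced

    cfg = {'frame_types': {1: duration(days=1), 60: duration(days=30)}}
    print(pick_frame_duration(cfg, 60, 0))  # the declared 30-day duration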
|  | 
 | ||||||
|             # NOTE: when there's no offline data, there are 2 cases: |             # NOTE: when there's no offline data, there are 2 cases: | ||||||
|             # - data backend doesn't support timeframe/sample |             # - data backend doesn't support timeframe/sample | ||||||
|             #   period (in which case `dt_eps` should be `None` and |             #   period (in which case `dt_eps` should be `None` and | ||||||
|  | @@ -977,7 +1046,7 @@ async def tsdb_backfill( | ||||||
|                     partial( |                     partial( | ||||||
|                         start_backfill, |                         start_backfill, | ||||||
|                         get_hist=get_hist, |                         get_hist=get_hist, | ||||||
|                         frame_types=config.get('frame_types', None), |                         def_frame_duration=def_frame_size, | ||||||
|                         mod=mod, |                         mod=mod, | ||||||
|                         mkt=mkt, |                         mkt=mkt, | ||||||
|                         shm=shm, |                         shm=shm, | ||||||
|  |  | ||||||
|  | @@ -2,13 +2,13 @@ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "anyio" | name = "anyio" | ||||||
| version = "3.7.1" | version = "4.6.2.post1" | ||||||
| description = "High level compatibility layer for multiple asynchronous event loop implementations" | description = "High level compatibility layer for multiple asynchronous event loop implementations" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.9" | ||||||
| files = [ | files = [ | ||||||
|     {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"}, |     {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"}, | ||||||
|     {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"}, |     {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
|  | @@ -50,7 +50,7 @@ files = [ | ||||||
| [[package]] | [[package]] | ||||||
| name = "asyncvnc" | name = "asyncvnc" | ||||||
| version = "1.1.0" | version = "1.1.0" | ||||||
| description = "" | description = "Asynchronous VNC for Python" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">= 3.7" | python-versions = ">= 3.7" | ||||||
| files = [] | files = [] | ||||||
|  | @@ -69,21 +69,22 @@ resolved_reference = "825447564e3af6b0d4a0996793f1ca7fb360c48f" | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "attrs" | name = "attrs" | ||||||
| version = "23.1.0" | version = "23.2.0" | ||||||
| description = "Classes Without Boilerplate" | description = "Classes Without Boilerplate" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.7" | ||||||
| files = [ | files = [ | ||||||
|     {file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"}, |     {file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"}, | ||||||
|     {file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"}, |     {file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.extras] | [package.extras] | ||||||
| cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] | cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] | ||||||
| dev = ["attrs[docs,tests]", "pre-commit"] | dev = ["attrs[tests]", "pre-commit"] | ||||||
| docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] | docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] | ||||||
| tests = ["attrs[tests-no-zope]", "zope-interface"] | tests = ["attrs[tests-no-zope]", "zope-interface"] | ||||||
| tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] | tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"] | ||||||
|  | tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "bidict" | name = "bidict" | ||||||
|  | @@ -103,75 +104,78 @@ test = ["hypothesis", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "py | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "cffi" | name = "cffi" | ||||||
| version = "1.15.1" | version = "1.17.1" | ||||||
| description = "Foreign Function Interface for Python calling C code." | description = "Foreign Function Interface for Python calling C code." | ||||||
| optional = false | optional = false | ||||||
| python-versions = "*" | python-versions = ">=3.8" | ||||||
| files = [ | files = [ | ||||||
|     {file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"}, |     {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, | ||||||
|     {file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"}, |     {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, | ||||||
|     {file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"}, |     {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, | ||||||
|     {file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"}, |     {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, | ||||||
|     {file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"}, |     {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, | ||||||
|     {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"}, |     {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, | ||||||
|     {file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"}, |     {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"}, |     {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"}, |     {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"}, |     {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"}, |     {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"}, |     {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"}, |     {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"}, |     {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"}, |     {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"}, |     {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"}, |     {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, | ||||||
|     {file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"}, |     {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"}, |     {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"}, |     {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"}, |     {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"}, |     {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"}, |     {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"}, |     {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"}, |     {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"}, |     {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"}, |     {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, | ||||||
|     {file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"}, |     {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, | ||||||
|     {file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"}, |     {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, | ||||||
|     {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"}, |     {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, | ||||||
|     {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"}, |     {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, | ||||||
|     {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"}, |     {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, | ||||||
|     {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"}, |     {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, | ||||||
|     {file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"}, |     {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, | ||||||
|     {file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"}, |     {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, | ||||||
|     {file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"}, |     {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, | ||||||
|     {file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"}, |     {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, | ||||||
|     {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"}, |     {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, | ||||||
|     {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"}, |     {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, | ||||||
|     {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"}, |     {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, | ||||||
|     {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"}, |     {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, | ||||||
|     {file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"}, |     {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, | ||||||
|     {file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"}, |     {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, | ||||||
|     {file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"}, |     {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, | ||||||
|     {file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"}, |     {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, | ||||||
|     {file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"}, |     {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, | ||||||
|     {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"}, |     {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, | ||||||
|     {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"}, |     {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, | ||||||
|     {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"}, |     {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, | ||||||
|     {file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"}, |     {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, | ||||||
|     {file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"}, |     {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, | ||||||
|     {file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"}, |     {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"}, |     {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"}, |     {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"}, |     {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"}, |     {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"}, |     {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"}, |     {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"}, |     {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"}, |     {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"}, |     {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"}, |     {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, | ||||||
|     {file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"}, |     {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, | ||||||
|     {file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"}, |     {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, | ||||||
|  |     {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, | ||||||
|  |     {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, | ||||||
|  |     {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
|  | @@ -221,47 +225,51 @@ development = ["black", "flake8", "mypy", "pytest", "types-colorama"] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "cryptography" | name = "cryptography" | ||||||
| version = "41.0.3" | version = "43.0.3" | ||||||
| description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." | description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.7" | ||||||
| files = [ | files = [ | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"}, |     {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"}, |     {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"}, |     {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"}, |     {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"}, |     {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"}, |     {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"}, |     {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"}, |     {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"}, |     {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"}, | ||||||
|     {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"}, |     {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"}, | ||||||
|     {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"}, |     {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"}, | ||||||
|     {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"}, |     {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"}, | ||||||
|     {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"}, |     {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"}, | ||||||
|     {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"}, |     {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"}, | ||||||
|     {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"}, |     {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"}, | ||||||
|     {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"}, |     {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"}, | ||||||
|     {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"}, |     {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"}, | ||||||
|     {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"}, |     {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"}, | ||||||
|     {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"}, |     {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"}, | ||||||
|     {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"}, |     {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"}, | ||||||
|     {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"}, |     {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"}, | ||||||
|     {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"}, |     {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"}, | ||||||
|     {file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"}, |     {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"}, | ||||||
|  |     {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"}, | ||||||
|  |     {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"}, | ||||||
|  |     {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"}, | ||||||
|  |     {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
| cffi = ">=1.12" | cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} | ||||||
| 
 | 
 | ||||||
| [package.extras] | [package.extras] | ||||||
| docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] | docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] | ||||||
| docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] | docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] | ||||||
| nox = ["nox"] | nox = ["nox"] | ||||||
| pep8test = ["black", "check-sdist", "mypy", "ruff"] | pep8test = ["check-sdist", "click", "mypy", "ruff"] | ||||||
| sdist = ["build"] | sdist = ["build"] | ||||||
| ssh = ["bcrypt (>=3.1.5)"] | ssh = ["bcrypt (>=3.1.5)"] | ||||||
| test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] | test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] | ||||||
| test-randomorder = ["pytest-randomly"] | test-randomorder = ["pytest-randomly"] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @@ -333,13 +341,13 @@ files = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "eventkit" | name = "eventkit" | ||||||
| version = "1.0.1" | version = "1.0.3" | ||||||
| description = "Event-driven data pipelines" | description = "Event-driven data pipelines" | ||||||
| optional = false | optional = false | ||||||
| python-versions = "*" | python-versions = "*" | ||||||
| files = [ | files = [ | ||||||
|     {file = "eventkit-1.0.1-py3-none-any.whl", hash = "sha256:6060a6aa04d5c5d20f2e55b7c17e2a22e8d31f88f2c2791d60eab3301aa040da"}, |     {file = "eventkit-1.0.3-py3-none-any.whl", hash = "sha256:0e199527a89aff9d195b9671ad45d2cc9f79ecda0900de8ecfb4c864d67ad6a2"}, | ||||||
|     {file = "eventkit-1.0.1.tar.gz", hash = "sha256:56b99a6205f61cd995aa5e0036e37bd61f052f7d32560e60b6fe45e319a7ef3a"}, |     {file = "eventkit-1.0.3.tar.gz", hash = "sha256:99497f6f3c638a50ff7616f2f8cd887b18bbff3765dc1bd8681554db1467c933"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
|  | @@ -347,13 +355,13 @@ numpy = "*" | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "exceptiongroup" | name = "exceptiongroup" | ||||||
| version = "1.1.3" | version = "1.2.2" | ||||||
| description = "Backport of PEP 654 (exception groups)" | description = "Backport of PEP 654 (exception groups)" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.7" | ||||||
| files = [ | files = [ | ||||||
|     {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, |     {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, | ||||||
|     {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, |     {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.extras] | [package.extras] | ||||||
|  | @@ -480,31 +488,37 @@ nest-asyncio = "*" | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "idna" | name = "idna" | ||||||
| version = "3.4" | version = "3.10" | ||||||
| description = "Internationalized Domain Names in Applications (IDNA)" | description = "Internationalized Domain Names in Applications (IDNA)" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.5" | python-versions = ">=3.6" | ||||||
| files = [ | files = [ | ||||||
|     {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"}, |     {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, | ||||||
|     {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"}, |     {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
|  | [package.extras] | ||||||
|  | all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] | ||||||
|  | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "importlib-metadata" | name = "importlib-metadata" | ||||||
| version = "6.8.0" | version = "8.5.0" | ||||||
| description = "Read metadata from Python packages" | description = "Read metadata from Python packages" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.8" | python-versions = ">=3.8" | ||||||
| files = [ | files = [ | ||||||
|     {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"}, |     {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, | ||||||
|     {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"}, |     {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
| zipp = ">=0.5" | zipp = ">=3.20" | ||||||
| 
 | 
 | ||||||
| [package.extras] | [package.extras] | ||||||
| docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] | check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] | ||||||
|  | cover = ["pytest-cov"] | ||||||
|  | doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] | ||||||
|  | enabler = ["pytest-enabler (>=2.2)"] | ||||||
| perf = ["ipython"] | perf = ["ipython"] | ||||||
| testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] | testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] | ||||||
| 
 | 
 | ||||||
|  | @@ -644,13 +658,13 @@ files = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "nest-asyncio" | name = "nest-asyncio" | ||||||
| version = "1.5.7" | version = "1.6.0" | ||||||
| description = "Patch asyncio to allow nested event loops" | description = "Patch asyncio to allow nested event loops" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.5" | python-versions = ">=3.5" | ||||||
| files = [ | files = [ | ||||||
|     {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"}, |     {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"}, | ||||||
|     {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"}, |     {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @@ -729,13 +743,13 @@ files = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "outcome" | name = "outcome" | ||||||
| version = "1.2.0" | version = "1.3.0.post0" | ||||||
| description = "Capture the outcome of Python function calls." | description = "Capture the outcome of Python function calls." | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.7" | ||||||
| files = [ | files = [ | ||||||
|     {file = "outcome-1.2.0-py2.py3-none-any.whl", hash = "sha256:c4ab89a56575d6d38a05aa16daeaa333109c1f96167aba8901ab18b6b5e0f7f5"}, |     {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, | ||||||
|     {file = "outcome-1.2.0.tar.gz", hash = "sha256:6f82bd3de45da303cf1f771ecafa1633750a358436a8bb60e06a1ceb745d2672"}, |     {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
|  | @@ -743,13 +757,13 @@ attrs = ">=19.2.0" | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "packaging" | name = "packaging" | ||||||
| version = "23.1" | version = "24.2" | ||||||
| description = "Core utilities for Python packages" | description = "Core utilities for Python packages" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.8" | ||||||
| files = [ | files = [ | ||||||
|     {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, |     {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, | ||||||
|     {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, |     {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @@ -836,24 +850,24 @@ xlsxwriter = ["xlsxwriter"] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "pycparser" | name = "pycparser" | ||||||
| version = "2.21" | version = "2.22" | ||||||
| description = "C parser in Python" | description = "C parser in Python" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" | python-versions = ">=3.8" | ||||||
| files = [ | files = [ | ||||||
|     {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, |     {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, | ||||||
|     {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, |     {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "pygments" | name = "pygments" | ||||||
| version = "2.16.1" | version = "2.18.0" | ||||||
| description = "Pygments is a syntax highlighting package written in Python." | description = "Pygments is a syntax highlighting package written in Python." | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.8" | ||||||
| files = [ | files = [ | ||||||
|     {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"}, |     {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, | ||||||
|     {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"}, |     {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.extras] | [package.extras] | ||||||
|  | @@ -861,24 +875,24 @@ plugins = ["importlib-metadata"] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "pyreadline3" | name = "pyreadline3" | ||||||
| version = "3.4.1" | version = "3.5.4" | ||||||
| description = "A python implementation of GNU readline." | description = "A python implementation of GNU readline." | ||||||
| optional = false | optional = false | ||||||
| python-versions = "*" | python-versions = ">=3.8" | ||||||
| files = [ | files = [ | ||||||
|     {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"}, |     {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"}, | ||||||
|     {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"}, |     {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "python-dateutil" | name = "python-dateutil" | ||||||
| version = "2.8.2" | version = "2.9.0.post0" | ||||||
| description = "Extensions to the standard Python datetime module" | description = "Extensions to the standard Python datetime module" | ||||||
| optional = false | optional = false | ||||||
| python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" | ||||||
| files = [ | files = [ | ||||||
|     {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, |     {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, | ||||||
|     {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, |     {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
|  | @ -897,13 +911,13 @@ files = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "rich" | name = "rich" | ||||||
| version = "13.5.2" | version = "13.9.4" | ||||||
| description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" | description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7.0" | python-versions = ">=3.8.0" | ||||||
| files = [ | files = [ | ||||||
|     {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"}, |     {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, | ||||||
|     {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"}, |     {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
|  | @ -926,13 +940,13 @@ files = [ | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "sniffio" | name = "sniffio" | ||||||
| version = "1.3.0" | version = "1.3.1" | ||||||
| description = "Sniff out which async library your code is running under" | description = "Sniff out which async library your code is running under" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.7" | ||||||
| files = [ | files = [ | ||||||
|     {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, |     {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, | ||||||
|     {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, |     {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @ -962,24 +976,24 @@ pyreadline3 = {version = "*", markers = "platform_system == \"Windows\""} | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "tomli" | name = "tomli" | ||||||
| version = "2.0.1" | version = "2.1.0" | ||||||
| description = "A lil' TOML parser" | description = "A lil' TOML parser" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.8" | ||||||
| files = [ | files = [ | ||||||
|     {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, |     {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"}, | ||||||
|     {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, |     {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "tomli-w" | name = "tomli-w" | ||||||
| version = "1.0.0" | version = "1.1.0" | ||||||
| description = "A lil' TOML writer" | description = "A lil' TOML writer" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.9" | ||||||
| files = [ | files = [ | ||||||
|     {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"}, |     {file = "tomli_w-1.1.0-py3-none-any.whl", hash = "sha256:1403179c78193e3184bfaade390ddbd071cba48a32a2e62ba11aae47490c63f7"}, | ||||||
|     {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"}, |     {file = "tomli_w-1.1.0.tar.gz", hash = "sha256:49e847a3a304d516a169a601184932ef0f6b61623fe680f836a2aa7128ed0d33"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @ -1114,13 +1128,13 @@ wsproto = ">=0.14" | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "typer" | name = "typer" | ||||||
| version = "0.9.0" | version = "0.9.4" | ||||||
| description = "Typer, build great CLIs. Easy to code. Based on Python type hints." | description = "Typer, build great CLIs. Easy to code. Based on Python type hints." | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.6" | python-versions = ">=3.6" | ||||||
| files = [ | files = [ | ||||||
|     {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"}, |     {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"}, | ||||||
|     {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"}, |     {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.dependencies] | [package.dependencies] | ||||||
|  | @ -1131,17 +1145,17 @@ typing-extensions = ">=3.7.4.3" | ||||||
| all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] | all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] | ||||||
| dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] | dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"] | ||||||
| doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] | doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"] | ||||||
| test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] | test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "typing-extensions" | name = "typing-extensions" | ||||||
| version = "4.7.1" | version = "4.12.2" | ||||||
| description = "Backported and Experimental Type Hints for Python 3.7+" | description = "Backported and Experimental Type Hints for Python 3.8+" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.7" | python-versions = ">=3.8" | ||||||
| files = [ | files = [ | ||||||
|     {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, |     {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, | ||||||
|     {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, |     {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
|  | @ -1244,18 +1258,22 @@ h11 = ">=0.9.0,<1" | ||||||
| 
 | 
 | ||||||
| [[package]] | [[package]] | ||||||
| name = "zipp" | name = "zipp" | ||||||
| version = "3.16.2" | version = "3.21.0" | ||||||
| description = "Backport of pathlib-compatible object wrapper for zip files" | description = "Backport of pathlib-compatible object wrapper for zip files" | ||||||
| optional = false | optional = false | ||||||
| python-versions = ">=3.8" | python-versions = ">=3.9" | ||||||
| files = [ | files = [ | ||||||
|     {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"}, |     {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, | ||||||
|     {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"}, |     {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| [package.extras] | [package.extras] | ||||||
| docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] | check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] | ||||||
| testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] | cover = ["pytest-cov"] | ||||||
|  | doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] | ||||||
|  | enabler = ["pytest-enabler (>=2.2)"] | ||||||
|  | test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] | ||||||
|  | type = ["pytest-mypy"] | ||||||
| 
 | 
 | ||||||
| [metadata] | [metadata] | ||||||
| lock-version = "2.0" | lock-version = "2.0" | ||||||
|  |  | ||||||
|  | @ -25,11 +25,11 @@ build-backend = "hatchling.build" | ||||||
| ignore = [] | ignore = [] | ||||||
| 
 | 
 | ||||||
| # https://docs.astral.sh/ruff/settings/#lint_per-file-ignores | # https://docs.astral.sh/ruff/settings/#lint_per-file-ignores | ||||||
| "piker/ui/qt.py" = [ | # "piker/ui/qt.py" = [ | ||||||
|   "E402", | #   "E402", | ||||||
|   'F401',  # unused imports (without __all__ or blah as blah) | #   'F401',  # unused imports (without __all__ or blah as blah) | ||||||
|   # "F841", # unused variable rules | #   # "F841", # unused variable rules | ||||||
| ] | # ] | ||||||
| # ignore-init-module-imports = false | # ignore-init-module-imports = false | ||||||
| 
 | 
 | ||||||
| # ------ - ------ | # ------ - ------ | ||||||
|  | @ -144,4 +144,4 @@ pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" } | ||||||
| asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" } | asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" } | ||||||
| tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" } | tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" } | ||||||
| msgspec = { git = "https://github.com/jcrist/msgspec.git" } | msgspec = { git = "https://github.com/jcrist/msgspec.git" } | ||||||
| tractor = { path = "../tractor", editable = true } | tractor = { path = "../tractor" } | ||||||
|  |  | ||||||
|  | @ -10,7 +10,7 @@ from piker import ( | ||||||
|     config, |     config, | ||||||
| ) | ) | ||||||
| from piker.service import ( | from piker.service import ( | ||||||
|     Services, |     get_service_mngr, | ||||||
| ) | ) | ||||||
| from piker.log import get_console_log | from piker.log import get_console_log | ||||||
| 
 | 
 | ||||||
|  | @ -129,7 +129,7 @@ async def _open_test_pikerd( | ||||||
|         ) as service_manager, |         ) as service_manager, | ||||||
|     ): |     ): | ||||||
|         # this proc/actor is the pikerd |         # this proc/actor is the pikerd | ||||||
|         assert service_manager is Services |         assert service_manager is get_service_mngr() | ||||||
| 
 | 
 | ||||||
|         async with tractor.wait_for_actor( |         async with tractor.wait_for_actor( | ||||||
|             'pikerd', |             'pikerd', | ||||||
|  |  | ||||||
|  | @ -26,7 +26,7 @@ import pytest | ||||||
| import tractor | import tractor | ||||||
| from uuid import uuid4 | from uuid import uuid4 | ||||||
| 
 | 
 | ||||||
| from piker.service import Services | from piker.service import ServiceMngr | ||||||
| from piker.log import get_logger | from piker.log import get_logger | ||||||
| from piker.clearing._messages import ( | from piker.clearing._messages import ( | ||||||
|     Order, |     Order, | ||||||
|  | @ -158,7 +158,7 @@ def load_and_check_pos( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def test_ems_err_on_bad_broker( | def test_ems_err_on_bad_broker( | ||||||
|     open_test_pikerd: Services, |     open_test_pikerd: ServiceMngr, | ||||||
|     loglevel: str, |     loglevel: str, | ||||||
| ): | ): | ||||||
|     async def load_bad_fqme(): |     async def load_bad_fqme(): | ||||||
|  |  | ||||||
|  | @ -15,7 +15,7 @@ import tractor | ||||||
| 
 | 
 | ||||||
| from piker.service import ( | from piker.service import ( | ||||||
|     find_service, |     find_service, | ||||||
|     Services, |     ServiceMngr, | ||||||
| ) | ) | ||||||
| from piker.data import ( | from piker.data import ( | ||||||
|     open_feed, |     open_feed, | ||||||
|  | @ -44,7 +44,7 @@ def test_runtime_boot( | ||||||
|     async def main(): |     async def main(): | ||||||
|         port = 6666 |         port = 6666 | ||||||
|         daemon_addr = ('127.0.0.1', port) |         daemon_addr = ('127.0.0.1', port) | ||||||
|         services: Services |         services: ServiceMngr | ||||||
| 
 | 
 | ||||||
|         async with ( |         async with ( | ||||||
|             open_test_pikerd( |             open_test_pikerd( | ||||||
|  |  | ||||||
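Taken together, the test diffs above migrate away from the old module-level ``Services`` class to a ``ServiceMngr`` type paired with a ``get_service_mngr()`` accessor. A minimal sketch of the new pattern, assuming only the two names the diffs themselves import from ``piker.service`` (the helper function and its body are illustrative, not part of the diff):

    # sketch of the accessor pattern introduced above; only
    # `get_service_mngr` and `ServiceMngr` appear in the diffs,
    # everything else here is a hypothetical usage example
    from piker.service import (
        get_service_mngr,
        ServiceMngr,
    )

    async def check_active_mngr() -> None:
        # instead of referencing a module-level `Services` singleton,
        # callers now ask the runtime for the active manager instance
        mngr: ServiceMngr = get_service_mngr()

        # the conftest assertion above relies on this being the very
        # object yielded by the `open_test_pikerd()` fixture
        assert isinstance(mngr, ServiceMngr)

This matches how the tests now annotate the fixture-provided manager (``services: ServiceMngr``) rather than type-hinting against the old singleton class.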