fix_deribit_hist_queries_NEW #25

@@ -0,0 +1,134 @@
+with (import <nixpkgs> {});
+
+let
+  glibStorePath = lib.getLib glib;
+  zlibStorePath = lib.getLib zlib;
+  zstdStorePath = lib.getLib zstd;
+  dbusStorePath = lib.getLib dbus;
+  libGLStorePath = lib.getLib libGL;
+  freetypeStorePath = lib.getLib freetype;
+  qt6baseStorePath = lib.getLib qt6.qtbase;
+  fontconfigStorePath = lib.getLib fontconfig;
+  libxkbcommonStorePath = lib.getLib libxkbcommon;
+  xcbutilcursorStorePath = lib.getLib xcb-util-cursor;
+
+  qtpyStorePath = lib.getLib python312Packages.qtpy;
+  pyqt6StorePath = lib.getLib python312Packages.pyqt6;
+  pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip;
+  rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz;
+  qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle;
+
+  xorgLibX11StorePath = lib.getLib xorg.libX11;
+  xorgLibxcbStorePath = lib.getLib xorg.libxcb;
+  xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm;
+  xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage;
+  xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors;
+  xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms;
+  xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil;
+
+in
+stdenv.mkDerivation {
+  name = "piker-qt6-uv";
+  buildInputs = [
+    # System requirements.
+    glib
+    zlib
+    dbus
+    zstd
+    libGL
+    freetype
+    qt6.qtbase
+    libgcc.lib
+    fontconfig
+    libxkbcommon
+
+    # Xorg requirements
+    xcb-util-cursor
+    xorg.libxcb
+    xorg.libX11
+    xorg.xcbutilwm
+    xorg.xcbutilimage
+    xorg.xcbutilerrors
+    xorg.xcbutilkeysyms
+    xorg.xcbutilrenderutil
+
+    # Python requirements.
+    python312Full
+    python312Packages.uv
+    python312Packages.qdarkstyle
+    python312Packages.rapidfuzz
+    python312Packages.pyqt6
+    python312Packages.qtpy
+  ];
+  src = null;
+  shellHook = ''
+    set -e
+
+    # Set the Qt plugin path
+    # export QT_DEBUG_PLUGINS=1
+    QTBASE_PATH="${qt6baseStorePath}/lib"
+    QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins"
+    QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms"
+
+    LIB_GCC_PATH="${libgcc.lib}/lib"
+    GLIB_PATH="${glibStorePath}/lib"
+    ZSTD_PATH="${zstdStorePath}/lib"
+    ZLIB_PATH="${zlibStorePath}/lib"
+    DBUS_PATH="${dbusStorePath}/lib"
+    LIBGL_PATH="${libGLStorePath}/lib"
+    FREETYPE_PATH="${freetypeStorePath}/lib"
+    FONTCONFIG_PATH="${fontconfigStorePath}/lib"
+    LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib"
+
+    XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib"
+    XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib"
+    XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib"
+    XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib"
+    XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib"
+    XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib"
+    XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib"
+    XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZLIB_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH"
+
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH"
+    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH"
+
+    export LD_LIBRARY_PATH
+
+    RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages"
+    QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages"
+    QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages"
+    PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages"
+    PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages"
+
+    PATCH="$PATCH:$RPDFUZZ_PATH"
+    PATCH="$PATCH:$QDRKSTYLE_PATH"
+    PATCH="$PATCH:$QTPY_PATH"
+    PATCH="$PATCH:$PYQT6_PATH"
+    PATCH="$PATCH:$PYQT6_SIP_PATH"
+
+    export PATCH
+
+    # Install deps
+    uv lock
+  '';
+}
@@ -30,7 +30,8 @@ from types import ModuleType
 from typing import (
     Any,
     Iterator,
-    Generator
+    Generator,
+    TYPE_CHECKING,
 )

 import pendulum
@@ -59,8 +60,10 @@ from ..clearing._messages import (
     BrokerdPosition,
 )
 from piker.types import Struct
-from piker.data._symcache import SymbologyCache
-from ..log import get_logger
+from piker.log import get_logger
+
+if TYPE_CHECKING:
+    from piker.data._symcache import SymbologyCache

 log = get_logger(__name__)

@@ -493,6 +496,17 @@ class Account(Struct):

         _mktmap_table: dict[str, MktPair] | None = None,

+        only_require: list[str]|True = True,
+        # ^list of fqmes that are "required" to be processed from
+        # this ledger pass; we often don't care about others and
+        # definitely shouldn't always error in such cases.
+        # (eg. broker backend loaded that doesn't yet support the
+        # symcache but also, inside the paper engine we don't ad-hoc
+        # request `get_mkt_info()` for every symbol in the ledger,
+        # only the one for which we're simulating against).
+        # TODO, not sure if there's a better soln for this, ideally
+        # all backends get symcache support asap i guess..
+
     ) -> dict[str, Position]:
         '''
         Update the internal `.pps[str, Position]` table from input
@@ -535,11 +549,32 @@ class Account(Struct):
                 if _mktmap_table is None:
                     raise

+                required: bool = (
+                    only_require is True
+                    or (
+                        only_require is not True
+                        and
+                        fqme in only_require
+                    )
+                )
                 # XXX: caller is allowed to provide a fallback
                 # mktmap table for the case where a new position is
                 # being added and the preloaded symcache didn't
                 # have this entry prior (eg. with frickin IB..)
-                mkt = _mktmap_table[fqme]
+                if (
+                    not (mkt := _mktmap_table.get(fqme))
+                    and
+                    required
+                ):
+                    raise
+
+                elif not required:
+                    continue
+
+                else:
+                    # should be an entry retrieved somewhere
+                    assert mkt

             if not (pos := pps.get(bs_mktid)):

@@ -656,7 +691,7 @@ class Account(Struct):
     def write_config(self) -> None:
         '''
         Write the current account state to the user's account TOML file, normally
-        something like ``pps.toml``.
+        something like `pps.toml`.

         '''
         # TODO: show diff output?
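The `only_require` flag above turns per-fqme market lookups from hard errors into skips for symbols the caller never asked about. A minimal sketch of the same filtering semantics outside the `Account` class (all names here are illustrative, not piker APIs):

```python
def filter_positions(
    ledger_fqmes: list[str],
    mktmap: dict[str, str],
    only_require: list[str] | bool = True,
) -> dict[str, str]:
    out: dict[str, str] = {}
    for fqme in ledger_fqmes:
        # `True` means every fqme must resolve; a list means only
        # the named fqmes are required to resolve.
        required: bool = (
            only_require is True
            or fqme in only_require
        )
        mkt = mktmap.get(fqme)
        if not mkt:
            if required:
                raise KeyError(f'no market info for {fqme!r}')
            continue  # non-required and unknown: just skip it
        out[fqme] = mkt
    return out


# only 'btc.spot' must resolve; 'weird.fut' is silently skipped
assert filter_positions(
    ['btc.spot', 'weird.fut'],
    {'btc.spot': 'BTC/USD'},
    only_require=['btc.spot'],
) == {'btc.spot': 'BTC/USD'}
```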
@@ -50,7 +50,8 @@ __brokers__: list[str] = [
     'binance',
     'ib',
     'kraken',
-    'kucoin'
+    'kucoin',
+    'deribit',

     # broken but used to work
     # 'questrade',
@@ -61,7 +62,6 @@ __brokers__: list[str] = [
     # wstrade
     # iex
-    # deribit
     # bitso
 ]

@@ -71,7 +71,7 @@ def get_brokermod(brokername: str) -> ModuleType:
     Return the imported broker module by name.

     '''
-    module = import_module('.' + brokername, 'piker.brokers')
+    module: ModuleType = import_module('.' + brokername, 'piker.brokers')
     # we only allow monkeying because it's for internal keying
     module.name = module.__name__.split('.')[-1]
     return module
@@ -23,6 +23,7 @@ from __future__ import annotations
 from contextlib import (
     asynccontextmanager as acm,
 )
+from functools import partial
 from types import ModuleType
 from typing import (
     TYPE_CHECKING,
@@ -190,14 +191,17 @@ def broker_init(


 async def spawn_brokerd(

     brokername: str,
     loglevel: str | None = None,

     **tractor_kwargs,

 ) -> bool:
+    '''
+    Spawn a `brokerd.<backendname>` subactor service daemon
+    using `pikerd`'s service mngr.
+
+    '''
     from piker.service._util import log  # use service mngr log
     log.info(f'Spawning {brokername} broker daemon')

@@ -217,27 +221,35 @@ async def spawn_brokerd(

     # ask `pikerd` to spawn a new sub-actor and manage it under its
     # actor nursery
-    from piker.service import Services
+    from piker.service import (
+        get_service_mngr,
+        ServiceMngr,
+    )
     dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}'
-    portal = await Services.actor_n.start_actor(
-        dname,
-        enable_modules=_data_mods + tractor_kwargs.pop('enable_modules'),
-        debug_mode=Services.debug_mode,
+    mngr: ServiceMngr = get_service_mngr()
+    ctx: tractor.Context = await mngr.start_service(
+        daemon_name=dname,
+        ctx_ep=partial(
+            # signature of target root-task endpoint
+            daemon_fixture_ep,
+
+            # passed to daemon_fixture_ep(**kwargs)
+            brokername=brokername,
+            loglevel=loglevel,
+        ),
+        debug_mode=mngr.debug_mode,
+        loglevel=loglevel,
+        enable_modules=(
+            _data_mods
+            +
+            tractor_kwargs.pop('enable_modules')
+        ),
         **tractor_kwargs
     )
-
-    # NOTE: the service mngr expects an already spawned actor + its
-    # portal ref in order to do non-blocking setup of brokerd
-    # service nursery.
-    await Services.start_service_task(
-        dname,
-        portal,
-
-        # signature of target root-task endpoint
-        daemon_fixture_ep,
-        brokername=brokername,
-        loglevel=loglevel,
-    )
+    assert (
+        not ctx.cancel_called
+        and ctx.portal  # parent side
+        and dname in ctx.chan.uid  # subactor is named as desired
+    )
     return True

@@ -262,8 +274,7 @@ async def maybe_spawn_brokerd(
     from piker.service import maybe_spawn_daemon

     async with maybe_spawn_daemon(
-        f'brokerd.{brokername}',
+        service_name=f'brokerd.{brokername}',
         service_task_target=spawn_brokerd,
         spawn_args={
             'brokername': brokername,
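The `ctx_ep=partial(...)` call above binds the endpoint's kwargs now but defers the actual call until the service task runs it, so the service manager only ever sees a zero-extra-arg entrypoint. A minimal sketch of the same binding, with a stub standing in for the real `daemon_fixture_ep`:

```python
from functools import partial


async def daemon_fixture_ep(
    brokername: str,
    loglevel: str | None = None,
) -> None:
    # stand-in for the real fixture: would do brokerd setup here
    print(f'starting brokerd for {brokername!r} @ {loglevel!r}')


# bind the kwargs now, call later; the resulting `ep` takes no args
ep = partial(
    daemon_fixture_ep,
    brokername='deribit',
    loglevel='info',
)

# ...later, inside the spawned service task, the runtime just does:
# await ep()
```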
@@ -18,10 +18,11 @@
 Handy cross-broker utils.

 """
+from __future__ import annotations
 from functools import partial

 import json
-import asks
+import httpx
 import logging

 from ..log import (
@@ -60,11 +61,11 @@ class NoData(BrokerError):
     def __init__(
         self,
         *args,
-        info: dict,
+        info: dict|None = None,

     ) -> None:
         super().__init__(*args)
-        self.info: dict = info
+        self.info: dict|None = info

         # when raised, machinery can check if the backend
         # set a "frame size" for doing datetime calcs.
@@ -90,16 +91,18 @@ class DataThrottle(BrokerError):


 def resproc(
-    resp: asks.response_objects.Response,
+    resp: httpx.Response,
     log: logging.Logger,
     return_json: bool = True,
     log_resp: bool = False,

-) -> asks.response_objects.Response:
-    """Process response and return its json content.
+) -> httpx.Response:
+    '''
+    Process response and return its json content.

     Raise the appropriate error on non-200 OK responses.
-    """
+
+    '''
     if not resp.status_code == 200:
         raise BrokerError(resp.body)
     try:
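One thing worth flagging about the `asks` to `httpx` migration above: `httpx.Response` carries its payload on `.content`/`.text` and has no `.body` attribute, so the unchanged `raise BrokerError(resp.body)` context line would need the same treatment eventually. A standalone sketch of the equivalent helper under that assumption (the `resproc_sketch` name and `RuntimeError` are placeholders, not piker APIs):

```python
import logging

import httpx


def resproc_sketch(
    resp: httpx.Response,
    log: logging.Logger,
    return_json: bool = True,
) -> httpx.Response | dict:
    # httpx exposes the payload as `.text`/`.content`, not `.body`
    # like `asks` did, so error-reporting reads `.text` here.
    if resp.status_code != 200:
        raise RuntimeError(resp.text)
    try:
        data = resp.json()
    except ValueError:
        log.exception('Failed to decode JSON response')
        raise
    return data if return_json else resp
```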
@@ -1,8 +1,8 @@
 # piker: trading gear for hackers
 # Copyright (C)
 # Guillermo Rodriguez (aka ze jefe)
 # Tyler Goodlet
 # (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -25,14 +25,13 @@ from __future__ import annotations
 from collections import ChainMap
 from contextlib import (
     asynccontextmanager as acm,
+    AsyncExitStack,
 )
 from datetime import datetime
 from pprint import pformat
 from typing import (
     Any,
     Callable,
-    Hashable,
-    Sequence,
     Type,
 )
 import hmac
@@ -43,8 +42,7 @@ import trio
 from pendulum import (
     now,
 )
-import asks
-from rapidfuzz import process as fuzzy
+import httpx
 import numpy as np

 from piker import config
@@ -54,6 +52,7 @@ from piker.clearing._messages import (
 from piker.accounting import (
     Asset,
     digits_to_dec,
+    MktPair,
 )
 from piker.types import Struct
 from piker.data import (
@@ -69,7 +68,6 @@ from .venues import (
     PAIRTYPES,
     Pair,
     MarketType,
-
     _spot_url,
     _futes_url,
     _testnet_futes_url,
@@ -79,19 +77,18 @@ from .venues import (
 log = get_logger('piker.brokers.binance')


-def get_config() -> dict:
+def get_config() -> dict[str, Any]:

     conf: dict
     path: Path
     conf, path = config.load(
         conf_name='brokers',
         touch_if_dne=True,
     )
-    section = conf.get('binance')
+    section: dict = conf.get('binance')

     if not section:
-        log.warning(f'No config section found for binance in {path}')
+        log.warning(
+            f'No config section found for binance in {path}'
+        )
         return {}

     return section

@@ -147,7 +144,7 @@ def binance_timestamp(

 class Client:
     '''
-    Async ReST API client using ``trio`` + ``asks`` B)
+    Async ReST API client using `trio` + `httpx` B)

     Supports all of the spot, margin and futures endpoints depending
     on method.

@@ -156,10 +153,17 @@ class Client:
     def __init__(
         self,

+        venue_sessions: dict[
+            str,  # venue key
+            tuple[httpx.AsyncClient, str]  # session, eps path
+        ],
+        conf: dict[str, Any],
         # TODO: change this to `Client.[mkt_]venue: MarketType`?
         mkt_mode: MarketType = 'spot',

     ) -> None:
+        self.conf = conf

         # build out pair info tables for each market type
         # and wrap in a chain-map view for search / query.
         self._spot_pairs: dict[str, Pair] = {}  # spot info table
@@ -186,44 +190,13 @@ class Client:
         # market symbols for use by search. See `.exch_info()`.
         self._pairs: ChainMap[str, Pair] = ChainMap()

-        # spot EPs sesh
-        self._sesh = asks.Session(connections=4)
-        self._sesh.base_location: str = _spot_url
-        # spot testnet
-        self._test_sesh: asks.Session = asks.Session(connections=4)
-        self._test_sesh.base_location: str = _testnet_spot_url
-
-        # margin and extended spot endpoints session.
-        self._sapi_sesh = asks.Session(connections=4)
-        self._sapi_sesh.base_location: str = _spot_url
-
-        # futes EPs sesh
-        self._fapi_sesh = asks.Session(connections=4)
-        self._fapi_sesh.base_location: str = _futes_url
-        # futes testnet
-        self._test_fapi_sesh: asks.Session = asks.Session(connections=4)
-        self._test_fapi_sesh.base_location: str = _testnet_futes_url
-
         # global client "venue selection" mode.
         # set this when you want to switch venues and not have to
         # specify the venue for the next request.
         self.mkt_mode: MarketType = mkt_mode

-        # per 8
-        self.venue_sesh: dict[
-            str,  # venue key
-            tuple[asks.Session, str]  # session, eps path
-        ] = {
-            'spot': (self._sesh, '/api/v3/'),
-            'spot_testnet': (self._test_sesh, '/fapi/v1/'),
-
-            'margin': (self._sapi_sesh, '/sapi/v1/'),
-
-            'usdtm_futes': (self._fapi_sesh, '/fapi/v1/'),
-            'usdtm_futes_testnet': (self._test_fapi_sesh, '/fapi/v1/'),
-
-            # 'futes_coin': self._dapi,  # TODO
-        }
+        # per-mkt-venue API client table
+        self.venue_sesh = venue_sessions

         # lookup for going from `.mkt_mode: str` to the config
         # subsection `key: str`
@@ -238,40 +211,6 @@ class Client:
             'futes': ['usdtm_futes'],
         }

-        # for creating API keys see,
-        # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072
-        self.conf: dict = get_config()
-
-        for key, subconf in self.conf.items():
-            if api_key := subconf.get('api_key', ''):
-                venue_keys: list[str] = self.confkey2venuekeys[key]
-
-                venue_key: str
-                sesh: asks.Session
-                for venue_key in venue_keys:
-                    sesh, _ = self.venue_sesh[venue_key]
-
-                    api_key_header: dict = {
-                        # taken from official:
-                        # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47
-                        "Content-Type": "application/json;charset=utf-8",
-
-                        # TODO: prolly should just always query and copy
-                        # in the real latest ver?
-                        "User-Agent": "binance-connector/6.1.6smbz6",
-                        "X-MBX-APIKEY": api_key,
-                    }
-                    sesh.headers.update(api_key_header)
-
-                    # if `.use_tesnet = true` in the config then
-                    # also add headers for the testnet session which
-                    # will be used for all order control
-                    if subconf.get('use_testnet', False):
-                        testnet_sesh, _ = self.venue_sesh[
-                            venue_key + '_testnet'
-                        ]
-                        testnet_sesh.headers.update(api_key_header)
-
     def _mk_sig(
         self,
         data: dict,
@@ -290,7 +229,6 @@ class Client:
             'to define the creds for auth-ed endpoints!?'
         )
-
         # XXX: Info on security and authentification
         # https://binance-docs.github.io/apidocs/#endpoint-security-type
         if not (api_secret := subconf.get('api_secret')):
@@ -319,7 +257,7 @@ class Client:
         params: dict,

         method: str = 'get',
-        venue: str | None = None,  # if None use `.mkt_mode` state
+        venue: str|None = None,  # if None use `.mkt_mode` state
         signed: bool = False,
         allow_testnet: bool = False,

@@ -330,8 +268,9 @@ class Client:
         - /fapi/v3/ USD-M FUTURES, or
         - /api/v3/ SPOT/MARGIN

-        account/market endpoint request depending on either passed in `venue: str`
-        or the current setting `.mkt_mode: str` setting, default `'spot'`.
+        account/market endpoint request depending on either passed in
+        `venue: str` or the current setting `.mkt_mode: str` setting,
+        default `'spot'`.


         Docs per venue API:
@@ -360,9 +299,6 @@ class Client:
             venue=venue_key,
         )

-        sesh: asks.Session
-        path: str
-
         # Check if we're configured to route order requests to the
         # venue equivalent's testnet.
         use_testnet: bool = False
@@ -387,11 +323,12 @@ class Client:
             # ctl machinery B)
             venue_key += '_testnet'

-        sesh, path = self.venue_sesh[venue_key]
-
-        meth: Callable = getattr(sesh, method)
+        client: httpx.AsyncClient
+        path: str
+        client, path = self.venue_sesh[venue_key]
+        meth: Callable = getattr(client, method)
         resp = await meth(
-            path=path + endpoint,
+            url=path + endpoint,
             params=params,
             timeout=float('inf'),
         )
@@ -433,7 +370,15 @@ class Client:
                 item['filters'] = filters

             pair_type: Type = PAIRTYPES[venue]
-            pair: Pair = pair_type(**item)
+            try:
+                pair: Pair = pair_type(**item)
+            except Exception as e:
+                e.add_note(
+                    "\nDon't panic, prolly stupid binance changed their symbology schema again..\n"
+                    'Check out their API docs here:\n\n'
+                    'https://binance-docs.github.io/apidocs/spot/en/#exchange-information'
+                )
+                raise
             pair_table[pair.symbol.upper()] = pair

             # update an additional top-level-cross-venue-table
@@ -528,7 +473,9 @@ class Client:

         '''
         pair_table: dict[str, Pair] = self._venue2pairs[
-            venue or self.mkt_mode
+            venue
+            or
+            self.mkt_mode
         ]
         if (
             expiry
@@ -547,9 +494,9 @@ class Client:
             venues: list[str] = [venue]

         # batch per-venue download of all exchange infos
-        async with trio.open_nursery() as rn:
+        async with trio.open_nursery() as tn:
             for ven in venues:
-                rn.start_soon(
+                tn.start_soon(
                     self._cache_pairs,
                     ven,
                 )
@@ -602,11 +549,11 @@ class Client:

     ) -> dict[str, Any]:

-        fq_pairs: dict = await self.exch_info()
+        fq_pairs: dict[str, Pair] = await self.exch_info()

         # TODO: cache this list like we were in
         # `open_symbol_search()`?
-        keys: list[str] = list(fq_pairs)
+        # keys: list[str] = list(fq_pairs)

         return match_from_pairs(
             pairs=fq_pairs,
@@ -614,9 +561,20 @@ class Client:
             score_cutoff=50,
         )

+    def pair2venuekey(
+        self,
+        pair: Pair,
+    ) -> str:
+        return {
+            'USDTM': 'usdtm_futes',
+            'SPOT': 'spot',
+            # 'COINM': 'coin_futes',
+            # ^-TODO-^ bc someone might want it..?
+        }[pair.venue]
+
     async def bars(
         self,
-        symbol: str,
+        mkt: MktPair,

         start_dt: datetime | None = None,
         end_dt: datetime | None = None,
@@ -646,16 +604,20 @@ class Client:
         start_time = binance_timestamp(start_dt)
         end_time = binance_timestamp(end_dt)

+        bs_pair: Pair = self._pairs[mkt.bs_fqme.upper()]
+
         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
         bars = await self._api(
             'klines',
             params={
-                'symbol': symbol.upper(),
+                # NOTE: always query using their native symbology!
+                'symbol': mkt.bs_mktid.upper(),
                 'interval': '1m',
                 'startTime': start_time,
                 'endTime': end_time,
                 'limit': limit
             },
+            venue=self.pair2venuekey(bs_pair),
             allow_testnet=False,
         )
         new_bars: list[tuple] = []
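The `try`/`except` wrapped around `pair_type(**item)` above leans on `BaseException.add_note()` (Python 3.11+), which attaches context to an in-flight exception without re-wrapping or re-raising a different type. A standalone sketch of the same pattern, using a hypothetical schema class in place of the real msgspec-style `Pair` structs:

```python
class PairSchema:
    # hypothetical stand-in for a strict, typed pair struct
    def __init__(self, symbol: str, tick_size: float) -> None:
        self.symbol = symbol
        self.tick_size = tick_size


def parse_pair(item: dict) -> PairSchema:
    try:
        return PairSchema(**item)
    except TypeError as e:
        # requires Python 3.11+; notes render below the traceback
        e.add_note(
            'Upstream probably changed their symbology schema; '
            f'offending payload keys: {list(item)}'
        )
        raise


# parse_pair({'symbol': 'BTCUSDT', 'tick_size': 0.01, 'bogus': 1})
# -> TypeError with the note appended to the traceback
```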
@@ -972,17 +934,148 @@ class Client:
         await self.close_listen_key(key)


+_venue_urls: dict[str, tuple[str, str]] = {
+    'spot': (
+        _spot_url,
+        '/api/v3/',
+    ),
+    'spot_testnet': (
+        _testnet_spot_url,
+        '/fapi/v1/'
+    ),
+    # margin and extended spot endpoints session.
+    # TODO: did this ever get implemented fully?
+    # 'margin': (
+    #     _spot_url,
+    #     '/sapi/v1/'
+    # ),
+
+    'usdtm_futes': (
+        _futes_url,
+        '/fapi/v1/',
+    ),
+
+    'usdtm_futes_testnet': (
+        _testnet_futes_url,
+        '/fapi/v1/',
+    ),
+
+    # TODO: for anyone who actually needs it ;P
+    # 'coin_futes': ()
+}
+
+
+def init_api_keys(
+    client: Client,
+    conf: dict[str, Any],
+) -> None:
+    '''
+    Set up per-venue API keys for each http client according to the
+    user's `brokers.conf`.
+
+    For ex, to use spot-testnet and live usdt futures APIs:
+
+    ```toml
+    [binance]
+    # spot test net
+    spot.use_testnet = true
+    spot.api_key = '<spot_api_key_from_binance_account>'
+    spot.api_secret = '<spot_api_key_password>'
+
+    # futes live
+    futes.use_testnet = false
+    accounts.usdtm = 'futes'
+    futes.api_key = '<futes_api_key_from_binance>'
+    futes.api_secret = '<futes_api_key_password>'
+
+    # if uncommented will use the built-in paper engine and not
+    # connect to `binance` API servers for order ctl.
+    # accounts.paper = 'paper'
+    ```
+
+    '''
+    for key, subconf in conf.items():
+        if api_key := subconf.get('api_key', ''):
+            venue_keys: list[str] = client.confkey2venuekeys[key]
+
+            venue_key: str
+            sesh: httpx.AsyncClient
+            for venue_key in venue_keys:
+                sesh, _ = client.venue_sesh[venue_key]
+
+                api_key_header: dict = {
+                    # taken from official:
+                    # https://github.com/binance/binance-futures-connector-python/blob/main/binance/api.py#L47
+                    "Content-Type": "application/json;charset=utf-8",
+
+                    # TODO: prolly should just always query and copy
+                    # in the real latest ver?
+                    "User-Agent": "binance-connector/6.1.6smbz6",
+                    "X-MBX-APIKEY": api_key,
+                }
+                sesh.headers.update(api_key_header)
+
+                # if `.use_testnet = true` in the config then
+                # also add headers for the testnet session which
+                # will be used for all order control
+                if subconf.get('use_testnet', False):
+                    testnet_sesh, _ = client.venue_sesh[
+                        venue_key + '_testnet'
+                    ]
+                    testnet_sesh.headers.update(api_key_header)
+
+
 @acm
-async def get_client() -> Client:
+async def get_client(
+    mkt_mode: MarketType = 'spot',
+) -> Client:
+    '''
+    Construct a single `piker` client which composes multiple underlying venue
+    specific API clients both for live and test networks.
+
+    '''
+    venue_sessions: dict[
+        str,  # venue key
+        tuple[httpx.AsyncClient, str]  # session, eps path
+    ] = {}
+    async with AsyncExitStack() as client_stack:
+        for name, (base_url, path) in _venue_urls.items():
+            api: httpx.AsyncClient = await client_stack.enter_async_context(
+                httpx.AsyncClient(
+                    base_url=base_url,
+                    # headers={},
+
+                    # TODO: is there a way to numerate this?
+                    # https://www.python-httpx.org/advanced/clients/#why-use-a-client
+                    # connections=4
+                )
+            )
+            venue_sessions[name] = (
+                api,
+                path,
+            )

-    client = Client()
-    await client.exch_info()
-    log.info(
-        f'{client} in {client.mkt_mode} mode: caching exchange infos..\n'
-        'Cached multi-market pairs:\n'
-        f'spot: {len(client._spot_pairs)}\n'
-        f'usdtm_futes: {len(client._ufutes_pairs)}\n'
-        f'Total: {len(client._pairs)}\n'
-    )
-
-    yield client
+        conf: dict[str, Any] = get_config()
+        # for creating API keys see,
+        # https://www.binance.com/en/support/faq/how-to-create-api-keys-on-binance-360002502072
+        client = Client(
+            venue_sessions=venue_sessions,
+            conf=conf,
+            mkt_mode=mkt_mode,
+        )
+        init_api_keys(
+            client=client,
+            conf=conf,
+        )
+        fq_pairs: dict[str, Pair] = await client.exch_info()
+        assert fq_pairs
+        log.info(
+            f'Loaded multi-venue `Client` in mkt_mode={client.mkt_mode!r}\n\n'
+            f'Symbology Summary:\n'
+            f'------ - ------\n'
+            f'spot: {len(client._spot_pairs)}\n'
+            f'usdtm_futes: {len(client._ufutes_pairs)}\n'
+            '------ - ------\n'
+            f'total: {len(client._pairs)}\n'
+        )
+        yield client
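The `AsyncExitStack` pattern in the new `get_client()` keeps every venue's `httpx.AsyncClient` open for the lifetime of the composed client and guarantees they all close together on exit or error. A minimal standalone sketch of the same idea (the venue names and URLs here are illustrative only, not binance's real endpoints):

```python
import asyncio
from contextlib import AsyncExitStack

import httpx


async def open_venue_sessions() -> None:
    # illustrative venue -> base-url table
    urls = {
        'spot': 'https://api.example.com',
        'futes': 'https://fapi.example.com',
    }
    sessions: dict[str, httpx.AsyncClient] = {}
    async with AsyncExitStack() as stack:
        for name, base_url in urls.items():
            # enter each client's async context; the stack unwinds
            # (and closes every session) when the block exits
            sessions[name] = await stack.enter_async_context(
                httpx.AsyncClient(base_url=base_url)
            )
        # ...use `sessions` for the duration of the app...


if __name__ == '__main__':
    asyncio.run(open_venue_sessions())
```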
@@ -264,15 +264,20 @@ async def open_trade_dialog(
     # do a open_symcache() call.. though maybe we can hide
     # this in a new async version of open_account()?
     async with open_cached_client('binance') as client:
-        subconf: dict = client.conf[venue_name]
-        use_testnet = subconf.get('use_testnet', False)
+        subconf: dict|None = client.conf.get(venue_name)

         # XXX: if no futes.api_key or spot.api_key has been set we
         # always fall back to the paper engine!
-        if not subconf.get('api_key'):
+        if (
+            not subconf
+            or
+            not subconf.get('api_key')
+        ):
             await ctx.started('paper')
             return

+        use_testnet: bool = subconf.get('use_testnet', False)
+
     async with (
         open_cached_client('binance') as client,
     ):
@@ -48,6 +48,7 @@ import tractor

 from piker.brokers import (
     open_cached_client,
+    NoData,
 )
 from piker._cacheables import (
     async_lifo_cache,
@@ -252,24 +253,30 @@ async def open_history_client(
             else:
                 client.mkt_mode = 'spot'

-            # NOTE: always query using their native symbology!
-            mktid: str = mkt.bs_mktid
-            array = await client.bars(
-                mktid,
+            array: np.ndarray = await client.bars(
+                mkt=mkt,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
+            if array.size == 0:
+                raise NoData(
+                    f'No frame for {start_dt} -> {end_dt}\n'
+                )

             times = array['time']
-            if (
-                end_dt is None
-            ):
-                inow = round(time.time())
+            if not times.any():
+                raise ValueError(
+                    'Bad frame with null-times?\n\n'
+                    f'{times}'
+                )
+
+            if end_dt is None:
+                inow: int = round(time.time())
                 if (inow - times[-1]) > 60:
                     await tractor.pause()

             start_dt = from_timestamp(times[0])
             end_dt = from_timestamp(times[-1])

             return array, start_dt, end_dt

     yield get_ohlc, {'erlangs': 3, 'rate': 3}
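The new guards separate two distinct failure modes in a backfill frame: an empty array (the venue simply has nothing for the requested window, signalled via `NoData`) versus a frame whose time column is all zeros (corrupt data worth failing loudly on). A minimal sketch of the same checks on a plain numpy structured array, with a stand-in exception class:

```python
import numpy as np


class NoData(Exception):
    # stand-in for piker.brokers.NoData
    pass


def validate_frame(array: np.ndarray) -> np.ndarray:
    # empty frame: nothing exists for this window
    if array.size == 0:
        raise NoData('no frame for requested window')

    # a frame whose epoch-time column is all zeros is corrupt,
    # not merely empty; fail loudly instead of returning it
    times = array['time']
    if not times.any():
        raise ValueError(f'bad frame with null-times?\n{times}')

    return array


dt = np.dtype([('time', 'i8'), ('close', 'f8')])
ok = validate_frame(np.array([(1700000000, 42.0)], dtype=dt))
```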
@@ -137,10 +137,12 @@ class SpotPair(Pair, frozen=True):
     quoteOrderQtyMarketAllowed: bool
     isSpotTradingAllowed: bool
     isMarginTradingAllowed: bool
+    otoAllowed: bool

     defaultSelfTradePreventionMode: str
     allowedSelfTradePreventionModes: list[str]
     permissions: list[str]
+    permissionSets: list[list[str]]

     # NOTE: see `.data._symcache.SymbologyCache.load()` for why
     ns_path: str = 'piker.brokers.binance:SpotPair'
@@ -179,7 +181,6 @@ class FutesPair(Pair):
     quoteAsset: str  # 'USDT',
     quotePrecision: int  # 8,
     requiredMarginPercent: float  # '5.0000',
-    settlePlan: int  # 0,
     timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'],
     triggerProtect: float  # '0.0500',
     underlyingSubType: list[str]  # ['PoW'],
@@ -25,6 +25,7 @@ from .api import (
     get_client,
 )
 from .feed import (
+    get_mkt_info,
     open_history_client,
     open_symbol_search,
     stream_quotes,
@@ -34,15 +35,20 @@ from .feed import (
 #     open_trade_dialog,
 #     norm_trade_records,
 # )
+from .venues import (
+    OptionPair,
+)

 log = get_logger(__name__)

 __all__ = [
     'get_client',
     # 'trades_dialogue',
+    'get_mkt_info',
     'open_history_client',
     'open_symbol_search',
     'stream_quotes',
+    'OptionPair',
     # 'norm_trade_records',
 ]
(File diff suppressed because it is too large)
@@ -18,38 +18,59 @@
 Deribit backend.

 '''
+from __future__ import annotations
 from contextlib import asynccontextmanager as acm
 from datetime import datetime
-from typing import Any, Optional, Callable
+from typing import (
+    # Any,
+    # Optional,
+    Callable,
+)
+# from pprint import pformat
 import time

+import cryptofeed
 import trio
 from trio_typing import TaskStatus
-import pendulum
-from rapidfuzz import process as fuzzy
+from pendulum import (
+    from_timestamp,
+)
 import numpy as np
 import tractor

-from piker.brokers import open_cached_client
-from piker.log import get_logger, get_console_log
-from piker.data import ShmArray
-from piker.brokers._util import (
-    BrokerError,
+from piker.accounting import (
+    Asset,
+    MktPair,
+    unpack_fqme,
+)
+from piker.brokers import (
+    open_cached_client,
+    NoData,
     DataUnavailable,
 )
-
-from cryptofeed import FeedHandler
-from cryptofeed.defines import (
-    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
-)
-from cryptofeed.symbols import Symbol
+from piker._cacheables import (
+    async_lifo_cache,
+)
+from piker.log import (
+    get_logger,
+    mk_repr,
+)
+from piker.data.validate import FeedInit

 from .api import (
-    Client, Trade,
-    get_config,
-    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
+    Client,
+    # get_config,
+    piker_sym_to_cb_sym,
+    cb_sym_to_deribit_inst,
+    str_to_cb_sym,
     maybe_open_price_feed
 )
+from .venues import (
+    Pair,
+    OptionPair,
+    Trade,
+)

 _spawn_kwargs = {
     'infect_asyncio': True,
@@ -64,90 +85,215 @@ async def open_history_client(
     mkt: MktPair,
 ) -> tuple[Callable, int]:
-
-    fnstrument: str = mkt.bs_fqme
     # TODO implement history getter for the new storage layer.
     async with open_cached_client('deribit') as client:

+        pair: OptionPair = client._pairs[mkt.dst.name]
+        # XXX NOTE, the cuckers use ms !!!
+        creation_time_s: int = pair.creation_timestamp/1000
+
         async def get_ohlc(
-            end_dt: Optional[datetime] = None,
-            start_dt: Optional[datetime] = None,
+            timeframe: float,
+            end_dt: datetime | None = None,
+            start_dt: datetime | None = None,

         ) -> tuple[
             np.ndarray,
             datetime,  # start
             datetime,  # end
         ]:
+            if timeframe != 60:
+                raise DataUnavailable('Only 1m bars are supported')
+
-            array = await client.bars(
-                instrument,
+            array: np.ndarray = await client.bars(
+                mkt,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
             if len(array) == 0:
-                raise DataUnavailable
+                if (
+                    end_dt is None
+                ):
+                    raise DataUnavailable(
+                        'No history seems to exist yet?\n\n'
+                        f'{mkt}'
+                    )
+                elif (
+                    end_dt
+                    and
+                    end_dt.timestamp() < creation_time_s
+                ):
+                    # the contract can't have history
+                    # before it was created.
+                    pair_type_str: str = type(pair).__name__
+                    create_dt: datetime = from_timestamp(creation_time_s)
+                    raise DataUnavailable(
+                        f'No history prior to\n'
+                        f'`{pair_type_str}.creation_timestamp: int = '
+                        f'{pair.creation_timestamp}\n\n'
+                        f'------ deribit sux ------\n'
+                        f'WHICH IN "NORMAL PEOPLE WHO USE EPOCH TIME" form is,\n'
+                        f'creation_time_s: {creation_time_s}\n'
+                        f'create_dt: {create_dt}\n'
+                    )
+                raise NoData(
+                    f'No frame for {start_dt} -> {end_dt}\n'
+                )

-            start_dt = pendulum.from_timestamp(array[0]['time'])
-            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            start_dt = from_timestamp(array[0]['time'])
+            end_dt = from_timestamp(array[-1]['time'])
+
+            times = array['time']
+            if not times.any():
+                raise ValueError(
+                    'Bad frame with null-times?\n\n'
+                    f'{times}'
+                )
+
+            if end_dt is None:
+                inow: int = round(time.time())
+                if (inow - times[-1]) > 60:
+                    await tractor.pause()

             return array, start_dt, end_dt

-        yield get_ohlc, {'erlangs': 3, 'rate': 3}
+        yield (
+            get_ohlc,
+            {  # backfill config
+                'erlangs': 3,
+                'rate': 3,
+            }
+        )
+
+
+@async_lifo_cache()
+async def get_mkt_info(
+    fqme: str,
+
+) -> tuple[MktPair, Pair|OptionPair] | None:
+
+    # uppercase since kraken bs_mktid is always upper
+    if 'deribit' not in fqme.lower():
+        fqme += '.deribit'
+
+    mkt_mode: str = ''
+    broker, mkt_ep, venue, expiry = unpack_fqme(fqme)
+
+    # NOTE: we always upper case all tokens to be consistent with
+    # binance's symbology style for pairs, like `BTCUSDT`, but in
+    # theory we could also just keep things lower case; as long as
+    # we're consistent and the symcache matches whatever this func
+    # returns, always!
+    expiry: str = expiry.upper()
+    venue: str = venue.upper()
+    # venue_lower: str = venue.lower()
+
+    mkt_mode: str = 'option'
+
+    async with open_cached_client(
+        'deribit',
+    ) as client:
+
+        assets: dict[str, Asset] = await client.get_assets()
+        pair_str: str = mkt_ep.lower()
+
+        pair: Pair = await client.exch_info(
+            sym=pair_str,
+        )
+        mkt_mode = pair.venue
+        client.mkt_mode = mkt_mode
+
+        dst: Asset | None = assets.get(pair.bs_dst_asset)
+        src: Asset | None = assets.get(pair.bs_src_asset)
+
+        mkt = MktPair(
+            dst=dst,
+            src=src,
+            price_tick=pair.price_tick,
+            size_tick=pair.size_tick,
+            bs_mktid=pair.symbol,
+            venue=mkt_mode,
+            broker='deribit',
+            _atype=mkt_mode,
+            _fqme_without_src=True,
+
+            # expiry=pair.expiry,
+            # XXX TODO, currently we don't use it since it's
+            # already "described" in the `OptionPair.symbol: str`
+            # and if we slap in the ISO repr it's kinda hideous..
+            # -[ ] figure out the best either std
+        )
+        return mkt, pair


 async def stream_quotes(
     send_chan: trio.abc.SendChannel,
     symbols: list[str],
     feed_is_live: trio.Event,
-    loglevel: str = None,

     # startup sync
     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

 ) -> None:
-    # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
-
-    sym = symbols[0]
+    '''
+    Open a live quote stream for the market set defined by `symbols`.
+
+    Internally this starts a `cryptofeed.FeedHandler` inside an `asyncio`-side
+    task and relays through L1 and `Trade` msgs here to our `trio.Task`.
+
+    '''
+    sym = symbols[0].split('.')[0]
+    init_msgs: list[FeedInit] = []
+
+    # multiline nested `dict` formatter (since rn quote-msgs are
+    # just that).
+    pfmt: Callable[[str], str] = mk_repr(
+        # so we can see `deribit`'s delightfully mega-long bs fields..
+        maxstring=100,
+    )

     async with (
         open_cached_client('deribit') as client,
         send_chan as send_chan
     ):
+        mkt: MktPair
+        pair: Pair
+        mkt, pair = await get_mkt_info(sym)

-        init_msgs = {
-            # pass back token, and bool, signalling if we're the writer
-            # and that history has been written
-            sym: {
-                'symbol_info': {
-                    'asset_type': 'option',
-                    'price_tick_size': 0.0005
-                },
-                'shm_write_opts': {'sum_tick_vml': False},
-                'fqsn': sym,
-            },
-        }
-
-        nsym = piker_sym_to_cb_sym(sym)
-
-        async with maybe_open_price_feed(sym) as stream:
-
-            cache = await client.cache_symbols()
-
-            last_trades = (await client.last_trades(
-                cb_sym_to_deribit_inst(nsym), count=1)).trades
+        # build out init msgs according to latest spec
+        init_msgs.append(
+            FeedInit(
+                mkt_info=mkt,
+            )
+        )
+        # build `cryptofeed` feed-handle
+        cf_sym: cryptofeed.Symbol = piker_sym_to_cb_sym(sym)
+
+        from_cf: tractor.to_asyncio.LinkedTaskChannel
+        async with maybe_open_price_feed(sym) as from_cf:
+
+            # load the "last trades" summary
+            last_trades_res: cryptofeed.LastTradesResult = await client.last_trades(
+                cb_sym_to_deribit_inst(cf_sym),
+                count=1,
+            )
+            last_trades: list[Trade] = last_trades_res.trades

-            if len(last_trades) == 0:
-                last_trade = None
-                async for typ, quote in stream:
-                    if typ == 'trade':
-                        last_trade = Trade(**(quote['data']))
-                        break
-
-            else:
-                last_trade = Trade(**(last_trades[0]))
+            # TODO, do we even need this or will the above always
+            # work?
+            # if not last_trades:
+            #     await tractor.pause()
+            #     async for typ, quote in from_cf:
+            #         if typ == 'trade':
+            #             last_trade = Trade(**(quote['data']))
+            #             break
+            # else:
+            last_trade = Trade(
+                **(last_trades[0])
+            )

-            first_quote = {
+            first_quote: dict = {
                 'symbol': sym,
                 'last': last_trade.price,
                 'brokerd_ts': last_trade.timestamp,
@@ -158,13 +304,84 @@ async def stream_quotes(
                     'broker_ts': last_trade.timestamp
                 }]
             }
-            task_status.started((init_msgs, first_quote))
+            task_status.started((
+                init_msgs,
+                first_quote,
+            ))

             feed_is_live.set()

-            async for typ, quote in stream:
-                topic = quote['symbol']
-                await send_chan.send({topic: quote})
+            # NOTE XXX, static for now!
+            # => since this only handles ONE mkt feed at a time we
+            # don't need a lookup table to map interleaved quotes
+            # from multiple possible mkt-pairs
+            topic: str = mkt.bs_fqme
+
+            # deliver until cancelled
+            async for typ, ref in from_cf:
+                match typ:
+                    case 'trade':
+                        trade: cryptofeed.types.Trade = ref
+
+                        # TODO, re-impl this according to the ideal
+                        # fqme for opts that we choose!!
+                        bs_fqme: str = cb_sym_to_deribit_inst(
+                            str_to_cb_sym(trade.symbol)
+                        ).lower()
+
+                        piker_quote: dict = {
+                            'symbol': bs_fqme,
+                            'last': trade.price,
+                            'broker_ts': time.time(),
+                            # ^TODO, name this `brokerd/datad_ts` and
+                            # use `time.time_ns()` ??
+                            'ticks': [{
+                                'type': 'trade',
+                                'price': float(trade.price),
+                                'size': float(trade.amount),
+                                'broker_ts': trade.timestamp,
+                            }],
+                        }
+                        log.info(
+                            f'deribit {typ!r} quote for {sym!r}\n\n'
+                            f'{trade}\n\n'
+                            f'{pfmt(piker_quote)}\n'
+                        )
+
+                    case 'l1':
+                        book: cryptofeed.types.L1Book = ref
+
+                        # TODO, so this is where we can possibly change things
+                        # and instead lever the `MktPair.bs_fqme: str` output?
+                        bs_fqme: str = cb_sym_to_deribit_inst(
+                            str_to_cb_sym(book.symbol)
+                        ).lower()
+
+                        piker_quote: dict = {
+                            'symbol': bs_fqme,
+                            'ticks': [
+
+                                {'type': 'bid',
+                                 'price': float(book.bid_price),
+                                 'size': float(book.bid_size)},
+
+                                {'type': 'bsize',
+                                 'price': float(book.bid_price),
+                                 'size': float(book.bid_size),},
+
+                                {'type': 'ask',
+                                 'price': float(book.ask_price),
+                                 'size': float(book.ask_size),},
+
+                                {'type': 'asize',
+                                 'price': float(book.ask_price),
+                                 'size': float(book.ask_size),}
+                            ]
+                        }
+
+                await send_chan.send({
+                    topic: piker_quote,
+                })


 @tractor.context
@ -174,12 +391,21 @@ async def open_symbol_search(
|
||||||
async with open_cached_client('deribit') as client:
|
async with open_cached_client('deribit') as client:
|
||||||
|
|
||||||
# load all symbols locally for fast search
|
# load all symbols locally for fast search
|
||||||
cache = await client.cache_symbols()
|
# cache = client._pairs
|
||||||
await ctx.started()
|
await ctx.started()
|
||||||
|
|
||||||
async with ctx.open_stream() as stream:
|
async with ctx.open_stream() as stream:
|
||||||
|
pattern: str
|
||||||
async for pattern in stream:
|
async for pattern in stream:
|
||||||
# repack in dict form
|
|
||||||
await stream.send(
|
# NOTE: pattern fuzzy-matching is done within
|
||||||
await client.search_symbols(pattern))
|
# the methd impl.
|
||||||
|
pairs: dict[str, Pair] = await client.search_symbols(
|
||||||
|
pattern,
|
||||||
|
)
|
||||||
|
# repack in fqme-keyed table
|
||||||
|
byfqme: dict[str, Pair] = {}
|
||||||
|
for pair in pairs.values():
|
||||||
|
byfqme[pair.bs_fqme] = pair
|
||||||
|
|
||||||
|
await stream.send(byfqme)
|
||||||
|
|
|
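[review note] Purely a style sketch, no behavior change: the fqme-keyed repack loop above could also be written as a comprehension.

byfqme: dict[str, Pair] = {
    pair.bs_fqme: pair
    for pair in pairs.values()
}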
@ -0,0 +1,196 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Per market data-type definitions and schemas types.
+
+"""
+from __future__ import annotations
+import pendulum
+from typing import (
+    Literal,
+    Optional,
+)
+from decimal import Decimal
+
+from piker.types import Struct
+
+
+# API endpoint paths by venue / sub-API
+_domain: str = 'deribit.com'
+_url = f'https://www.{_domain}'
+
+# WEBsocketz
+_ws_url: str = f'wss://www.{_domain}/ws/api/v2'
+
+# test nets
+_testnet_ws_url: str = f'wss://test.{_domain}/ws/api/v2'
+
+MarketType = Literal[
+    'option'
+]
+
+
+def get_api_eps(venue: MarketType) -> tuple[str, str]:
+    '''
+    Return API ep root paths per venue.
+
+    '''
+    return {
+        'option': (
+            _ws_url,
+        ),
+    }[venue]
+
+
+class Pair(Struct, frozen=True, kw_only=True):
+
+    symbol: str
+
+    # src
+    quote_currency: str  # 'BTC'
+
+    # dst
+    base_currency: str  # "BTC",
+
+    tick_size: float  # 0.0001 # [{'above_price': 0.005, 'tick_size': 0.0005}]
+    tick_size_steps: list[dict[str, float]]
+
+    @property
+    def price_tick(self) -> Decimal:
+        return Decimal(str(self.tick_size_steps[0]['above_price']))
+
+    @property
+    def size_tick(self) -> Decimal:
+        return Decimal(str(self.tick_size))
+
+    @property
+    def bs_fqme(self) -> str:
+        return f'{self.symbol}'
+
+    @property
+    def bs_mktid(self) -> str:
+        return f'{self.symbol}.{self.venue}'
+
+
+class OptionPair(Pair, frozen=True):
+
+    taker_commission: float  # 0.0003
+    strike: float  # 5000.0
+    settlement_period: str  # 'day'
+    settlement_currency: str  # "BTC",
+    rfq: bool  # false
+    price_index: str  # 'btc_usd'
+    option_type: str  # 'call'
+    min_trade_amount: float  # 0.1
+    maker_commission: float  # 0.0003
+    kind: str  # 'option'
+    is_active: bool  # true
+    instrument_type: str  # 'reversed'
+    instrument_name: str  # 'BTC-1SEP24-55000-C'
+    instrument_id: int  # 364671
+    expiration_timestamp: int  # 1725177600000
+    creation_timestamp: int  # 1724918461000
+    counter_currency: str  # 'USD'
+    contract_size: float  # '1.0'
+    block_trade_tick_size: float  # '0.0001'
+    block_trade_min_trade_amount: int  # '25'
+    block_trade_commission: float  # '0.003'
+
+    # NOTE: see `.data._symcache.SymbologyCache.load()` for why
+    ns_path: str = 'piker.brokers.deribit:OptionPair'
+
+    # TODO, impl this without the MM:SS part of
+    # the `'THH:MM:SS..'` etc..
+    @property
+    def expiry(self) -> str:
+        iso_date = pendulum.from_timestamp(
+            self.expiration_timestamp / 1000
+        ).isoformat()
+        return iso_date
+
+    @property
+    def venue(self) -> str:
+        return f'{self.instrument_type}_option'
+
+    @property
+    def bs_fqme(self) -> str:
+        return f'{self.symbol}'
+
+    @property
+    def bs_src_asset(self) -> str:
+        return f'{self.quote_currency}'
+
+    @property
+    def bs_dst_asset(self) -> str:
+        return f'{self.symbol}'
+
+
+PAIRTYPES: dict[MarketType, Pair] = {
+    'option': OptionPair,
+}
+
+
+class JSONRPCResult(Struct):
+    id: int
+    usIn: int
+    usOut: int
+    usDiff: int
+    testnet: bool
+    jsonrpc: str = '2.0'
+    error: Optional[dict] = None
+    result: Optional[list[dict]] = None
+
+
+class JSONRPCChannel(Struct):
+    method: str
+    params: dict
+    jsonrpc: str = '2.0'
+
+
+class KLinesResult(Struct):
+    low: list[float]
+    cost: list[float]
+    high: list[float]
+    open: list[float]
+    close: list[float]
+    ticks: list[int]
+    status: str
+    volume: list[float]
+
+
+class Trade(Struct):
+    iv: float
+    price: float
+    amount: float
+    trade_id: str
+    contracts: float
+    direction: str
+    trade_seq: int
+    timestamp: int
+    mark_price: float
+    index_price: float
+    tick_direction: int
+    instrument_name: str
+    combo_id: Optional[str] = '',
+    combo_trade_id: Optional[int] = 0,
+    block_trade_id: Optional[str] = '',
+    block_trade_leg_count: Optional[int] = 0,
+
+
+class LastTradesResult(Struct):
+    trades: list[Trade]
+    has_more: bool
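[review note] Heads up on the `Trade` struct defaults above: the trailing commas on `combo_id: Optional[str] = '',` and the three fields after it make each default a 1-tuple, not the scalar. A quick demonstration of the Python semantics:

# the comma, not the value, is what builds a tuple:
combo_id = '',
assert combo_id == ('',)
assert combo_id != ''

# presumably intended instead (sketch):
# combo_id: Optional[str] = ''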
@ -100,7 +100,7 @@ async def data_reset_hack(
         log.warning(
             no_setup_msg
             +
-            f'REQUIRES A `vnc_addrs: array` ENTRY'
+            'REQUIRES A `vnc_addrs: array` ENTRY'
         )

     vnc_host, vnc_port = vnc_sockaddr.get(

@ -259,7 +259,7 @@ def i3ipc_xdotool_manual_click_hack() -> None:
             timeout=timeout,
         )

         # re-activate and focus original window
         subprocess.call([
             'xdotool',
             'windowactivate', '--sync', str(orig_win_id),
@ -287,9 +287,31 @@ class Client:
         self.conf = config

         # NOTE: the ib.client here is "throttled" to 45 rps by default
-        self.ib = ib
+        self.ib: IB = ib
         self.ib.RaiseRequestErrors: bool = True

+        # self._acnt_names: set[str] = {}
+        self._acnt_names: list[str] = []
+
+    @property
+    def acnts(self) -> list[str]:
+        # return list(self._acnt_names)
+        return self._acnt_names
+
+    def __repr__(self) -> str:
+        return (
+            f'<{type(self).__name__}('
+            f'ib={self.ib} '
+            f'acnts={self.acnts}'
+
+            # TODO: we need to mask out acnt-#s and other private
+            # infos if we're going to console this!
+            # f'  |_.conf:\n'
+            # f'    {pformat(self.conf)}\n'
+
+            ')>'
+        )
+
     async def get_fills(self) -> list[Fill]:
         '''
         Return list of rents `Fills` from trading session.
@ -376,55 +398,63 @@ class Client:
             # whatToShow='MIDPOINT',
             # whatToShow='TRADES',
         )
-        log.info(
-            f'REQUESTING {ib_duration_str} worth {bar_size} BARS\n'
-            f'fqme: {fqme}\n'
-            f'global _enters: {_enters}\n'
-            f'kwargs: {pformat(kwargs)}\n'
-        )

         bars = await self.ib.reqHistoricalDataAsync(
             **kwargs,
         )

+        query_info: str = (
+            f'REQUESTING IB history BARS\n'
+            f' ------ - ------\n'
+            f'dt_duration: {dt_duration}\n'
+            f'ib_duration_str: {ib_duration_str}\n'
+            f'bar_size: {bar_size}\n'
+            f'fqme: {fqme}\n'
+            f'actor-global _enters: {_enters}\n'
+            f'kwargs: {pformat(kwargs)}\n'
+        )
         # tail case if no history for range or none prior.
+        # NOTE: there's actually 3 cases here to handle (and
+        # this should be read alongside the implementation of
+        # `.reqHistoricalDataAsync()`):
+        # - a timeout occurred in which case insync internals return
+        #   an empty list thing with bars.clear()...
+        # - no data exists for the period likely due to
+        #   a weekend, holiday or other non-trading period prior to
+        #   ``end_dt`` which exceeds the ``duration``,
+        # - LITERALLY this is the start of the mkt's history!
         if not bars:
-            # NOTE: there's actually 3 cases here to handle (and
-            # this should be read alongside the implementation of
-            # `.reqHistoricalDataAsync()`):
-            # - a timeout occurred in which case insync internals return
-            #   an empty list thing with bars.clear()...
-            # - no data exists for the period likely due to
-            #   a weekend, holiday or other non-trading period prior to
-            #   ``end_dt`` which exceeds the ``duration``,
-            # - LITERALLY this is the start of the mkt's history!
-
-            # sync requester for debugging empty frame cases
-            def get_hist():
-                return self.ib.reqHistoricalData(**kwargs)
-
-            assert get_hist
-            import pdbp
-            pdbp.set_trace()
-
-            return [], np.empty(0), dt_duration
+            # TODO: figure out wut's going on here.
+
+            # TODO: is this handy, a sync requester for tinkering
+            # with empty frame cases?
+            # def get_hist():
+            #     return self.ib.reqHistoricalData(**kwargs)
+            # import pdbp
+            # pdbp.set_trace()
+
+            log.critical(
+                'STUPID IB SAYS NO HISTORY\n\n'
+                + query_info
+            )
+
             # TODO: we could maybe raise ``NoData`` instead if we
-            # rewrite the method in the first case? right now there's no
-            # way to detect a timeout.
-
-        # NOTE XXX: ensure minimum duration in bars B)
-        # => we recursively call this method until we get at least
-        # as many bars such that they sum in aggregate to the the
-        # desired total time (duration) at most.
-        # XXX XXX XXX
-        # WHY DID WE EVEN NEED THIS ORIGINALLY!?
-        # XXX XXX XXX
-        # - if you query over a gap and get no data
-        #   that may short circuit the history
+            # rewrite the method in the first case?
+            # right now there's no way to detect a timeout..
+            return [], np.empty(0), dt_duration
+
+        log.info(query_info)
+        # NOTE XXX: ensure minimum duration in bars?
+        # => recursively call this method until we get at least as
+        # many bars such that they sum in aggregate to the the
+        # desired total time (duration) at most.
+        # - if you query over a gap and get no data
+        #   that may short circuit the history
         if (
-            end_dt
-            and False
+            # XXX XXX XXX
+            # => WHY DID WE EVEN NEED THIS ORIGINALLY!? <=
+            # XXX XXX XXX
+            False
+            and end_dt
         ):
             nparr: np.ndarray = bars_to_np(bars)
             times: np.ndarray = nparr['time']
@ -927,7 +957,10 @@ class Client:
                 warnset = True

             else:
-                log.info(f'Got first quote for {contract}')
+                log.info(
+                    'Got first quote for contract\n'
+                    f'{contract}\n'
+                )
                 break
         else:
             if timeouterr and raise_on_timeout:

@ -991,8 +1024,12 @@ class Client:
             outsideRth=True,

             optOutSmartRouting=True,
+            # TODO: need to understand this setting better as
+            # it pertains to shit ass mms..
             routeMarketableToBbo=True,

             designatedLocation='SMART',

             # TODO: make all orders GTC?
             # https://interactivebrokers.github.io/tws-api/classIBApi_1_1Order.html#a95539081751afb9980f4c6bd1655a6ba
             # goodTillDate=f"yyyyMMdd-HH:mm:ss",

@ -1120,8 +1157,8 @@ def get_config() -> dict[str, Any]:
         names = list(accounts.keys())
         accts = section['accounts'] = bidict(accounts)
         log.info(
-            f'brokers.toml defines {len(accts)} accounts: '
-            f'{pformat(names)}'
+            f'{path} defines {len(accts)} account aliases:\n'
+            f'{pformat(names)}\n'
         )

     if section is None:

@ -1188,7 +1225,7 @@ async def load_aio_clients(
         try_ports = list(try_ports.values())

     _err = None
-    accounts_def = config.load_accounts(['ib'])
+    accounts_def: dict[str, str] = config.load_accounts(['ib'])
     ports = try_ports if port is None else [port]
     combos = list(itertools.product(hosts, ports))
     accounts_found: dict[str, Client] = {}

@ -1227,7 +1264,9 @@ async def load_aio_clients(
             client = Client(ib=ib, config=conf)

             # update all actor-global caches
-            log.info(f"Caching client for {sockaddr}")
+            log.runtime(
+                f'Connected and caching `Client` @ {sockaddr!r}'
+            )
             _client_cache[sockaddr] = client
             break

@ -1242,37 +1281,59 @@ async def load_aio_clients(
             OSError,
         ) as ce:
             _err = ce
-            log.warning(
-                f'Failed to connect on {host}:{port} for {i} time with,\n'
-                f'{ib.client.apiError.value()}\n'
-                'retrying with a new client id..')
+            message: str = (
+                f'Failed to connect on {host}:{port} after {i} tries with\n'
+                f'{ib.client.apiError.value()!r}\n\n'
+                'Retrying with a new client id..\n'
+            )
+            log.runtime(message)
+        else:
+            # XXX report loudly if we never established after all
+            # re-tries
+            log.warning(message)

     # Pre-collect all accounts available for this
     # connection and map account names to this client
     # instance.
     for value in ib.accountValues():
-        acct_number = value.account
+        acct_number: str = value.account

-        entry = accounts_def.inverse.get(acct_number)
-        if not entry:
+        acnt_alias: str = accounts_def.inverse.get(acct_number)
+        if not acnt_alias:
+
+            # TODO: should we constuct the below reco-ex from
+            # the existing config content?
+            _, path = config.load(
+                conf_name='brokers',
+            )
             raise ValueError(
-                'No section in brokers.toml for account:'
-                f' {acct_number}\n'
-                f'Please add entry to continue using this API client'
+                'No alias in account section for account!\n'
+                f'Please add an acnt alias entry to your {path}\n'
+                'For example,\n\n'

+                '[ib.accounts]\n'
+                'margin = {accnt_number!r}\n'
+                '^^^^^^ <- you need this part!\n\n'

+                'This ensures `piker` will not leak private acnt info '
+                'to console output by default!\n'
             )

         # surjection of account names to operating clients.
-        if acct_number not in accounts_found:
-            accounts_found[entry] = client
+        if acnt_alias not in accounts_found:
+            accounts_found[acnt_alias] = client
+            # client._acnt_names.add(acnt_alias)
+            client._acnt_names.append(acnt_alias)

-    log.info(
-        f'Loaded accounts for client @ {host}:{port}\n'
-        f'{pformat(accounts_found)}'
-    )
+    if accounts_found:
+        log.info(
+            f'Loaded accounts for api client\n\n'
+            f'{pformat(accounts_found)}\n'
+        )

     # XXX: why aren't we just updating this directy above
     # instead of using the intermediary `accounts_found`?
     _accounts2clients.update(accounts_found)

     # if we have no clients after the scan loop then error out.
     if not _client_cache:
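[review note] For reviewers unfamiliar with `bidict`: the alias lookup above leans on its `.inverse` view to map account numbers back to config aliases. A self-contained sketch with illustrative values (the alias and account number are made up):

from bidict import bidict

# alias -> account number, as loaded from an accounts section
accounts_def = bidict({'margin': 'U1234567'})

assert accounts_def['margin'] == 'U1234567'
# reverse direction, as used in the hunk above:
assert accounts_def.inverse['U1234567'] == 'margin'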
@ -1472,7 +1533,7 @@ async def open_aio_client_method_relay(
         msg: tuple[str, dict] | dict | None = await from_trio.get()
         match msg:
             case None:  # termination sentinel
-                print('asyncio PROXY-RELAY SHUTDOWN')
+                log.info('asyncio `Client` method-proxy SHUTDOWN!')
                 break

             case (meth_name, kwargs):

@ -1183,7 +1183,14 @@ async def deliver_trade_events(
                     pos
                     and fill
                 ):
-                    assert fill.commissionReport == cr
+                    now_cr: CommissionReport = fill.commissionReport
+                    if (now_cr != cr):
+                        log.warning(
+                            'UhhHh ib updated the commission report mid-fill..?\n'
+                            f'was: {pformat(cr)}\n'
+                            f'now: {pformat(now_cr)}\n'
+                        )
+
                     await emit_pp_update(
                         ems_stream,
                         accounts_def,

@ -671,8 +671,8 @@ async def _setup_quote_stream(
         # making them mostly useless and explains why the scanner
         # is always slow XD
         # '293',  # Trade count for day
-        '294',  # Trade rate / minute
-        '295',  # Vlm rate / minute
+        # '294',  # Trade rate / minute
+        # '295',  # Vlm rate / minute
     ),
     contract: Contract | None = None,

@ -915,9 +915,13 @@ async def stream_quotes(
     if first_ticker:
         first_quote: dict = normalize(first_ticker)
-        log.info(
-            'Rxed init quote:\n'
-            f'{pformat(first_quote)}'
+
+        # TODO: we need a stack-oriented log levels filters for
+        # this!
+        # log.info(message, filter={'stack': 'live_feed'}) ?
+        log.runtime(
+            'Rxed init quote:\n\n'
+            f'{pformat(first_quote)}\n'
         )

     # NOTE: it might be outside regular trading hours for

@ -969,7 +973,11 @@ async def stream_quotes(
             raise_on_timeout=True,
         )
         first_quote: dict = normalize(first_ticker)
-        log.info(
+
+        # TODO: we need a stack-oriented log levels filters for
+        # this!
+        # log.info(message, filter={'stack': 'live_feed'}) ?
+        log.runtime(
             'Rxed init quote:\n'
             f'{pformat(first_quote)}'
         )

@ -31,7 +31,11 @@ from typing import (
 )

 from bidict import bidict
-import pendulum
+from pendulum import (
+    DateTime,
+    parse,
+    from_timestamp,
+)
 from ib_insync import (
     Contract,
     Commodity,

@ -66,10 +70,11 @@ tx_sort: Callable = partial(
     iter_by_dt,
     parsers={
         'dateTime': parse_flex_dt,
-        'datetime': pendulum.parse,
-        # for some some fucking 2022 and
-        # back options records...fuck me.
-        'date': pendulum.parse,
+        'datetime': parse,
+
+        # XXX: for some some fucking 2022 and
+        # back options records.. f@#$ me..
+        'date': parse,
     }
 )

@ -89,15 +94,38 @@ def norm_trade(
     conid: int = str(record.get('conId') or record['conid'])
     bs_mktid: str = str(conid)
-    comms = record.get('commission')
-    if comms is None:
-        comms = -1*record['ibCommission']
-
-    price = record.get('price') or record['tradePrice']
+
+    # NOTE: sometimes weird records (like BTTX?)
+    # have no field for this?
+    comms: float = -1 * (
+        record.get('commission')
+        or record.get('ibCommission')
+        or 0
+    )
+    if not comms:
+        log.warning(
+            'No commissions found for record?\n'
+            f'{pformat(record)}\n'
+        )
+
+    price: float = (
+        record.get('price')
+        or record.get('tradePrice')
+    )
+    if price is None:
+        log.warning(
+            'No `price` field found in record?\n'
+            'Skipping normalization..\n'
+            f'{pformat(record)}\n'
+        )
+        return None

     # the api doesn't do the -/+ on the quantity for you but flex
     # records do.. are you fucking serious ib...!?
-    size = record.get('quantity') or record['shares'] * {
+    size: float|int = (
+        record.get('quantity')
+        or record['shares']
+    ) * {
         'BOT': 1,
         'SLD': -1,
     }[record['side']]

@ -128,26 +156,31 @@ def norm_trade(
         # otype = tail[6]
         # strike = tail[7:]

-        print(f'skipping opts contract {symbol}')
+        log.warning(
+            f'Skipping option contract -> NO SUPPORT YET!\n'
+            f'{symbol}\n'
+        )
         return None

     # timestamping is way different in API records
-    dtstr = record.get('datetime')
-    date = record.get('date')
-    flex_dtstr = record.get('dateTime')
+    dtstr: str = record.get('datetime')
+    date: str = record.get('date')
+    flex_dtstr: str = record.get('dateTime')

     if dtstr or date:
-        dt = pendulum.parse(dtstr or date)
+        dt: DateTime = parse(dtstr or date)

     elif flex_dtstr:
         # probably a flex record with a wonky non-std timestamp..
-        dt = parse_flex_dt(record['dateTime'])
+        dt: DateTime = parse_flex_dt(record['dateTime'])

     # special handling of symbol extraction from
     # flex records using some ad-hoc schema parsing.
-    asset_type: str = record.get(
-        'assetCategory'
-    ) or record.get('secType', 'STK')
+    asset_type: str = (
+        record.get('assetCategory')
+        or record.get('secType')
+        or 'STK'
+    )

     if (expiry := (
         record.get('lastTradeDateOrContractMonth')

@ -357,6 +390,7 @@ def norm_trade_records(
         if txn is None:
             continue

+        # inject txns sorted by datetime
         insort(
             records,
             txn,
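[review note] The `insort()` call above keeps `records` sorted as each txn is injected. A minimal sketch of the pattern; note the `key=` parameter requires Python 3.10+, and the dicts here are illustrative stand-ins for `Transaction`s:

from bisect import insort

records: list[dict] = []
for txn in ({'dt': 3}, {'dt': 1}, {'dt': 2}):
    # binary-search insert keyed on the txn's datetime field
    insort(records, txn, key=lambda t: t['dt'])

assert [t['dt'] for t in records] == [1, 2, 3]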
@ -405,7 +439,7 @@ def api_trades_to_ledger_entries(
             txn_dict[attr_name] = val

         tid = str(txn_dict['execId'])
-        dt = pendulum.from_timestamp(txn_dict['time'])
+        dt = from_timestamp(txn_dict['time'])
         txn_dict['datetime'] = str(dt)
         acctid = accounts[txn_dict['acctNumber']]

@ -209,7 +209,10 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
             break

     ib_client = proxy._aio_ns.ib
-    log.info(f'Using {ib_client} for symbol search')
+    log.info(
+        f'Using API client for symbol-search\n'
+        f'{ib_client}\n'
+    )

     last = time.time()
     async for pattern in stream:

@ -294,7 +297,7 @@ async def open_symbol_search(ctx: tractor.Context) -> None:
                 elif stock_results:
                     break
                 # else:
-                await tractor.pause()
+                #     await tractor.pause()

                 # # match against our ad-hoc set immediately
                 # adhoc_matches = fuzzy.extract(

@ -522,7 +525,21 @@ async def get_mkt_info(
         venue = con.primaryExchange or con.exchange

     price_tick: Decimal = Decimal(str(details.minTick))
-    # price_tick: Decimal = Decimal('0.01')
+    ib_min_tick_gt_2: Decimal = Decimal('0.01')
+    if (
+        price_tick < ib_min_tick_gt_2
+    ):
+        # TODO: we need to add some kinda dynamic rounding sys
+        # to our MktPair i guess?
+        # not sure where the logic should sit, but likely inside
+        # the `.clearing._ems` i suppose...
+        log.warning(
+            'IB seems to disallow a min price tick < 0.01 '
+            'when the price is > 2.0..?\n'
+            f'Decreasing min tick precision for {fqme} to 0.01'
+        )
+        # price_tick = ib_min_tick
+        # await tractor.pause()

     if atype == 'stock':
         # XXX: GRRRR they don't support fractional share sizes for
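[review note] On the `Decimal(str(details.minTick))` line kept above: the `str()` round-trip matters since constructing a `Decimal` straight from a float drags in the float's binary representation error. Quick check:

from decimal import Decimal

assert Decimal(str(0.0001)) == Decimal('0.0001')
# direct-from-float captures the binary approximation instead:
assert Decimal(0.0001) != Decimal('0.0001')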
@ -27,8 +27,8 @@ from typing import (
 )
 import time

+import httpx
 import pendulum
-import asks
 import numpy as np
 import urllib.parse
 import hashlib

@ -60,6 +60,11 @@ log = get_logger('piker.brokers.kraken')

 # <uri>/<version>/
 _url = 'https://api.kraken.com/0'

+_headers: dict[str, str] = {
+    'User-Agent': 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
+}
+
 # TODO: this is the only backend providing this right?
 # in which case we should drop it from the defaults and
 # instead make a custom fields descr in this module!

@ -135,16 +140,15 @@ class Client:
     def __init__(
         self,
         config: dict[str, str],
+        httpx_client: httpx.AsyncClient,
+
         name: str = '',
         api_key: str = '',
         secret: str = ''
     ) -> None:
-        self._sesh = asks.Session(connections=4)
-        self._sesh.base_location = _url
-        self._sesh.headers.update({
-            'User-Agent':
-                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
-        })
+        self._sesh: httpx.AsyncClient = httpx_client
         self._name = name
         self._api_key = api_key
         self._secret = secret

@ -166,10 +170,9 @@ class Client:
         method: str,
         data: dict,
     ) -> dict[str, Any]:
-        resp = await self._sesh.post(
-            path=f'/public/{method}',
+        resp: httpx.Response = await self._sesh.post(
+            url=f'/public/{method}',
             json=data,
-            timeout=float('inf')
         )
         return resproc(resp, log)

@ -180,18 +183,18 @@ class Client:
         uri_path: str
     ) -> dict[str, Any]:
         headers = {
-            'Content-Type':
-                'application/x-www-form-urlencoded',
-            'API-Key':
-                self._api_key,
-            'API-Sign':
-                get_kraken_signature(uri_path, data, self._secret)
+            'Content-Type': 'application/x-www-form-urlencoded',
+            'API-Key': self._api_key,
+            'API-Sign': get_kraken_signature(
+                uri_path,
+                data,
+                self._secret,
+            ),
         }
-        resp = await self._sesh.post(
-            path=f'/private/{method}',
+        resp: httpx.Response = await self._sesh.post(
+            url=f'/private/{method}',
             data=data,
             headers=headers,
-            timeout=float('inf')
         )
         return resproc(resp, log)

@ -665,24 +668,36 @@ class Client:
 @acm
 async def get_client() -> Client:

-    conf = get_config()
-    if conf:
-        client = Client(
-            conf,
-
-            # TODO: don't break these up and just do internal
-            # conf lookups instead..
-            name=conf['key_descr'],
-            api_key=conf['api_key'],
-            secret=conf['secret']
-        )
-    else:
-        client = Client({})
-
-    # at startup, load all symbols, and asset info in
-    # batch requests.
-    async with trio.open_nursery() as nurse:
-        nurse.start_soon(client.get_assets)
-        await client.get_mkt_pairs()
-
-    yield client
+    conf: dict[str, Any] = get_config()
+    async with httpx.AsyncClient(
+        base_url=_url,
+        headers=_headers,
+
+        # TODO: is there a way to numerate this?
+        # https://www.python-httpx.org/advanced/clients/#why-use-a-client
+        # connections=4
+    ) as trio_client:
+        if conf:
+            client = Client(
+                conf,
+                httpx_client=trio_client,
+
+                # TODO: don't break these up and just do internal
+                # conf lookups instead..
+                name=conf['key_descr'],
+                api_key=conf['api_key'],
+                secret=conf['secret']
+            )
+        else:
+            client = Client(
+                conf={},
+                httpx_client=trio_client,
+            )
+
+        # at startup, load all symbols, and asset info in
+        # batch requests.
+        async with trio.open_nursery() as nurse:
+            nurse.start_soon(client.get_assets)
+            await client.get_mkt_pairs()
+
+        yield client
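[review note] The `asks` -> `httpx` swap above hinges on a long-lived `httpx.AsyncClient(base_url=...)` so call-sites only pass relative paths. A runnable sketch of just that pattern; the public `Time` endpoint is an illustrative no-auth example, not taken from this diff:

import httpx
import trio

async def main() -> None:
    async with httpx.AsyncClient(
        base_url='https://api.kraken.com/0',
    ) as client:
        # relative `url` is resolved against `base_url`
        resp: httpx.Response = await client.post(url='/public/Time')
        print(resp.json())

trio.run(main)  # httpx supports trio via anyio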
@ -612,18 +612,18 @@ async def open_trade_dialog(

         # enter relay loop
         await handle_order_updates(
-            client,
-            ws,
-            stream,
-            ems_stream,
-            apiflows,
-            ids,
-            reqids2txids,
-            acnt,
-            api_trans,
-            acctid,
-            acc_name,
-            token,
+            client=client,
+            ws=ws,
+            ws_stream=stream,
+            ems_stream=ems_stream,
+            apiflows=apiflows,
+            ids=ids,
+            reqids2txids=reqids2txids,
+            acnt=acnt,
+            ledger=ledger,
+            acctid=acctid,
+            acc_name=acc_name,
+            token=token,
         )

@ -639,7 +639,8 @@ async def handle_order_updates(

     # transaction records which will be updated
     # on new trade clearing events (aka order "fills")
-    ledger_trans: dict[str, Transaction],
+    ledger: TransactionLedger,
+    # ledger_trans: dict[str, Transaction],
     acctid: str,
     acc_name: str,
     token: str,

@ -699,7 +700,8 @@ async def handle_order_updates(
                     # if tid not in ledger_trans
                 }
                 for tid, trade in trades.items():
-                    assert tid not in ledger_trans
+                    # assert tid not in ledger_trans
+                    assert tid not in ledger
                     txid = trade['ordertxid']
                     reqid = trade.get('userref')

@ -747,11 +749,17 @@ async def handle_order_updates(
                         client,
                         api_name_set='wsname',
                     )
-                    ppmsgs = trades2pps(
-                        acnt,
-                        acctid,
-                        new_trans,
-                    )
+                    ppmsgs: list[BrokerdPosition] = trades2pps(
+                        acnt=acnt,
+                        ledger=ledger,
+                        acctid=acctid,
+                        new_trans=new_trans,
+                    )
+                    # ppmsgs = trades2pps(
+                    #     acnt,
+                    #     acctid,
+                    #     new_trans,
+                    # )
                     for pp_msg in ppmsgs:
                         await ems_stream.send(pp_msg)

@ -16,10 +16,9 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 '''
-Kucoin broker backend
+Kucoin cex API backend.

 '''
 from contextlib import (
     asynccontextmanager as acm,
     aclosing,

@ -42,7 +41,7 @@ import wsproto
 from uuid import uuid4

 from trio_typing import TaskStatus
-import asks
+import httpx
 from bidict import bidict
 import numpy as np
 import pendulum

@ -112,6 +111,10 @@ class KucoinMktPair(Struct, frozen=True):
     quoteMaxSize: float
     quoteMinSize: float
     symbol: str  # our bs_mktid, kucoin's internal id
+    feeCategory: int
+    makerFeeCoefficient: float
+    takerFeeCoefficient: float
+    st: bool


 class AccountTrade(Struct, frozen=True):

@ -212,8 +215,12 @@ def get_config() -> BrokerConfig | None:

 class Client:

-    def __init__(self) -> None:
-        self._config: BrokerConfig | None = get_config()
+    def __init__(
+        self,
+        httpx_client: httpx.AsyncClient,
+    ) -> None:
+        self._http: httpx.AsyncClient = httpx_client
+        self._config: BrokerConfig|None = get_config()
         self._pairs: dict[str, KucoinMktPair] = {}
         self._fqmes2mktids: bidict[str, str] = bidict()
         self._bars: list[list[float]] = []

@ -227,18 +234,24 @@ class Client:
     ) -> dict[str, str | bytes]:
         '''
-        Generate authenticated request headers
+        Generate authenticated request headers:
+
         https://docs.kucoin.com/#authentication
+        https://www.kucoin.com/docs/basic-info/connection-method/authentication/creating-a-request
+        https://www.kucoin.com/docs/basic-info/connection-method/authentication/signing-a-message
+
         '''
+
         if not self._config:
             raise ValueError(
-                'No config found when trying to send authenticated request')
+                'No config found when trying to send authenticated request'
+            )

         str_to_sign = (
             str(int(time.time() * 1000))
-            + action + f'/api/{api}/{endpoint.lstrip("/")}'
+            +
+            action
+            +
+            f'/api/{api}/{endpoint.lstrip("/")}'
         )

         signature = base64.b64encode(
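[review note] The docstring links added above describe the kucoin signing scheme the surrounding code implements: base64(HMAC-SHA256(secret, timestamp + METHOD + path)). Distilled into a standalone sketch; the function name and argument shapes are illustrative, not this module's internals:

import base64
import hashlib
import hmac
import time

def kc_sign(secret: str, method: str, path: str) -> bytes:
    # e.g. method='GET', path='/api/v2/symbols'
    str_to_sign = str(int(time.time() * 1000)) + method + path
    return base64.b64encode(
        hmac.new(
            secret.encode('utf-8'),
            str_to_sign.encode('utf-8'),
            hashlib.sha256,
        ).digest()
    )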
         signature = base64.b64encode(

@ -249,6 +262,7 @@ class Client:
             ).digest()
         )

+        # TODO: can we cache this between calls?
         passphrase = base64.b64encode(
             hmac.new(
                 self._config.key_secret.encode('utf-8'),

@ -270,8 +284,10 @@ class Client:
         self,
         action: Literal['POST', 'GET'],
         endpoint: str,
+
         api: str = 'v2',
         headers: dict = {},
+
     ) -> Any:
         '''
         Generic request wrapper for Kucoin API

@ -284,14 +300,19 @@ class Client:
             api,
         )

-        api_url = f'https://api.kucoin.com/api/{api}/{endpoint}'
-
-        res = await asks.request(action, api_url, headers=headers)
-
-        json = res.json()
-        if 'data' in json:
-            return json['data']
+        req_meth: Callable = getattr(
+            self._http,
+            action.lower(),
+        )
+        res = await req_meth(
+            url=f'/{api}/{endpoint}',
+            headers=headers,
+        )
+        json: dict = res.json()
+        if (data := json.get('data')) is not None:
+            return data
         else:
+            api_url: str = self._http.base_url
             log.error(
                 f'Error making request to {api_url} ->\n'
                 f'{pformat(res)}'

@ -311,7 +332,7 @@ class Client:
         '''
         token_type = 'private' if private else 'public'
         try:
-            data: dict[str, Any] | None = await self._request(
+            data: dict[str, Any]|None = await self._request(
                 'POST',
                 endpoint=f'bullet-{token_type}',
                 api='v1'

@ -349,8 +370,8 @@ class Client:
         currencies: dict[str, Currency] = {}
         entries: list[dict] = await self._request(
             'GET',
-            api='v1',
             endpoint='currencies',
+            api='v1',
         )
         for entry in entries:
             curr = Currency(**entry).copy()

@ -366,7 +387,10 @@ class Client:
         dict[str, KucoinMktPair],
         bidict[str, KucoinMktPair],
     ]:
-        entries = await self._request('GET', 'symbols')
+        entries = await self._request(
+            'GET',
+            endpoint='symbols',
+        )
         log.info(f' {len(entries)} Kucoin market pairs fetched')

         pairs: dict[str, KucoinMktPair] = {}

@ -567,13 +591,21 @@ def fqme_to_kucoin_sym(

 @acm
 async def get_client() -> AsyncGenerator[Client, None]:
-    client = Client()
-
-    async with trio.open_nursery() as n:
-        n.start_soon(client.get_mkt_pairs)
-        await client.get_currencies()
+    '''
+    Load an API `Client` preconfigured from user settings
+
+    '''
+    async with (
+        httpx.AsyncClient(
+            base_url='https://api.kucoin.com/api',
+        ) as trio_client,
+    ):
+        client = Client(httpx_client=trio_client)
+        async with trio.open_nursery() as tn:
+            tn.start_soon(client.get_mkt_pairs)
+            await client.get_currencies()

-    yield client
+        yield client


 @tractor.context

@ -609,7 +641,7 @@ async def open_ping_task(
             await trio.sleep((ping_interval - 1000) / 1000)
             await ws.send_msg({'id': connect_id, 'type': 'ping'})

-    log.info('Starting ping task for kucoin ws connection')
+    log.warning('Starting ping task for kucoin ws connection')
     n.start_soon(ping_server)

     yield

@ -621,9 +653,14 @@ async def open_ping_task(
 async def get_mkt_info(
     fqme: str,

-) -> tuple[MktPair, KucoinMktPair]:
+) -> tuple[
+    MktPair,
+    KucoinMktPair,
+]:
     '''
-    Query for and return a `MktPair` and `KucoinMktPair`.
+    Query for and return both a `piker.accounting.MktPair` and
+    `KucoinMktPair` from provided `fqme: str`
+    (fully-qualified-market-endpoint).

     '''
     async with open_cached_client('kucoin') as client:

@ -698,6 +735,8 @@ async def stream_quotes(

     log.info(f'Starting up quote stream(s) for {symbols}')
     for sym_str in symbols:
+        mkt: MktPair
+        pair: KucoinMktPair
         mkt, pair = await get_mkt_info(sym_str)
         init_msgs.append(
             FeedInit(mkt_info=mkt)

@ -705,7 +744,11 @@ async def stream_quotes(

     ws: NoBsWs
     token, ping_interval = await client._get_ws_token()
-    connect_id = str(uuid4())
+    log.info('API reported ping_interval: {ping_interval}\n')
+
+    connect_id: str = str(uuid4())
+    typ: str
+    quote: dict
     async with (
         open_autorecon_ws(
             (

@ -719,20 +762,37 @@ async def stream_quotes(
         ),
         ) as ws,
         open_ping_task(ws, ping_interval, connect_id),
-        aclosing(stream_messages(ws, sym_str)) as msg_gen,
+        aclosing(
+            iter_normed_quotes(
+                ws, sym_str
+            )
+        ) as iter_quotes,
     ):
-        typ, quote = await anext(msg_gen)
-
-        while typ != 'trade':
-            # take care to not unblock here until we get a real
-            # trade quote
-            typ, quote = await anext(msg_gen)
+        typ, quote = await anext(iter_quotes)
+
+        # take care to not unblock here until we get a real
+        # trade quote?
+        # ^TODO, remove this right?
+        # -[ ] what often blocks chart boot/new-feed switching
+        #   since we'ere waiting for a live quote instead of just
+        #   loading history afap..
+        #  |_ XXX, not sure if we require a bit of rework to core
+        #    feed init logic or if backends justg gotta be
+        #    changed up.. feel like there was some causality
+        #    dilema prolly only seen with IB too..
+        # while typ != 'trade':
+        #     typ, quote = await anext(iter_quotes)

         task_status.started((init_msgs, quote))
         feed_is_live.set()

-        async for typ, msg in msg_gen:
-            await send_chan.send({sym_str: msg})
+        # XXX NOTE, DO NOT include the `.<backend>` suffix!
+        # OW the sampling loop will not broadcast correctly..
+        # since `bus._subscribers.setdefault(bs_fqme, set())`
+        # is used inside `.data.open_feed_bus()` !!!
+        topic: str = mkt.bs_fqme
+        async for typ, quote in iter_quotes:
+            await send_chan.send({topic: quote})

@ -787,7 +847,7 @@ async def subscribe(
     )


-async def stream_messages(
+async def iter_normed_quotes(
     ws: NoBsWs,
     sym: str,

@ -818,6 +878,9 @@ async def iter_normed_quotes(

                 yield 'trade', {
                     'symbol': sym,
+                    # TODO, is 'last' even used elsewhere/a-good
+                    # semantic? can't we just read the ticks with our
+                    # `.data.ticktools.frame_ticks()`/
                     'last': trade_data.price,
                     'brokerd_ts': last_trade_ts,
                     'ticks': [

@ -910,7 +973,7 @@ async def open_history_client(
         if end_dt is None:
             inow = round(time.time())

-            print(
+            log.debug(
                 f'difference in time between load and processing'
                 f'{inow - times[-1]}'
             )

@ -653,7 +653,11 @@ class Router(Struct):
         flume = feed.flumes[fqme]
         first_quote: dict = flume.first_quote
         book: DarkBook = self.get_dark_book(broker)
-        book.lasts[fqme]: float = float(first_quote['last'])
+
+        if not (last := first_quote.get('last')):
+            last: float = flume.rt_shm.array[-1]['close']
+
+        book.lasts[fqme]: float = float(last)

         async with self.maybe_open_brokerd_dialog(
             brokermod=brokermod,

@ -716,7 +720,7 @@ class Router(Struct):
         subs = self.subscribers[sub_key]

         sent_some: bool = False
-        for client_stream in subs:
+        for client_stream in subs.copy():
             try:
                 await client_stream.send(msg)
                 sent_some = True
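[review note] The `subs.copy()` fix above avoids the classic mutation-during-iteration failure: if a dead `client_stream` gets removed from the subscriber set while the send loop is walking it, bare iteration raises. Minimal repro:

subs: set[str] = {'a', 'b', 'c'}
try:
    for s in subs:
        subs.discard(s)  # mutates the set mid-iteration
except RuntimeError:
    pass  # 'Set changed size during iteration'

# iterating a snapshot is safe:
for s in subs.copy():
    subs.discard(s)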
@ -1010,10 +1014,14 @@ async def translate_and_relay_brokerd_events(
             status_msg.brokerd_msg = msg
             status_msg.src = msg.broker_details['name']

-            await router.client_broadcast(
-                status_msg.req.symbol,
-                status_msg,
-            )
+            if not status_msg.req:
+                # likely some order change state?
+                await tractor.pause()
+            else:
+                await router.client_broadcast(
+                    status_msg.req.symbol,
+                    status_msg,
+                )

             if status == 'closed':
                 log.info(f'Execution for {oid} is complete!')

@ -653,6 +653,7 @@ async def open_trade_dialog(
         # in) use manually constructed table from calling
         # the `.get_mkt_info()` provider EP above.
         _mktmap_table=mkt_by_fqme,
+        only_require=list(mkt_by_fqme),
     )

     pp_msgs: list[BrokerdPosition] = []

@ -335,7 +335,7 @@ def services(config, tl, ports):
             name='service_query',
             loglevel=config['loglevel'] if tl else None,
         ),
-        tractor.get_arbiter(
+        tractor.get_registry(
            host=host,
            port=ports[0]
        ) as portal

@ -25,10 +25,12 @@ from collections import (
     defaultdict,
 )
 from contextlib import asynccontextmanager as acm
+from functools import partial
 import time
 from typing import (
     Any,
     AsyncIterator,
+    Callable,
     TYPE_CHECKING,
 )

@ -42,7 +44,7 @@ from tractor.trionics import (
     maybe_open_nursery,
 )
 import trio
-from trio_typing import TaskStatus
+from trio import TaskStatus

 from .ticktools import (
     frame_ticks,

@ -53,6 +55,9 @@ from ._util import (
     get_console_log,
 )
 from ..service import maybe_spawn_daemon
+from piker.log import (
+    mk_repr,
+)

 if TYPE_CHECKING:
     from ._sharedmem import (

@ -70,6 +75,7 @@ if TYPE_CHECKING:
 _default_delay_s: float = 1.0


+# TODO: use new `tractor.singleton_acm` API for this!
 class Sampler:
     '''
     Global sampling engine registry.

@ -79,9 +85,9 @@ class Sampler:

     This non-instantiated type is meant to be a singleton within
     a `samplerd` actor-service spawned once by the user wishing to
-    time-step-sample (real-time) quote feeds, see
-    ``.service.maybe_open_samplerd()`` and the below
-    ``register_with_sampler()``.
+    time-step-sample a (real-time) quote feeds, see
+    `.service.maybe_open_samplerd()` and the below
+    `register_with_sampler()`.

     '''
     service_nursery: None | trio.Nursery = None

@ -375,7 +381,10 @@ async def register_with_sampler(
         assert Sampler.ohlcv_shms

     # unblock caller
-    await ctx.started(set(Sampler.ohlcv_shms.keys()))
+    await ctx.started(
+        # XXX bc msgpack only allows one array type!
+        list(Sampler.ohlcv_shms.keys())
+    )

     if open_index_stream:
         try:
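[review note] Re the `list(...)` cast in `ctx.started()` above ('msgpack only allows one array type'): msgpack has no set type, so a `set` can't cross the IPC boundary. Assuming a msgpack-based wire codec, the difference is just:

import msgpack  # assumption: a msgpack-based IPC codec

msgpack.packb([1, 2, 3])  # arrays encode fine
try:
    msgpack.packb({1, 2, 3})
except TypeError:
    pass  # sets are not a msgpack type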
@ -419,7 +428,6 @@ async def register_with_sampler(
|
||||||
|
|
||||||
|
|
||||||
async def spawn_samplerd(
|
async def spawn_samplerd(
|
||||||
|
|
||||||
loglevel: str | None = None,
|
loglevel: str | None = None,
|
||||||
**extra_tractor_kwargs
|
**extra_tractor_kwargs
|
||||||
|
|
||||||
|
@ -429,7 +437,10 @@ async def spawn_samplerd(
|
||||||
update and increment count write and stream broadcasting.
|
update and increment count write and stream broadcasting.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
from piker.service import Services
|
from piker.service import (
|
||||||
|
get_service_mngr,
|
||||||
|
ServiceMngr,
|
||||||
|
)
|
||||||
|
|
||||||
dname = 'samplerd'
|
dname = 'samplerd'
|
||||||
log.info(f'Spawning `{dname}`')
|
log.info(f'Spawning `{dname}`')
|
||||||
|
@ -437,26 +448,33 @@ async def spawn_samplerd(
|
||||||
# singleton lock creation of ``samplerd`` since we only ever want
|
# singleton lock creation of ``samplerd`` since we only ever want
|
||||||
# one daemon per ``pikerd`` proc tree.
|
# one daemon per ``pikerd`` proc tree.
|
||||||
# TODO: make this built-into the service api?
|
# TODO: make this built-into the service api?
|
||||||
async with Services.locks[dname + '_singleton']:
|
mngr: ServiceMngr = get_service_mngr()
|
||||||
|
already_started: bool = dname in mngr.service_tasks
|
||||||
|
|
||||||
if dname not in Services.service_tasks:
|
async with mngr._locks[dname + '_singleton']:
|
||||||
|
ctx: Context = await mngr.start_service(
|
||||||
portal = await Services.actor_n.start_actor(
|
daemon_name=dname,
|
||||||
dname,
|
ctx_ep=partial(
|
||||||
enable_modules=[
|
|
||||||
'piker.data._sampling',
|
|
||||||
],
|
|
||||||
loglevel=loglevel,
|
|
||||||
debug_mode=Services.debug_mode, # set by pikerd flag
|
|
||||||
**extra_tractor_kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
await Services.start_service_task(
|
|
||||||
dname,
|
|
||||||
portal,
|
|
||||||
register_with_sampler,
|
register_with_sampler,
|
||||||
period_s=1,
|
period_s=1,
|
||||||
sub_for_broadcasts=False,
|
sub_for_broadcasts=False,
|
||||||
|
),
|
||||||
|
debug_mode=mngr.debug_mode, # set by pikerd flag
|
||||||
|
|
||||||
|
# proxy-through to tractor
|
||||||
|
enable_modules=[
|
||||||
|
'piker.data._sampling',
|
||||||
|
],
|
||||||
|
loglevel=loglevel,
|
||||||
|
**extra_tractor_kwargs
|
||||||
|
)
|
||||||
|
if not already_started:
|
||||||
|
assert (
|
||||||
|
ctx
|
||||||
|
and
|
||||||
|
ctx.portal
|
||||||
|
and
|
||||||
|
not ctx.cancel_called
|
||||||
)
|
)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
@@ -561,7 +579,6 @@ async def open_sample_stream(
-

 async def sample_and_broadcast(

     bus: _FeedsBus,  # noqa
     rt_shm: ShmArray,
     hist_shm: ShmArray,
@@ -582,11 +599,22 @@ async def sample_and_broadcast(

     overruns = Counter()

+    # multiline nested `dict` formatter (since rn quote-msgs are
+    # just that).
+    pfmt: Callable[[str], str] = mk_repr()
+
     # iterate stream delivered by broker
     async for quotes in quote_stream:
-        # print(quotes)
-
-        # TODO: ``numba`` this!
+        # XXX WARNING XXX only enable for debugging bc ow can cost
+        # ALOT of perf with HF-feedz!!!
+        #
+        # log.info(
+        #     'Rx live quotes:\n'
+        #     f'{pfmt(quotes)}'
+        # )
+
+        # TODO: `numba` this!
         for broker_symbol, quote in quotes.items():
             # TODO: in theory you can send the IPC msg *before* writing
             # to the sharedmem array to decrease latency, however, that
@@ -659,6 +687,18 @@ async def sample_and_broadcast(
             sub_key: str = broker_symbol.lower()
             subs: set[Sub] = bus.get_subs(sub_key)

+            if not subs:
+                all_bs_fqmes: list[str] = list(
+                    bus._subscribers.keys()
+                )
+                log.warning(
+                    f'No subscribers for {brokername!r} live-quote ??\n'
+                    f'broker_symbol: {broker_symbol}\n\n'
+
+                    f'Maybe the backend-sys symbol does not match one of,\n'
+                    f'{pfmt(all_bs_fqmes)}\n'
+                )
+
             # NOTE: by default the broker backend doesn't append
             # it's own "name" into the fqme schema (but maybe it
             # should?) so we have to manually generate the correct
@@ -889,6 +929,7 @@ async def uniform_rate_send(
                 # to consumers which crash or lose network connection.
                 # I.e. we **DO NOT** want to crash and propagate up to
                 # ``pikerd`` these kinds of errors!
+                trio.EndOfChannel,
                 trio.ClosedResourceError,
                 trio.BrokenResourceError,
                 ConnectionResetError,
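An aside on why `trio.EndOfChannel` joins this guard: a consumer that crashes or drops its network link can surface as any of these transport errors, and the send-loop should just drop that subscriber. A hedged sketch of the pattern (the `stream`/`subs` names are illustrative):

    import trio

    async def send_to_sub(stream, msg, subs: set) -> None:
        try:
            await stream.send(msg)
        except (
            trio.EndOfChannel,
            trio.ClosedResourceError,
            trio.BrokenResourceError,
            ConnectionResetError,
        ):
            # consumer died or disconnected: unsubscribe it instead
            # of propagating the error up to `pikerd`.
            subs.discard(stream)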
@@ -273,7 +273,7 @@ async def _reconnect_forever(
            nobsws._connected.set()
            await trio.sleep_forever()
        except HandshakeError:
-            log.exception(f'Retrying connection')
+            log.exception('Retrying connection')

    # ws & nursery block ends

@@ -359,8 +359,8 @@ async def open_autorecon_ws(


 '''
-JSONRPC response-request style machinery for transparent multiplexing of msgs
-over a NoBsWs.
+JSONRPC response-request style machinery for transparent multiplexing
+of msgs over a `NoBsWs`.

 '''

@@ -377,43 +377,77 @@ async def open_jsonrpc_session(
     url: str,
     start_id: int = 0,
     response_type: type = JSONRPCResult,
-    request_type: Optional[type] = None,
-    request_hook: Optional[Callable] = None,
-    error_hook: Optional[Callable] = None,
+    msg_recv_timeout: float = float('inf'),
+    # ^NOTE, since only `deribit` is using this jsonrpc stuff atm
+    # and options mkts are generally "slow moving"..
+    #
+    # FURTHER if we break the underlying ws connection then since we
+    # don't pass a `fixture` to the task that manages `NoBsWs`, i.e.
+    # `_reconnect_forever()`, the jsonrpc "transport pipe" get's
+    # broken and never restored with wtv init sequence is required to
+    # re-establish a working req-resp session.
+
+    # request_type: Optional[type] = None,
+    # request_hook: Optional[Callable] = None,
+    # error_hook: Optional[Callable] = None,
 ) -> Callable[[str, dict], dict]:

+    # NOTE, store all request msgs so we can raise errors on the
+    # caller side!
+    req_msgs: dict[int, dict] = {}
+
     async with (
         trio.open_nursery() as n,
-        open_autorecon_ws(url) as ws
+        open_autorecon_ws(
+            url=url,
+            msg_recv_timeout=msg_recv_timeout,
+        ) as ws
     ):
-        rpc_id: Iterable = count(start_id)
+        rpc_id: Iterable[int] = count(start_id)
         rpc_results: dict[int, dict] = {}

-        async def json_rpc(method: str, params: dict) -> dict:
+        async def json_rpc(
+            method: str,
+            params: dict,
+        ) -> dict:
             '''
             perform a json rpc call and wait for the result, raise exception in
             case of error field present on response
             '''
+            nonlocal req_msgs
+
+            req_id: int = next(rpc_id)
             msg = {
                 'jsonrpc': '2.0',
-                'id': next(rpc_id),
+                'id': req_id,
                 'method': method,
                 'params': params
             }
             _id = msg['id']

-            rpc_results[_id] = {
+            result = rpc_results[_id] = {
                 'result': None,
-                'event': trio.Event()
+                'error': None,
+                'event': trio.Event(),  # signal caller resp arrived
             }
+            req_msgs[_id] = msg

             await ws.send_msg(msg)

+            # wait for reponse before unblocking requester code
             await rpc_results[_id]['event'].wait()

-            ret = rpc_results[_id]['result']
+            if (maybe_result := result['result']):
+                ret = maybe_result
+                del rpc_results[_id]

-            del rpc_results[_id]
+            else:
+                err = result['error']
+                raise Exception(
+                    f'JSONRPC request failed\n'
+                    f'req: {msg}\n'
+                    f'resp: {err}\n'
+                )

             if ret.error is not None:
                 raise Exception(json.dumps(ret.error, indent=4))
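Stripped of the piker specifics, the request/response correlation above reduces to a per-request entry holding result/error slots plus a `trio.Event` which the receiver task sets once the matching response arrives. A minimal standalone sketch (names are illustrative, not the module's API):

    import trio

    pending: dict[int, dict] = {}

    async def request(send_msg, req_id: int, method: str, params: dict) -> dict:
        entry = pending[req_id] = {
            'result': None,
            'error': None,
            'event': trio.Event(),
        }
        await send_msg({
            'jsonrpc': '2.0',
            'id': req_id,
            'method': method,
            'params': params,
        })
        await entry['event'].wait()  # set by the recv-loop task
        pending.pop(req_id)
        if entry['error'] is not None:
            raise RuntimeError(entry['error'])
        return entry['result']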
@@ -428,6 +462,7 @@ async def open_jsonrpc_session(
            the server side.

            '''
+            nonlocal req_msgs
            async for msg in ws:
                match msg:
                    case {
@@ -451,15 +486,29 @@ async def open_jsonrpc_session(
                        'params': _,
                    }:
                        log.debug(f'Recieved\n{msg}')
-                        if request_hook:
-                            await request_hook(request_type(**msg))
+                        # if request_hook:
+                        #     await request_hook(request_type(**msg))

                    case {
                        'error': error
                    }:
-                        log.warning(f'Recieved\n{error}')
-                        if error_hook:
-                            await error_hook(response_type(**msg))
+                        # if error_hook:
+                        #     await error_hook(response_type(**msg))
+
+                        # retreive orig request msg, set error
+                        # response in original "result" msg,
+                        # THEN FINALLY set the event to signal caller
+                        # to raise the error in the parent task.
+                        req_id: int = error['id']
+                        req_msg: dict = req_msgs[req_id]
+                        result: dict = rpc_results[req_id]
+                        result['error'] = error
+                        result['event'].set()
+                        log.error(
+                            f'JSONRPC request failed\n'
+                            f'req: {req_msg}\n'
+                            f'resp: {error}\n'
+                        )

                    case _:
                        log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
@@ -540,7 +540,10 @@ async def open_feed_bus(
    # subscription since the backend isn't (yet) expected to
    # append it's own name to the fqme, so we filter on keys
    # which *do not* include that name (e.g .ib) .
-    bus._subscribers.setdefault(bs_fqme, set())
+    bus._subscribers.setdefault(
+        bs_fqme,
+        set(),
+    )

    # sync feed subscribers with flume handles
    await ctx.started(
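The `dict.setdefault()` call guarantees a subscriber-set exists for the key without clobbering one populated by an earlier subscriber; in isolation (hypothetical keys):

    subscribers: dict[str, set[str]] = {}

    # first call inserts the empty set, later calls return that same set
    subscribers.setdefault('btcusdt.spot', set()).add('chart-task')
    subscribers.setdefault('btcusdt.spot', set()).add('ems-task')
    assert subscribers['btcusdt.spot'] == {'chart-task', 'ems-task'}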
--- a/piker/log.py
+++ b/piker/log.py
@@ -18,7 +18,11 @@
 Log like a forester!
 """
 import logging
+import reprlib
 import json
+from typing import (
+    Callable,
+)

 import tractor
 from pygments import (
@@ -84,3 +88,27 @@ def colorize_json(
        # likeable styles: algol_nu, tango, monokai
        formatters.TerminalTrueColorFormatter(style=style)
    )
+
+
+def mk_repr(
+    **repr_kws,
+) -> Callable[[str], str]:
+    '''
+    Allocate and deliver a `repr.Repr` instance with provided input
+    settings using the std-lib's `reprlib` mod,
+    * https://docs.python.org/3/library/reprlib.html
+
+    ------ Ex. ------
+    An up to 6-layer-nested `dict` as multi-line:
+    - https://stackoverflow.com/a/79102479
+    - https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel
+
+    '''
+    def_kws: dict[str, int] = dict(
+        indent=2,
+        maxlevel=6,  # recursion levels
+        maxstring=66,  # match editor line-len limit
+    )
+    def_kws |= repr_kws
+    reprr = reprlib.Repr(**def_kws)
+    return reprr.repr
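For reference, the callable delivered by `mk_repr()` is just the bound `reprlib.Repr.repr` method; a usage sketch (note the `indent`/constructor-kwarg support requires Python >= 3.12):

    import reprlib

    reprr = reprlib.Repr(
        indent=2,      # multi-line output, 2-space nesting
        maxlevel=6,    # recurse at most 6 container levels
        maxstring=66,  # truncate long strings
    )
    pfmt = reprr.repr
    print(pfmt({'l1': {'l2': {'l3': ['a' * 100]}}}))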
@@ -30,7 +30,11 @@ Actor runtime primtives and (distributed) service APIs for,
 => TODO: maybe to (re)move elsewhere?

 '''
-from ._mngr import Services as Services
+from ._mngr import (
+    get_service_mngr as get_service_mngr,
+    open_service_mngr as open_service_mngr,
+    ServiceMngr as ServiceMngr,
+)
 from ._registry import (
     _tractor_kwargs as _tractor_kwargs,
     _default_reg_addr as _default_reg_addr,
@@ -21,7 +21,6 @@
 from __future__ import annotations
 import os
 from typing import (
-    Optional,
     Any,
     ClassVar,
 )
@@ -30,13 +29,13 @@ from contextlib import (
 )

 import tractor
-import trio

 from ._util import (
     get_console_log,
 )
 from ._mngr import (
-    Services,
+    open_service_mngr,
+    ServiceMngr,
 )
 from ._registry import (  # noqa
     _tractor_kwargs,
@@ -59,7 +58,7 @@ async def open_piker_runtime(
     registry_addrs: list[tuple[str, int]] = [],

     enable_modules: list[str] = [],
-    loglevel: Optional[str] = None,
+    loglevel: str|None = None,

     # XXX NOTE XXX: you should pretty much never want debug mode
     # for data daemons when running in production.
@@ -69,7 +68,7 @@ async def open_piker_runtime(
     # and spawn the service tree distributed per that.
     start_method: str = 'trio',

-    tractor_runtime_overrides: dict | None = None,
+    tractor_runtime_overrides: dict|None = None,
     **tractor_kwargs,

 ) -> tuple[
@@ -119,6 +118,10 @@ async def open_piker_runtime(
             # spawn other specialized daemons I think?
             enable_modules=enable_modules,

+            # TODO: how to configure this?
+            # keep it on by default if debug mode is set?
+            # maybe_enable_greenback=debug_mode,
+
             **tractor_kwargs,
         ) as actor,

@@ -167,12 +170,13 @@ async def open_pikerd(

     **kwargs,

-) -> Services:
+) -> ServiceMngr:
     '''
-    Start a root piker daemon with an indefinite lifetime.
+    Start a root piker daemon actor (aka `pikerd`) with an indefinite
+    lifetime.

-    A root actor nursery is created which can be used to create and keep
-    alive underling services (see below).
+    A root actor-nursery is created which can be used to spawn and
+    supervise underling service sub-actors (see below).

     '''
     # NOTE: for the root daemon we always enable the root
@@ -199,8 +203,6 @@ async def open_pikerd(
             root_actor,
             reg_addrs,
         ),
-        tractor.open_nursery() as actor_nursery,
-        trio.open_nursery() as service_nursery,
     ):
         for addr in reg_addrs:
             if addr not in root_actor.accept_addrs:
@@ -209,25 +211,17 @@ async def open_pikerd(
                    'Maybe you have another daemon already running?'
                )

-        # assign globally for future daemon/task creation
-        Services.actor_n = actor_nursery
-        Services.service_n = service_nursery
-        Services.debug_mode = debug_mode
-
-        try:
-            yield Services
-
-        finally:
-            # TODO: is this more clever/efficient?
-            # if 'samplerd' in Services.service_tasks:
-            #     await Services.cancel_service('samplerd')
-            service_nursery.cancel_scope.cancel()
+        mngr: ServiceMngr
+        async with open_service_mngr(
+            debug_mode=debug_mode,
+        ) as mngr:
+            yield mngr


 # TODO: do we even need this?
 # @acm
 # async def maybe_open_runtime(
-#     loglevel: Optional[str] = None,
+#     loglevel: str|None = None,
 #     **kwargs,

 # ) -> None:
@@ -256,7 +250,7 @@ async def maybe_open_pikerd(
     loglevel: str | None = None,
     **kwargs,

-) -> tractor._portal.Portal | ClassVar[Services]:
+) -> tractor._portal.Portal | ClassVar[ServiceMngr]:
     '''
     If no ``pikerd`` daemon-root-actor can be found start it and
     yield up (we should probably figure out returning a portal to self
@@ -49,7 +49,7 @@ from requests.exceptions import (
     ReadTimeout,
 )

-from ._mngr import Services
+from ._mngr import ServiceMngr
 from ._util import (
     log,  # sub-sys logger
     get_console_log,
@@ -453,7 +453,7 @@ async def open_ahabd(

 @acm
 async def start_ahab_service(
-    services: Services,
+    services: ServiceMngr,
     service_name: str,

     # endpoint config passed as **kwargs
@@ -549,7 +549,8 @@ async def start_ahab_service(
            log.warning('Failed to cancel root permsed container')

        except (
-            trio.MultiError,
+            # trio.MultiError,
+            ExceptionGroup,
        ) as err:
            for subexc in err.exceptions:
                if isinstance(subexc, PermissionError):
@@ -26,14 +26,17 @@ from typing import (
 from contextlib import (
     asynccontextmanager as acm,
 )
+from collections import defaultdict

 import tractor
+import trio

 from ._util import (
     log,  # sub-sys logger
 )
 from ._mngr import (
-    Services,
+    get_service_mngr,
+    ServiceMngr,
 )
 from ._actor_runtime import maybe_open_pikerd
 from ._registry import find_service
@@ -41,15 +44,14 @@ from ._registry import find_service

 @acm
 async def maybe_spawn_daemon(

     service_name: str,
     service_task_target: Callable,

     spawn_args: dict[str, Any],

     loglevel: str | None = None,
     singleton: bool = False,

+    _locks = defaultdict(trio.Lock),
     **pikerd_kwargs,

 ) -> tractor.Portal:
@@ -67,7 +69,7 @@ async def maybe_spawn_daemon(
     '''
     # serialize access to this section to avoid
     # 2 or more tasks racing to create a daemon
-    lock = Services.locks[service_name]
+    lock = _locks[service_name]
     await lock.acquire()

     async with find_service(
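The `defaultdict(trio.Lock)` default gives each service name its own lazily-created mutex, so racing spawn attempts for the same daemon serialize while different daemons spawn concurrently; in isolation:

    from collections import defaultdict
    import trio

    _locks: defaultdict[str, trio.Lock] = defaultdict(trio.Lock)

    async def spawn_once(name: str) -> None:
        # two tasks racing on the same `name` queue up here,
        # unrelated names proceed in parallel.
        async with _locks[name]:
            ...  # find-or-spawn the daemon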
@@ -132,7 +134,65 @@ async def maybe_spawn_daemon(
     async with tractor.wait_for_actor(service_name) as portal:
         lock.release()
         yield portal
-        await portal.cancel_actor()
+        # --- ---- ---
+        # XXX NOTE XXX
+        # --- ---- ---
+        # DO NOT PUT A `portal.cancel_actor()` here (as was prior)!
+        #
+        # Doing so will cause an "out-of-band" ctxc
+        # (`tractor.ContextCancelled`) to be raised inside the
+        # `ServiceMngr.open_context_in_task()`'s call to
+        # `ctx.wait_for_result()` AND the internal self-ctxc
+        # "graceful capture" WILL NOT CATCH IT!
+        #
+        # This can cause certain types of operations to raise
+        # that ctxc BEFORE THEY `return`, resulting in
+        # a "false-negative" ctxc being raised when really
+        # nothing actually failed, other then our semantic
+        # "failure" to suppress an expected, graceful,
+        # self-cancel scenario..
+        #
+        # bUt wHy duZ It WorK lIKe dis..
+        # ------------------------------
+        # from the perspective of the `tractor.Context` this
+        # cancel request was conducted "out of band" since
+        # `Context.cancel()` was never called and thus the
+        # `._cancel_called: bool` was never set. Despite the
+        # remote `.canceller` being set to `pikerd` (i.e. the
+        # same `Actor.uid` of the raising service-mngr task) the
+        # service-task's ctx itself was never marked as having
+        # requested cancellation and thus still raises the ctxc
+        # bc it was unaware of any such request.
+        #
+        # How to make grokin these cases easier tho?
+        # ------------------------------------------
+        # Because `Portal.cancel_actor()` was called it requests
+        # "full-`Actor`-runtime-cancellation" of it's peer
+        # process which IS NOT THE SAME as a single inter-actor
+        # RPC task cancelling its local context with a remote
+        # peer `Task` in that same peer process.
+        #
+        # ?TODO? It might be better if we do one (or all) of the
+        # following:
+        #
+        # -[ ] at least set a special message for the
+        #    `ContextCancelled` when raised locally by the
+        #    unaware ctx task such that we check for the
+        #    `.canceller` being *our `Actor`* and in the case
+        #    where `Context._cancel_called == False` we specially
+        #    note that this is likely an "out-of-band"
+        #    runtime-cancel request triggered by some call to
+        #    `Portal.cancel_actor()`, possibly even reporting the
+        #    exact LOC of that caller by tracking it inside our
+        #    portal-type?
+        # -[ ] possibly add another field `ContextCancelled` like
+        #    maybe a,
+        #    `.request_type: Literal['os', 'proc', 'actor',
+        #    'ctx']` type thing which would allow immediately
+        #    being able to tell what kind of cancellation caused
+        #    the unexpected ctxc?
+        # -[ ] REMOVE THIS COMMENT, once we've settled on how to
+        #    better augment `tractor` to be more explicit on this!

 async def spawn_emsd(

@@ -147,21 +207,22 @@ async def spawn_emsd(
     """
     log.info('Spawning emsd')

-    portal = await Services.actor_n.start_actor(
+    smngr: ServiceMngr = get_service_mngr()
+    portal = await smngr.actor_n.start_actor(
         'emsd',
         enable_modules=[
             'piker.clearing._ems',
             'piker.clearing._client',
         ],
         loglevel=loglevel,
-        debug_mode=Services.debug_mode,  # set by pikerd flag
+        debug_mode=smngr.debug_mode,  # set by pikerd flag
         **extra_tractor_kwargs
     )

     # non-blocking setup of clearing service
     from ..clearing._ems import _setup_persistent_emsd

-    await Services.start_service_task(
+    await smngr.start_service_task(
         'emsd',
         portal,

@@ -18,16 +18,29 @@
 daemon-service management API.

 """
+from __future__ import annotations
+from contextlib import (
+    asynccontextmanager as acm,
+    # contextmanager as cm,
+)
 from collections import defaultdict
+from dataclasses import (
+    dataclass,
+    field,
+)
+import functools
+import inspect
 from typing import (
     Callable,
     Any,
 )

-import trio
-from trio_typing import TaskStatus
+import msgspec
 import tractor
+import trio
+from trio import TaskStatus
 from tractor import (
+    ActorNursery,
     current_actor,
     ContextCancelled,
     Context,
@@ -39,6 +52,130 @@ from ._util import (
 )


+# TODO: implement a singleton deco-API for wrapping the below
+# factory's impl for general actor-singleton use?
+#
+# @singleton
+# async def open_service_mngr(
+#     **init_kwargs,
+# ) -> ServiceMngr:
+#     '''
+#     Note this function body is invoke IFF no existing singleton instance already
+#     exists in this proc's memory.
+
+#     '''
+#     # setup
+#     yield ServiceMngr(**init_kwargs)
+#     # teardown
+
+
+# TODO: singleton factory API instead of a class API
+@acm
+async def open_service_mngr(
+    *,
+    debug_mode: bool = False,
+
+    # impl deat which ensures a single global instance
+    _singleton: list[ServiceMngr|None] = [None],
+    **init_kwargs,
+
+) -> ServiceMngr:
+    '''
+    Open a multi-subactor-as-service-daemon tree supervisor.
+
+    The delivered `ServiceMngr` is a singleton instance for each
+    actor-process and is allocated on first open and never
+    de-allocated unless explicitly deleted by al call to
+    `del_service_mngr()`.
+
+    '''
+    # TODO: factor this an allocation into
+    # a `._mngr.open_service_mngr()` and put in the
+    # once-n-only-once setup/`.__aenter__()` part!
+    # -[ ] how to make this only happen on the `mngr == None` case?
+    #  |_ use `.trionics.maybe_open_context()` (for generic
+    #     async-with-style-only-once of the factory impl, though
+    #     what do we do for the allocation case?
+    #    / `.maybe_open_nursery()` (since for this specific case
+    #    it's simpler?) to activate
+    async with (
+        tractor.open_nursery() as an,
+        trio.open_nursery() as tn,
+    ):
+        # impl specific obvi..
+        init_kwargs.update({
+            'actor_n': an,
+            'service_n': tn,
+        })
+
+        mngr: ServiceMngr|None
+        if (mngr := _singleton[0]) is None:
+
+            log.info('Allocating a new service mngr!')
+            mngr = _singleton[0] = ServiceMngr(**init_kwargs)
+
+            # TODO: put into `.__aenter__()` section of
+            # eventual `@singleton_acm` API wrapper.
+            #
+            # assign globally for future daemon/task creation
+            mngr.actor_n = an
+            mngr.service_n = tn
+
+        else:
+            assert (
+                mngr.actor_n
+                and
+                mngr.service_tn
+            )
+            log.info(
+                'Using extant service mngr!\n\n'
+                f'{mngr!r}\n'  # it has a nice `.__repr__()` of services state
+            )
+
+        try:
+            # NOTE: this is a singleton factory impl specific detail
+            # which should be supported in the condensed
+            # `@singleton_acm` API?
+            mngr.debug_mode = debug_mode
+
+            yield mngr
+        finally:
+            # TODO: is this more clever/efficient?
+            # if 'samplerd' in mngr.service_tasks:
+            #     await mngr.cancel_service('samplerd')
+            tn.cancel_scope.cancel()

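The `_singleton: list[ServiceMngr|None] = [None]` parameter is the classic mutable-default-argument trick: the one-element list is allocated once at function definition time so every call shares it. The mechanism condensed (illustrative names only):

    from contextlib import asynccontextmanager as acm

    @acm
    async def open_thing(
        _singleton: list = [None],  # allocated once, at def-time
    ):
        if (thing := _singleton[0]) is None:
            thing = _singleton[0] = object()  # first open allocates
        yield thing  # every later open yields the same instance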
+
+def get_service_mngr() -> ServiceMngr:
+    '''
+    Try to get the singleton service-mngr for this actor presuming it
+    has already been allocated using,
+
+    .. code:: python
+
+        async with open_<@singleton_acm(func)>() as mngr`
+            ... this block kept open ...
+
+    If not yet allocated raise a `ServiceError`.
+
+    '''
+    # https://stackoverflow.com/a/12627202
+    # https://docs.python.org/3/library/inspect.html#inspect.Signature
+    maybe_mngr: ServiceMngr|None = inspect.signature(
+        open_service_mngr
+    ).parameters['_singleton'].default[0]
+
+    if maybe_mngr is None:
+        raise RuntimeError(
+            'Someone must allocate a `ServiceMngr` using\n\n'
+            '`async with open_service_mngr()` beforehand!!\n'
+        )
+
+    return maybe_mngr

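`get_service_mngr()` then reads that same def-time list back through the function's *signature* rather than a module-level global; the trick in isolation:

    import inspect

    def open_thing(_singleton: list = [None]): ...

    # the default object in the signature IS the def-time list, so
    # mutations made by earlier callers are visible here.
    cell: list = inspect.signature(open_thing).parameters['_singleton'].default
    assert cell[0] is None  # nothing allocated yet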
 # TODO: we need remote wrapping and a general soln:
 # - factor this into a ``tractor.highlevel`` extension # pack for the
 #   library.

@@ -46,31 +183,46 @@ from ._util import (
 # to the pikerd actor for starting services remotely!
 # - prolly rename this to ActorServicesNursery since it spawns
 #   new actors and supervises them to completion?
-class Services:
+@dataclass
+class ServiceMngr:
+# class ServiceMngr(msgspec.Struct):
+    '''
+    A multi-subactor-as-service manager.

-    actor_n: tractor._supervise.ActorNursery
+    Spawn, supervise and monitor service/daemon subactors in a SC
+    process tree.
+
+    '''
+    actor_n: ActorNursery
     service_n: trio.Nursery
-    debug_mode: bool  # tractor sub-actor debug mode flag
+    debug_mode: bool = False  # tractor sub-actor debug mode flag

     service_tasks: dict[
         str,
         tuple[
             trio.CancelScope,
+            Context,
             Portal,
             trio.Event,
         ]
-    ] = {}
-    locks = defaultdict(trio.Lock)
+    ] = field(default_factory=dict)
+
+    # internal per-service task mutexs
+    _locks = defaultdict(trio.Lock)

-    @classmethod
     async def start_service_task(
         self,
         name: str,
         portal: Portal,

+        # TODO: typevar for the return type of the target and then
+        # use it below for `ctx_res`?
         target: Callable,

         allow_overruns: bool = False,
         **ctx_kwargs,

-    ) -> (trio.CancelScope, Context):
+    ) -> (trio.CancelScope, Context, Any):
         '''
         Open a context in a service sub-actor, add to a stack
         that gets unwound at ``pikerd`` teardown.
@@ -83,6 +235,7 @@ class Services:
        task_status: TaskStatus[
            tuple[
                trio.CancelScope,
+                Context,
                trio.Event,
                Any,
            ]
@@ -90,64 +243,87 @@ class Services:

        ) -> Any:

+            # TODO: use the ctx._scope directly here instead?
+            # -[ ] actually what semantics do we expect for this
+            #      usage!?
            with trio.CancelScope() as cs:
+                try:
+                    async with portal.open_context(
+                        target,
+                        allow_overruns=allow_overruns,
+                        **ctx_kwargs,

-                async with portal.open_context(
-                    target,
-                    allow_overruns=allow_overruns,
-                    **ctx_kwargs,
-
-                ) as (ctx, first):
-
-                    # unblock once the remote context has started
-                    complete = trio.Event()
-                    task_status.started((cs, complete, first))
-                    log.info(
-                        f'`pikerd` service {name} started with value {first}'
-                    )
-                    try:
+                    ) as (ctx, started):
+
+                        # unblock once the remote context has started
+                        complete = trio.Event()
+                        task_status.started((
+                            cs,
+                            ctx,
+                            complete,
+                            started,
+                        ))
+                        log.info(
+                            f'`pikerd` service {name} started with value {started}'
+                        )
                        # wait on any context's return value
                        # and any final portal result from the
                        # sub-actor.
-                        ctx_res: Any = await ctx.result()
+                        ctx_res: Any = await ctx.wait_for_result()

                        # NOTE: blocks indefinitely until cancelled
                        # either by error from the target context
                        # function or by being cancelled here by the
                        # surrounding cancel scope.
-                        return (await portal.result(), ctx_res)
-                    except ContextCancelled as ctxe:
-                        canceller: tuple[str, str] = ctxe.canceller
-                        our_uid: tuple[str, str] = current_actor().uid
-                        if (
-                            canceller != portal.channel.uid
-                            and
-                            canceller != our_uid
-                        ):
-                            log.cancel(
-                                f'Actor-service {name} was remotely cancelled?\n'
-                                f'remote canceller: {canceller}\n'
-                                f'Keeping {our_uid} alive, ignoring sub-actor cancel..\n'
-                            )
-                        else:
-                            raise
+                        return (
+                            await portal.wait_for_result(),
+                            ctx_res,
+                        )

-                finally:
-                    await portal.cancel_actor()
-                    complete.set()
-                    self.service_tasks.pop(name)
+                except ContextCancelled as ctxe:
+                    canceller: tuple[str, str] = ctxe.canceller
+                    our_uid: tuple[str, str] = current_actor().uid
+                    if (
+                        canceller != portal.chan.uid
+                        and
+                        canceller != our_uid
+                    ):
+                        log.cancel(
+                            f'Actor-service `{name}` was remotely cancelled by a peer?\n'
+
+                            # TODO: this would be a good spot to use
+                            # a respawn feature Bo
+                            f'-> Keeping `pikerd` service manager alive despite this inter-peer cancel\n\n'
+
+                            f'cancellee: {portal.chan.uid}\n'
+                            f'canceller: {canceller}\n'
+                        )
+                    else:
+                        raise

-        cs, complete, first = await self.service_n.start(open_context_in_task)
+                finally:
+                    # NOTE: the ctx MUST be cancelled first if we
+                    # don't want the above `ctx.wait_for_result()` to
+                    # raise a self-ctxc. WHY, well since from the ctx's
+                    # perspective the cancel request will have
+                    # arrived out-out-of-band at the `Actor.cancel()`
+                    # level, thus `Context.cancel_called == False`,
+                    # meaning `ctx._is_self_cancelled() == False`.
+                    # with trio.CancelScope(shield=True):
+                    #     await ctx.cancel()
+                    await portal.cancel_actor()
+                    complete.set()
+                    self.service_tasks.pop(name)
+
+        cs, sub_ctx, complete, started = await self.service_n.start(
+            open_context_in_task
+        )

        # store the cancel scope and portal for later cancellation or
        # retstart if needed.
-        self.service_tasks[name] = (cs, portal, complete)
+        self.service_tasks[name] = (cs, sub_ctx, portal, complete)
+        return cs, sub_ctx, started

-        return cs, first
-
-    @classmethod
    async def cancel_service(
        self,
        name: str,
@@ -158,8 +334,80 @@ class Services:

        '''
        log.info(f'Cancelling `pikerd` service {name}')
-        cs, portal, complete = self.service_tasks[name]
-        cs.cancel()
+        cs, sub_ctx, portal, complete = self.service_tasks[name]
+
+        # cs.cancel()
+        await sub_ctx.cancel()
        await complete.wait()
-        assert name not in self.service_tasks, \
-            f'Serice task for {name} not terminated?'
+
+        if name in self.service_tasks:
+            # TODO: custom err?
+            # raise ServiceError(
+            raise RuntimeError(
+                f'Serice task for {name} not terminated?'
+            )
+
+        # assert name not in self.service_tasks, \
+        #     f'Serice task for {name} not terminated?'
+    async def start_service(
+        self,
+        daemon_name: str,
+        ctx_ep: Callable,  # kwargs must `partial`-ed in!
+
+        debug_mode: bool = False,
+        **tractor_actor_kwargs,
+
+    ) -> Context:
+        '''
+        Start a "service" task in a new sub-actor (daemon) and manage it's lifetime
+        indefinitely.
+
+        Services can be cancelled/shutdown using `.cancel_service()`.
+
+        '''
+        entry: tuple|None = self.service_tasks.get(daemon_name)
+        if entry:
+            (cs, sub_ctx, portal, complete) = entry
+            return sub_ctx
+
+        if daemon_name not in self.service_tasks:
+            portal = await self.actor_n.start_actor(
+                daemon_name,
+                debug_mode=(  # maybe set globally during allocate
+                    debug_mode
+                    or
+                    self.debug_mode
+                ),
+                **tractor_actor_kwargs,
+            )
+            ctx_kwargs: dict[str, Any] = {}
+            if isinstance(ctx_ep, functools.partial):
+                ctx_kwargs: dict[str, Any] = ctx_ep.keywords
+                ctx_ep: Callable = ctx_ep.func
+
+            (cs, sub_ctx, started) = await self.start_service_task(
+                daemon_name,
+                portal,
+                ctx_ep,
+                **ctx_kwargs,
+            )
+
+            return sub_ctx
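Since `start_service()` requires endpoint kwargs to be `partial`-ed in, it splits them back out via `functools.partial`'s public attributes; that introspection in isolation:

    from functools import partial

    async def ep(ctx, period_s: int, sub_for_broadcasts: bool): ...

    wrapped = partial(ep, period_s=1, sub_for_broadcasts=False)
    assert wrapped.func is ep
    assert wrapped.keywords == {'period_s': 1, 'sub_for_broadcasts': False}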
+
+# TODO:
+# -[ ] factor all the common shit from `.data._sampling`
+#     and `.brokers._daemon` into here / `ServiceMngr`
+#     in terms of allocating the `Portal` as part of the
+#     "service-in-subactor" starting!
+# -[ ] move to `tractor.hilevel._service`, import and use here!
+#     NOTE: purposely leaks the ref to the mod-scope Bo
+# import tractor
+# from tractor.hilevel import (
+#     open_service_mngr,
+#     ServiceMngr,
+# )
+# mngr: ServiceMngr|None = None
+# with tractor.hilevel.open_service_mngr() as mngr:
+#     Services = proxy(mngr)

@@ -21,11 +21,13 @@ from typing import (
     TYPE_CHECKING,
 )

+# TODO: oof, needs to be changed to `httpx`!
 import asks

 if TYPE_CHECKING:
     import docker
     from ._ahab import DockerContainer
+    from . import ServiceMngr

 from ._util import log  # sub-sys logger
 from ._util import (

@@ -127,7 +129,7 @@ def start_elasticsearch(

 @acm
 async def start_ahab_daemon(
-    service_mngr: Services,
+    service_mngr: ServiceMngr,
     user_config: dict | None = None,
     loglevel: str | None = None,

@@ -53,7 +53,7 @@ import pendulum
 # import purerpc

 from ..data.feed import maybe_open_feed
-from . import Services
+from . import ServiceMngr
 from ._util import (
     log,  # sub-sys logger
     get_console_log,

@@ -233,7 +233,7 @@ def start_marketstore(

 @acm
 async def start_ahab_daemon(
-    service_mngr: Services,
+    service_mngr: ServiceMngr,
     user_config: dict | None = None,
     loglevel: str | None = None,

@@ -161,7 +161,12 @@ class NativeStorageClient:

     def index_files(self):
         for path in self._datadir.iterdir():
-            if path.name in {'borked', 'expired',}:
+            if (
+                path.name in {'borked', 'expired',}
+                or
+                '.parquet' not in str(path)
+            ):
+                # ignore all non-apache files (for now)
                 continue

             key: str = path.name.rstrip('.parquet')
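Worth noting `pathlib` can express the same filter, and `Path.stem` is a safer key-extraction than `str.rstrip('.parquet')` (which strips a trailing *character set*, so it can mangle names ending in any of those characters). A sketch with a hypothetical data dir:

    from pathlib import Path

    datadir = Path('~/.config/piker/nativedb').expanduser()  # hypothetical
    for path in datadir.iterdir():
        if (
            path.name in {'borked', 'expired'}
            or path.suffix != '.parquet'  # ignore all non-apache files
        ):
            continue
        key: str = path.stem  # filename minus the `.parquet` suffix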
@@ -44,8 +44,10 @@ import trio
 from trio_typing import TaskStatus
 import tractor
 from pendulum import (
+    Interval,
     DateTime,
     Duration,
+    duration as mk_duration,
     from_timestamp,
 )
 import numpy as np
@@ -214,7 +216,8 @@ async def maybe_fill_null_segments(
         # pair, immediately stop backfilling?
         if (
             start_dt
-            and end_dt < start_dt
+            and
+            end_dt < start_dt
         ):
             await tractor.pause()
             break
@@ -262,6 +265,7 @@ async def maybe_fill_null_segments(
     except tractor.ContextCancelled:
         # log.exception
         await tractor.pause()
+        raise

     null_segs_detected.set()
     # RECHECK for more null-gaps
@@ -349,7 +353,7 @@ async def maybe_fill_null_segments(

 async def start_backfill(
     get_hist,
-    frame_types: dict[str, Duration] | None,
+    def_frame_duration: Duration,
     mod: ModuleType,
     mkt: MktPair,
     shm: ShmArray,
@@ -379,22 +383,23 @@ async def start_backfill(
     update_start_on_prepend: bool = False
     if backfill_until_dt is None:

-        # TODO: drop this right and just expose the backfill
-        # limits inside a [storage] section in conf.toml?
-        # when no tsdb "last datum" is provided, we just load
-        # some near-term history.
-        # periods = {
-        #     1: {'days': 1},
-        #     60: {'days': 14},
-        # }
-
-        # do a decently sized backfill and load it into storage.
+        # TODO: per-provider default history-durations?
+        # -[ ] inside the `open_history_client()` config allow
+        #     declaring the history duration limits instead of
+        #     guessing and/or applying the same limits to all?
+        #
+        # -[ ] allow declaring (default) per-provider backfill
+        #     limits inside a [storage] sub-section in conf.toml?
+        #
+        # NOTE, when no tsdb "last datum" is provided, we just
+        # load some near-term history by presuming a "decently
+        # large" 60s duration limit and a much shorter 1s range.
         periods = {
             1: {'days': 2},
             60: {'years': 6},
         }
         period_duration: int = periods[timeframe]
-        update_start_on_prepend = True
+        update_start_on_prepend: bool = True

     # NOTE: manually set the "latest" datetime which we intend to
     # backfill history "until" so as to adhere to the history
@@ -416,7 +421,6 @@ async def start_backfill(
             f'backfill_until_dt: {backfill_until_dt}\n'
             f'last_start_dt: {last_start_dt}\n'
         )
-
         try:
             (
                 array,
@@ -426,71 +430,114 @@ async def start_backfill(
                 timeframe,
                 end_dt=last_start_dt,
             )

         except NoData as _daterr:
-            # 3 cases:
-            # - frame in the middle of a legit venue gap
-            # - history actually began at the `last_start_dt`
-            # - some other unknown error (ib blocking the
-            #   history bc they don't want you seeing how they
-            #   cucked all the tinas..)
-            if dur := frame_types.get(timeframe):
-                # decrement by a frame's worth of duration and
-                # retry a few times.
-                last_start_dt.subtract(
-                    seconds=dur.total_seconds()
+            orig_last_start_dt: datetime = last_start_dt
+            gap_report: str = (
+                f'EMPTY FRAME for `end_dt: {last_start_dt}`?\n'
+                f'{mod.name} -> tf@fqme: {timeframe}@{mkt.fqme}\n'
+                f'last_start_dt: {orig_last_start_dt}\n\n'
+                f'bf_until: {backfill_until_dt}\n'
+            )
+            # EMPTY FRAME signal with 3 (likely) causes:
+            #
+            # 1. range contains legit gap in venue history
+            # 2. history actually (edge case) **began** at the
+            #    value `last_start_dt`
+            # 3. some other unknown error (ib blocking the
+            #    history-query bc they don't want you seeing how
+            #    they cucked all the tinas.. like with options
+            #    hist)
+            #
+            if def_frame_duration:
+                # decrement by a duration's (frame) worth of time
+                # as maybe indicated by the backend to see if we
+                # can get older data before this possible
+                # "history gap".
+                last_start_dt: datetime = last_start_dt.subtract(
+                    seconds=def_frame_duration.total_seconds()
                 )
-                log.warning(
-                    f'{mod.name} -> EMPTY FRAME for end_dt?\n'
-                    f'tf@fqme: {timeframe}@{mkt.fqme}\n'
-                    'bf_until <- last_start_dt:\n'
-                    f'{backfill_until_dt} <- {last_start_dt}\n'
-                    f'Decrementing `end_dt` by {dur} and retry..\n'
+                gap_report += (
+                    f'Decrementing `end_dt` and retrying with,\n'
+                    f'def_frame_duration: {def_frame_duration}\n'
+                    f'(new) last_start_dt: {last_start_dt}\n'
                 )
+                log.warning(gap_report)
+                # skip writing to shm/tsdb and try the next
+                # duration's worth of prior history.
                 continue

-        # broker says there never was or is no more history to pull
-        except DataUnavailable:
-            log.warning(
-                f'NO-MORE-DATA in range?\n'
-                f'`{mod.name}` halted history:\n'
-                f'tf@fqme: {timeframe}@{mkt.fqme}\n'
-                'bf_until <- last_start_dt:\n'
-                f'{backfill_until_dt} <- {last_start_dt}\n'
-            )
+            else:
+                # await tractor.pause()
+                raise DataUnavailable(gap_report)

-            # ugh, what's a better way?
-            # TODO: fwiw, we probably want a way to signal a throttle
-            # condition (eg. with ib) so that we can halt the
-            # request loop until the condition is resolved?
-            if timeframe > 1:
-                await tractor.pause()
+        # broker says there never was or is no more history to pull
+        except DataUnavailable as due:
+            message: str = due.args[0]
+            log.warning(
+                f'Provider {mod.name!r} halted backfill due to,\n\n'
+
+                f'{message}\n'
+
+                f'fqme: {mkt.fqme}\n'
+                f'timeframe: {timeframe}\n'
+                f'last_start_dt: {last_start_dt}\n'
+                f'bf_until: {backfill_until_dt}\n'
+            )
+            # UGH: what's a better way?
+            # TODO: backends are responsible for being correct on
+            # this right!?
+            # -[ ] in the `ib` case we could maybe offer some way
+            #    to halt the request loop until the condition is
+            #    resolved or should the backend be entirely in
+            #    charge of solving such faults? yes, right?
             return

+        time: np.ndarray = array['time']
         assert (
-            array['time'][0]
+            time[0]
             ==
             next_start_dt.timestamp()
         )

-        diff = last_start_dt - next_start_dt
-        frame_time_diff_s = diff.seconds
+        assert time[-1] == next_end_dt.timestamp()
+
+        expected_dur: Interval = last_start_dt - next_start_dt

         # frame's worth of sample-period-steps, in seconds
         frame_size_s: float = len(array) * timeframe
-        expected_frame_size_s: float = frame_size_s + timeframe
-        if frame_time_diff_s > expected_frame_size_s:
+        recv_frame_dur: Duration = (
+            from_timestamp(array[-1]['time'])
+            -
+            from_timestamp(array[0]['time'])
+        )
+        if (
+            (lt_frame := (recv_frame_dur < expected_dur))
+            or
+            (null_frame := (frame_size_s == 0))
+            # ^XXX, should NEVER hit now!
+        ):
             # XXX: query result includes a start point prior to our
             # expected "frame size" and thus is likely some kind of
             # history gap (eg. market closed period, outage, etc.)
             # so just report it to console for now.
+            if lt_frame:
+                reason = 'Possible GAP (or first-datum)'
+            else:
+                assert null_frame
+                reason = 'NULL-FRAME'
+
+            missing_dur: Interval = expected_dur.end - recv_frame_dur.end
             log.warning(
-                'GAP DETECTED:\n'
-                f'last_start_dt: {last_start_dt}\n'
-                f'diff: {diff}\n'
-                f'frame_time_diff_s: {frame_time_diff_s}\n'
+                f'{timeframe}s-series {reason} detected!\n'
+                f'fqme: {mkt.fqme}\n'
+                f'last_start_dt: {last_start_dt}\n\n'
+                f'recv interval: {recv_frame_dur}\n'
+                f'expected interval: {expected_dur}\n\n'
+
+                f'Missing duration of history of {missing_dur.in_words()!r}\n'
+                f'{missing_dur}\n'
            )
+            # await tractor.pause()

         to_push = diff_history(
             array,
|
||||||
# long-term storage.
|
# long-term storage.
|
||||||
if (
|
if (
|
||||||
storage is not None
|
storage is not None
|
||||||
and write_tsdb
|
and
|
||||||
|
write_tsdb
|
||||||
):
|
):
|
||||||
log.info(
|
log.info(
|
||||||
f'Writing {ln} frame to storage:\n'
|
f'Writing {ln} frame to storage:\n'
|
||||||
|
@ -578,6 +626,7 @@ async def start_backfill(
|
||||||
'crypto',
|
'crypto',
|
||||||
'crypto_currency',
|
'crypto_currency',
|
||||||
'fiat', # a "forex pair"
|
'fiat', # a "forex pair"
|
||||||
|
'perpetual_future', # stupid "perps" from cex land
|
||||||
}:
|
}:
|
||||||
# for now, our table key schema is not including
|
# for now, our table key schema is not including
|
||||||
# the dst[/src] source asset token.
|
# the dst[/src] source asset token.
|
||||||
|
@ -685,7 +734,7 @@ async def back_load_from_tsdb(
|
||||||
last_tsdb_dt
|
last_tsdb_dt
|
||||||
and latest_start_dt
|
and latest_start_dt
|
||||||
):
|
):
|
||||||
backfilled_size_s = (
|
backfilled_size_s: Duration = (
|
||||||
latest_start_dt - last_tsdb_dt
|
latest_start_dt - last_tsdb_dt
|
||||||
).seconds
|
).seconds
|
||||||
# if the shm buffer len is not large enough to contain
|
# if the shm buffer len is not large enough to contain
|
||||||
|
@ -908,6 +957,8 @@ async def tsdb_backfill(
|
||||||
f'{pformat(config)}\n'
|
f'{pformat(config)}\n'
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# concurrently load the provider's most-recent-frame AND any
|
||||||
|
# pre-existing tsdb history already saved in `piker` storage.
|
||||||
dt_eps: list[DateTime, DateTime] = []
|
dt_eps: list[DateTime, DateTime] = []
|
||||||
async with trio.open_nursery() as tn:
|
async with trio.open_nursery() as tn:
|
||||||
tn.start_soon(
|
tn.start_soon(
|
||||||
|
@ -918,7 +969,6 @@ async def tsdb_backfill(
|
||||||
timeframe,
|
timeframe,
|
||||||
config,
|
config,
|
||||||
)
|
)
|
||||||
|
|
||||||
tsdb_entry: tuple = await load_tsdb_hist(
|
tsdb_entry: tuple = await load_tsdb_hist(
|
||||||
storage,
|
storage,
|
||||||
mkt,
|
mkt,
|
||||||
|
@ -947,6 +997,25 @@ async def tsdb_backfill(
|
||||||
mr_end_dt,
|
mr_end_dt,
|
||||||
) = dt_eps
|
) = dt_eps
|
||||||
|
|
||||||
|
first_frame_dur_s: Duration = (mr_end_dt - mr_start_dt).seconds
|
||||||
|
calced_frame_size: Duration = mk_duration(
|
||||||
|
seconds=first_frame_dur_s,
|
||||||
|
)
|
||||||
|
# NOTE, attempt to use the backend declared default frame
|
||||||
|
# sizing (as allowed by their time-series query APIs) and
|
||||||
|
# if not provided try to construct a default from the
|
||||||
|
# first frame received above.
|
||||||
|
def_frame_durs: dict[
|
||||||
|
int,
|
||||||
|
Duration,
|
||||||
|
]|None = config.get('frame_types', None)
|
||||||
|
if def_frame_durs:
|
||||||
|
def_frame_size: Duration = def_frame_durs[timeframe]
|
||||||
|
assert def_frame_size == calced_frame_size
|
||||||
|
else:
|
||||||
|
# use what we calced from first frame above.
|
||||||
|
def_frame_size = calced_frame_size
|
||||||
|
|
||||||
# NOTE: when there's no offline data, there's 2 cases:
|
# NOTE: when there's no offline data, there's 2 cases:
|
||||||
# - data backend doesn't support timeframe/sample
|
# - data backend doesn't support timeframe/sample
|
||||||
# period (in which case `dt_eps` should be `None` and
|
# period (in which case `dt_eps` should be `None` and
|
||||||
|
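A small worked example of the frame-size derivation above, assuming `pendulum`'s `datetime`/`duration` factories:

    from pendulum import datetime, duration

    # say the provider's first 60s-timeframe frame spans 2 days:
    mr_start_dt = datetime(2024, 1, 1)
    mr_end_dt = datetime(2024, 1, 3)

    first_frame_dur_s: float = (mr_end_dt - mr_start_dt).total_seconds()
    calced_frame_size = duration(seconds=first_frame_dur_s)
    assert calced_frame_size.in_days() == 2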
@@ -977,7 +1046,7 @@ async def tsdb_backfill(
                partial(
                    start_backfill,
                    get_hist=get_hist,
-                    frame_types=config.get('frame_types', None),
+                    def_frame_duration=def_frame_size,
                    mod=mod,
                    mkt=mkt,
                    shm=shm,
@@ -2,13 +2,13 @@

 [[package]]
 name = "anyio"
-version = "3.7.1"
+version = "4.6.2.post1"
 description = "High level compatibility layer for multiple asynchronous event loop implementations"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.9"
 files = [
-    {file = "anyio-3.7.1-py3-none-any.whl", hash = "sha256:91dee416e570e92c64041bd18b900d1d6fa78dff7048769ce5ac5ddad004fbb5"},
-    {file = "anyio-3.7.1.tar.gz", hash = "sha256:44a3c9aba0f5defa43261a8b3efb97891f2bd7d804e0e1f56419befa1adfc780"},
+    {file = "anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d"},
+    {file = "anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c"},
 ]

 [package.dependencies]
@ -50,7 +50,7 @@ files = [
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "asyncvnc"
|
name = "asyncvnc"
|
||||||
version = "1.1.0"
|
version = "1.1.0"
|
||||||
description = ""
|
description = "Asynchronous VNC for Python"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">= 3.7"
|
python-versions = ">= 3.7"
|
||||||
files = []
|
files = []
|
||||||
|
@ -69,21 +69,22 @@ resolved_reference = "825447564e3af6b0d4a0996793f1ca7fb360c48f"
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "attrs"
|
name = "attrs"
|
||||||
version = "23.1.0"
|
version = "23.2.0"
|
||||||
description = "Classes Without Boilerplate"
|
description = "Classes Without Boilerplate"
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = ">=3.7"
|
python-versions = ">=3.7"
|
||||||
files = [
|
files = [
|
||||||
{file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04"},
|
{file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1"},
|
||||||
{file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015"},
|
{file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.extras]
|
[package.extras]
|
||||||
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
|
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
|
||||||
dev = ["attrs[docs,tests]", "pre-commit"]
|
dev = ["attrs[tests]", "pre-commit"]
|
||||||
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
|
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
|
||||||
tests = ["attrs[tests-no-zope]", "zope-interface"]
|
tests = ["attrs[tests-no-zope]", "zope-interface"]
|
||||||
tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"]
|
tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
|
||||||
|
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "bidict"
|
name = "bidict"
|
||||||
|
@ -103,75 +104,78 @@ test = ["hypothesis", "pytest", "pytest-benchmark[histogram]", "pytest-cov", "py
|
||||||
|
|
||||||
[[package]]
|
[[package]]
|
||||||
name = "cffi"
|
name = "cffi"
|
||||||
version = "1.15.1"
|
version = "1.17.1"
|
||||||
description = "Foreign Function Interface for Python calling C code."
|
description = "Foreign Function Interface for Python calling C code."
|
||||||
optional = false
|
optional = false
|
||||||
python-versions = "*"
|
python-versions = ">=3.8"
|
||||||
files = [
|
files = [
|
||||||
{file = "cffi-1.15.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2"},
|
{file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"},
|
||||||
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2"},
|
{file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"},
|
||||||
{file = "cffi-1.15.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914"},
|
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"},
|
||||||
{file = "cffi-1.15.1-cp27-cp27m-win32.whl", hash = "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3"},
|
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"},
|
||||||
{file = "cffi-1.15.1-cp27-cp27m-win_amd64.whl", hash = "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e"},
|
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"},
|
||||||
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162"},
|
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"},
|
||||||
{file = "cffi-1.15.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b"},
|
{file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21"},
|
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185"},
|
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd"},
|
{file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc"},
|
{file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f"},
|
{file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e"},
|
{file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4"},
|
{file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01"},
|
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e"},
|
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-win32.whl", hash = "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2"},
|
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"},
|
||||||
{file = "cffi-1.15.1-cp310-cp310-win_amd64.whl", hash = "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d"},
|
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac"},
|
{file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83"},
|
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9"},
|
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c"},
|
{file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325"},
|
{file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c"},
|
{file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef"},
|
{file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8"},
|
{file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-win32.whl", hash = "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d"},
|
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"},
|
||||||
{file = "cffi-1.15.1-cp311-cp311-win_amd64.whl", hash = "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104"},
|
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"},
|
||||||
{file = "cffi-1.15.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7"},
|
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"},
|
||||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6"},
|
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"},
|
||||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d"},
|
{file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"},
|
||||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a"},
|
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"},
|
||||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405"},
|
{file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"},
|
||||||
{file = "cffi-1.15.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e"},
|
{file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"},
|
||||||
{file = "cffi-1.15.1-cp36-cp36m-win32.whl", hash = "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf"},
|
{file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"},
|
||||||
{file = "cffi-1.15.1-cp36-cp36m-win_amd64.whl", hash = "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497"},
|
{file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"},
|
||||||
{file = "cffi-1.15.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375"},
|
{file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"},
|
||||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e"},
|
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"},
|
||||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82"},
|
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"},
|
||||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b"},
|
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"},
|
||||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c"},
|
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"},
|
||||||
{file = "cffi-1.15.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426"},
|
{file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"},
|
||||||
{file = "cffi-1.15.1-cp37-cp37m-win32.whl", hash = "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9"},
|
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"},
|
||||||
{file = "cffi-1.15.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045"},
|
{file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"},
|
||||||
{file = "cffi-1.15.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3"},
|
{file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"},
|
||||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a"},
|
{file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"},
|
||||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5"},
|
{file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"},
|
||||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca"},
|
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"},
|
||||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02"},
|
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"},
|
||||||
{file = "cffi-1.15.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192"},
|
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"},
|
||||||
{file = "cffi-1.15.1-cp38-cp38-win32.whl", hash = "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314"},
|
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"},
|
||||||
{file = "cffi-1.15.1-cp38-cp38-win_amd64.whl", hash = "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5"},
|
{file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585"},
|
{file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0"},
|
{file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415"},
|
{file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d"},
|
{file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984"},
|
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35"},
|
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27"},
|
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76"},
|
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3"},
|
{file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-win32.whl", hash = "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee"},
|
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"},
|
||||||
{file = "cffi-1.15.1-cp39-cp39-win_amd64.whl", hash = "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c"},
|
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"},
|
||||||
{file = "cffi-1.15.1.tar.gz", hash = "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9"},
|
{file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"},
|
||||||
|
{file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"},
|
||||||
|
{file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"},
|
||||||
|
{file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"},
|
||||||
]
|
]
|
||||||
|
|
||||||
[package.dependencies]
|
[package.dependencies]
|
||||||
|
@@ -221,47 +225,51 @@ development = ["black", "flake8", "mypy", "pytest", "types-colorama"]

 [[package]]
 name = "cryptography"
-version = "41.0.3"
+version = "43.0.3"
 description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:652627a055cb52a84f8c448185922241dd5217443ca194d5739b44612c5e6507"},
-    {file = "cryptography-41.0.3-cp37-abi3-macosx_10_12_x86_64.whl", hash = "sha256:8f09daa483aedea50d249ef98ed500569841d6498aa9c9f4b0531b9964658922"},
-    {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fd871184321100fb400d759ad0cddddf284c4b696568204d281c902fc7b0d81"},
-    {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84537453d57f55a50a5b6835622ee405816999a7113267739a1b4581f83535bd"},
-    {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3fb248989b6363906827284cd20cca63bb1a757e0a2864d4c1682a985e3dca47"},
-    {file = "cryptography-41.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:42cb413e01a5d36da9929baa9d70ca90d90b969269e5a12d39c1e0d475010116"},
-    {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:aeb57c421b34af8f9fe830e1955bf493a86a7996cc1338fe41b30047d16e962c"},
-    {file = "cryptography-41.0.3-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:6af1c6387c531cd364b72c28daa29232162010d952ceb7e5ca8e2827526aceae"},
-    {file = "cryptography-41.0.3-cp37-abi3-win32.whl", hash = "sha256:0d09fb5356f975974dbcb595ad2d178305e5050656affb7890a1583f5e02a306"},
-    {file = "cryptography-41.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:a983e441a00a9d57a4d7c91b3116a37ae602907a7618b882c8013b5762e80574"},
-    {file = "cryptography-41.0.3-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5259cb659aa43005eb55a0e4ff2c825ca111a0da1814202c64d28a985d33b087"},
-    {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:67e120e9a577c64fe1f611e53b30b3e69744e5910ff3b6e97e935aeb96005858"},
-    {file = "cryptography-41.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7efe8041897fe7a50863e51b77789b657a133c75c3b094e51b5e4b5cec7bf906"},
-    {file = "cryptography-41.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:ce785cf81a7bdade534297ef9e490ddff800d956625020ab2ec2780a556c313e"},
-    {file = "cryptography-41.0.3-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:57a51b89f954f216a81c9d057bf1a24e2f36e764a1ca9a501a6964eb4a6800dd"},
-    {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c2f0d35703d61002a2bbdcf15548ebb701cfdd83cdc12471d2bae80878a4207"},
-    {file = "cryptography-41.0.3-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:23c2d778cf829f7d0ae180600b17e9fceea3c2ef8b31a99e3c694cbbf3a24b84"},
-    {file = "cryptography-41.0.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:95dd7f261bb76948b52a5330ba5202b91a26fbac13ad0e9fc8a3ac04752058c7"},
-    {file = "cryptography-41.0.3-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:41d7aa7cdfded09b3d73a47f429c298e80796c8e825ddfadc84c8a7f12df212d"},
-    {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d0d651aa754ef58d75cec6edfbd21259d93810b73f6ec246436a21b7841908de"},
-    {file = "cryptography-41.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:ab8de0d091acbf778f74286f4989cf3d1528336af1b59f3e5d2ebca8b5fe49e1"},
-    {file = "cryptography-41.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a74fbcdb2a0d46fe00504f571a2a540532f4c188e6ccf26f1f178480117b33c4"},
-    {file = "cryptography-41.0.3.tar.gz", hash = "sha256:6d192741113ef5e30d89dcb5b956ef4e1578f304708701b8b73d38e3e1461f34"},
+    {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"},
+    {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"},
+    {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"},
+    {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"},
+    {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"},
+    {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"},
+    {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"},
+    {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"},
+    {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"},
+    {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"},
+    {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"},
+    {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"},
+    {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"},
+    {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"},
+    {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"},
+    {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"},
+    {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"},
+    {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"},
+    {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"},
+    {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"},
+    {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"},
+    {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"},
+    {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"},
+    {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"},
+    {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"},
+    {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"},
+    {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"},
 ]

 [package.dependencies]
-cffi = ">=1.12"
+cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}

 [package.extras]
 docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
-docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"]
+docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
 nox = ["nox"]
-pep8test = ["black", "check-sdist", "mypy", "ruff"]
+pep8test = ["check-sdist", "click", "mypy", "ruff"]
 sdist = ["build"]
 ssh = ["bcrypt (>=3.1.5)"]
-test = ["pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
 test-randomorder = ["pytest-randomly"]

 [[package]]
@@ -333,13 +341,13 @@ files = [

 [[package]]
 name = "eventkit"
-version = "1.0.1"
+version = "1.0.3"
 description = "Event-driven data pipelines"
 optional = false
 python-versions = "*"
 files = [
-    {file = "eventkit-1.0.1-py3-none-any.whl", hash = "sha256:6060a6aa04d5c5d20f2e55b7c17e2a22e8d31f88f2c2791d60eab3301aa040da"},
-    {file = "eventkit-1.0.1.tar.gz", hash = "sha256:56b99a6205f61cd995aa5e0036e37bd61f052f7d32560e60b6fe45e319a7ef3a"},
+    {file = "eventkit-1.0.3-py3-none-any.whl", hash = "sha256:0e199527a89aff9d195b9671ad45d2cc9f79ecda0900de8ecfb4c864d67ad6a2"},
+    {file = "eventkit-1.0.3.tar.gz", hash = "sha256:99497f6f3c638a50ff7616f2f8cd887b18bbff3765dc1bd8681554db1467c933"},
 ]

 [package.dependencies]
@@ -347,13 +355,13 @@ numpy = "*"

 [[package]]
 name = "exceptiongroup"
-version = "1.1.3"
+version = "1.2.2"
 description = "Backport of PEP 654 (exception groups)"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"},
-    {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"},
+    {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"},
+    {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"},
 ]

 [package.extras]
@@ -480,31 +488,37 @@ nest-asyncio = "*"

 [[package]]
 name = "idna"
-version = "3.4"
+version = "3.10"
 description = "Internationalized Domain Names in Applications (IDNA)"
 optional = false
-python-versions = ">=3.5"
+python-versions = ">=3.6"
 files = [
-    {file = "idna-3.4-py3-none-any.whl", hash = "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2"},
-    {file = "idna-3.4.tar.gz", hash = "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4"},
+    {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"},
+    {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"},
 ]

+[package.extras]
+all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"]
+
 [[package]]
 name = "importlib-metadata"
-version = "6.8.0"
+version = "8.5.0"
 description = "Read metadata from Python packages"
 optional = false
 python-versions = ">=3.8"
 files = [
-    {file = "importlib_metadata-6.8.0-py3-none-any.whl", hash = "sha256:3ebb78df84a805d7698245025b975d9d67053cd94c79245ba4b3eb694abe68bb"},
-    {file = "importlib_metadata-6.8.0.tar.gz", hash = "sha256:dbace7892d8c0c4ac1ad096662232f831d4e64f4c4545bd53016a3e9d4654743"},
+    {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"},
+    {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"},
 ]

 [package.dependencies]
-zipp = ">=0.5"
+zipp = ">=3.20"

 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
 perf = ["ipython"]
 testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"]
@@ -644,13 +658,13 @@ files = [

 [[package]]
 name = "nest-asyncio"
-version = "1.5.7"
+version = "1.6.0"
 description = "Patch asyncio to allow nested event loops"
 optional = false
 python-versions = ">=3.5"
 files = [
-    {file = "nest_asyncio-1.5.7-py3-none-any.whl", hash = "sha256:5301c82941b550b3123a1ea772ba9a1c80bad3a182be8c1a5ae6ad3be57a9657"},
-    {file = "nest_asyncio-1.5.7.tar.gz", hash = "sha256:6a80f7b98f24d9083ed24608977c09dd608d83f91cccc24c9d2cba6d10e01c10"},
+    {file = "nest_asyncio-1.6.0-py3-none-any.whl", hash = "sha256:87af6efd6b5e897c81050477ef65c62e2b2f35d51703cae01aff2905b1852e1c"},
+    {file = "nest_asyncio-1.6.0.tar.gz", hash = "sha256:6f172d5449aca15afd6c646851f4e31e02c598d553a667e38cafa997cfec55fe"},
 ]

 [[package]]
@@ -729,13 +743,13 @@ files = [

 [[package]]
 name = "outcome"
-version = "1.2.0"
+version = "1.3.0.post0"
 description = "Capture the outcome of Python function calls."
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "outcome-1.2.0-py2.py3-none-any.whl", hash = "sha256:c4ab89a56575d6d38a05aa16daeaa333109c1f96167aba8901ab18b6b5e0f7f5"},
-    {file = "outcome-1.2.0.tar.gz", hash = "sha256:6f82bd3de45da303cf1f771ecafa1633750a358436a8bb60e06a1ceb745d2672"},
+    {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"},
+    {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"},
 ]

 [package.dependencies]
@@ -743,13 +757,13 @@ attrs = ">=19.2.0"

 [[package]]
 name = "packaging"
-version = "23.1"
+version = "24.2"
 description = "Core utilities for Python packages"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"},
-    {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"},
+    {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"},
+    {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"},
 ]

 [[package]]
@@ -836,24 +850,24 @@ xlsxwriter = ["xlsxwriter"]

 [[package]]
 name = "pycparser"
-version = "2.21"
+version = "2.22"
 description = "C parser in Python"
 optional = false
-python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+python-versions = ">=3.8"
 files = [
-    {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"},
-    {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"},
+    {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"},
+    {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
 ]

 [[package]]
 name = "pygments"
-version = "2.16.1"
+version = "2.18.0"
 description = "Pygments is a syntax highlighting package written in Python."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "Pygments-2.16.1-py3-none-any.whl", hash = "sha256:13fc09fa63bc8d8671a6d247e1eb303c4b343eaee81d861f3404db2935653692"},
-    {file = "Pygments-2.16.1.tar.gz", hash = "sha256:1daff0494820c69bc8941e407aa20f577374ee88364ee10a98fdbe0aece96e29"},
+    {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"},
+    {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"},
 ]

 [package.extras]
@@ -861,24 +875,24 @@ plugins = ["importlib-metadata"]

 [[package]]
 name = "pyreadline3"
-version = "3.4.1"
+version = "3.5.4"
 description = "A python implementation of GNU readline."
 optional = false
-python-versions = "*"
+python-versions = ">=3.8"
 files = [
-    {file = "pyreadline3-3.4.1-py3-none-any.whl", hash = "sha256:b0efb6516fd4fb07b45949053826a62fa4cb353db5be2bbb4a7aa1fdd1e345fb"},
-    {file = "pyreadline3-3.4.1.tar.gz", hash = "sha256:6f3d1f7b8a31ba32b73917cefc1f28cc660562f39aea8646d30bd6eff21f7bae"},
+    {file = "pyreadline3-3.5.4-py3-none-any.whl", hash = "sha256:eaf8e6cc3c49bcccf145fc6067ba8643d1df34d604a1ec0eccbf7a18e6d3fae6"},
+    {file = "pyreadline3-3.5.4.tar.gz", hash = "sha256:8d57d53039a1c75adba8e50dd3d992b28143480816187ea5efbd5c78e6c885b7"},
 ]

 [[package]]
 name = "python-dateutil"
-version = "2.8.2"
+version = "2.9.0.post0"
 description = "Extensions to the standard Python datetime module"
 optional = false
 python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
 files = [
-    {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"},
-    {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"},
+    {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"},
+    {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"},
 ]

 [package.dependencies]
@@ -897,13 +911,13 @@ files = [

 [[package]]
 name = "rich"
-version = "13.5.2"
+version = "13.9.4"
 description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
 optional = false
-python-versions = ">=3.7.0"
+python-versions = ">=3.8.0"
 files = [
-    {file = "rich-13.5.2-py3-none-any.whl", hash = "sha256:146a90b3b6b47cac4a73c12866a499e9817426423f57c5a66949c086191a8808"},
-    {file = "rich-13.5.2.tar.gz", hash = "sha256:fb9d6c0a0f643c99eed3875b5377a184132ba9be4d61516a55273d3554d75a39"},
+    {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"},
+    {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"},
 ]

 [package.dependencies]
@@ -926,13 +940,13 @@ files = [

 [[package]]
 name = "sniffio"
-version = "1.3.0"
+version = "1.3.1"
 description = "Sniff out which async library your code is running under"
 optional = false
 python-versions = ">=3.7"
 files = [
-    {file = "sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"},
-    {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"},
+    {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"},
+    {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"},
 ]

 [[package]]
@@ -962,24 +976,24 @@ pyreadline3 = {version = "*", markers = "platform_system == \"Windows\""}

 [[package]]
 name = "tomli"
-version = "2.0.1"
+version = "2.1.0"
 description = "A lil' TOML parser"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"},
-    {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
+    {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
+    {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
 ]

 [[package]]
 name = "tomli-w"
-version = "1.0.0"
+version = "1.1.0"
 description = "A lil' TOML writer"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.9"
 files = [
-    {file = "tomli_w-1.0.0-py3-none-any.whl", hash = "sha256:9f2a07e8be30a0729e533ec968016807069991ae2fd921a78d42f429ae5f4463"},
-    {file = "tomli_w-1.0.0.tar.gz", hash = "sha256:f463434305e0336248cac9c2dc8076b707d8a12d019dd349f5c1e382dd1ae1b9"},
+    {file = "tomli_w-1.1.0-py3-none-any.whl", hash = "sha256:1403179c78193e3184bfaade390ddbd071cba48a32a2e62ba11aae47490c63f7"},
+    {file = "tomli_w-1.1.0.tar.gz", hash = "sha256:49e847a3a304d516a169a601184932ef0f6b61623fe680f836a2aa7128ed0d33"},
 ]

 [[package]]
@@ -1114,13 +1128,13 @@ wsproto = ">=0.14"

 [[package]]
 name = "typer"
-version = "0.9.0"
+version = "0.9.4"
 description = "Typer, build great CLIs. Easy to code. Based on Python type hints."
 optional = false
 python-versions = ">=3.6"
 files = [
-    {file = "typer-0.9.0-py3-none-any.whl", hash = "sha256:5d96d986a21493606a358cae4461bd8cdf83cbf33a5aa950ae629ca3b51467ee"},
-    {file = "typer-0.9.0.tar.gz", hash = "sha256:50922fd79aea2f4751a8e0408ff10d2662bd0c8bbfa84755a699f3bada2978b2"},
+    {file = "typer-0.9.4-py3-none-any.whl", hash = "sha256:aa6c4a4e2329d868b80ecbaf16f807f2b54e192209d7ac9dd42691d63f7a54eb"},
+    {file = "typer-0.9.4.tar.gz", hash = "sha256:f714c2d90afae3a7929fcd72a3abb08df305e1ff61719381384211c4070af57f"},
 ]

 [package.dependencies]
@@ -1131,17 +1145,17 @@ typing-extensions = ">=3.7.4.3"
 all = ["colorama (>=0.4.3,<0.5.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
 dev = ["autoflake (>=1.3.1,<2.0.0)", "flake8 (>=3.8.3,<4.0.0)", "pre-commit (>=2.17.0,<3.0.0)"]
 doc = ["cairosvg (>=2.5.2,<3.0.0)", "mdx-include (>=1.4.1,<2.0.0)", "mkdocs (>=1.1.2,<2.0.0)", "mkdocs-material (>=8.1.4,<9.0.0)", "pillow (>=9.3.0,<10.0.0)"]
-test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.910)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]
+test = ["black (>=22.3.0,<23.0.0)", "coverage (>=6.2,<7.0)", "isort (>=5.0.6,<6.0.0)", "mypy (==0.971)", "pytest (>=4.4.0,<8.0.0)", "pytest-cov (>=2.10.0,<5.0.0)", "pytest-sugar (>=0.9.4,<0.10.0)", "pytest-xdist (>=1.32.0,<4.0.0)", "rich (>=10.11.0,<14.0.0)", "shellingham (>=1.3.0,<2.0.0)"]

 [[package]]
 name = "typing-extensions"
-version = "4.7.1"
-description = "Backported and Experimental Type Hints for Python 3.7+"
+version = "4.12.2"
+description = "Backported and Experimental Type Hints for Python 3.8+"
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.8"
 files = [
-    {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"},
-    {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"},
+    {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"},
+    {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"},
 ]

 [[package]]
@@ -1244,18 +1258,22 @@ h11 = ">=0.9.0,<1"

 [[package]]
 name = "zipp"
-version = "3.16.2"
+version = "3.21.0"
 description = "Backport of pathlib-compatible object wrapper for zip files"
 optional = false
-python-versions = ">=3.8"
+python-versions = ">=3.9"
 files = [
-    {file = "zipp-3.16.2-py3-none-any.whl", hash = "sha256:679e51dd4403591b2d6838a48de3d283f3d188412a9782faadf845f298736ba0"},
-    {file = "zipp-3.16.2.tar.gz", hash = "sha256:ebc15946aa78bd63458992fc81ec3b6f7b1e92d51c35e6de1c3804e73b799147"},
+    {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"},
+    {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"},
 ]

 [package.extras]
-docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
-testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"]
+check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"]
+cover = ["pytest-cov"]
+doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"]
+enabler = ["pytest-enabler (>=2.2)"]
+test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"]
+type = ["pytest-mypy"]

 [metadata]
 lock-version = "2.0"
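Editorial note: every bump in the lockfile diff above follows the same shape: a new `version`, a usually-tightened `python-versions` floor, and refreshed per-artifact `hash = "sha256:..."` pins. Those pins are what the installer verifies at install time; a minimal sketch of performing the same check by hand (the wheel path below is hypothetical, the expected digest is copied from the anyio lock entry):

import hashlib

# recompute the sha256 pin for a downloaded artifact and compare it by eye
# to the corresponding lock entry.
with open('anyio-4.6.2.post1-py3-none-any.whl', 'rb') as f:
    digest = hashlib.sha256(f.read()).hexdigest()

print(f'sha256:{digest}')
# expect: sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d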
pyproject.toml
@@ -15,8 +15,8 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 [build-system]
-requires = ["poetry-core"]
-build-backend = "poetry.core.masonry.api"
+requires = ["hatchling"]
+build-backend = "hatchling.build"

 # ------ - ------

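Editorial note: the `[build-system]` hunk above is the actual migration point of this file: PEP 517 frontends read this table to decide which backend builds the project, so swapping `poetry-core` for `hatchling` retools the packaging without touching any runtime code. A quick hedged check of what a given checkout declares, using stdlib `tomllib` (available on the 3.11+ floor this project already requires):

import tomllib  # stdlib since python 3.11

with open('pyproject.toml', 'rb') as f:
    cfg = tomllib.load(f)

# after this change these should read ['hatchling'] / 'hatchling.build'.
print(cfg['build-system']['requires'])
print(cfg['build-system']['build-backend'])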
@@ -25,130 +25,123 @@ build-backend = "poetry.core.masonry.api"
 ignore = []

 # https://docs.astral.sh/ruff/settings/#lint_per-file-ignores
-"piker/ui/qt.py" = [
-"E402",
-'F401', # unused imports (without __all__ or blah as blah)
-# "F841", # unused variable rules
-]
+# "piker/ui/qt.py" = [
+# "E402",
+# 'F401', # unused imports (without __all__ or blah as blah)
+# # "F841", # unused variable rules
+# ]
 # ignore-init-module-imports = false

 # ------ - ------

-[tool.poetry]
-name = "piker"
-version = "0.1.0.alpha0.dev0"
-description = "trading gear for hackers"
-authors = ["Tyler Goodlet <goodboy_foss@protonmail.com>"]
-license = "AGPLv3"
-readme = "README.rst"
-
-# ------ - ------
-
-[tool.poetry.dependencies]
-async-generator = "^1.10"
-attrs = "^23.1.0"
-bidict = "^0.22.1"
-colorama = "^0.4.6"
-colorlog = "^6.7.0"
-cython = "^3.0.0"
-greenback = "^1.1.1"
-ib-insync = "^0.9.86"
-msgspec = "^0.18.0"
-numba = "^0.59.0"
-numpy = "^1.25"
-polars = "^0.18.13"
-pygments = "^2.16.1"
-python = ">=3.11, <3.13"
-rich = "^13.5.2"
-# setuptools = "^68.0.0"
-tomli = "^2.0.1"
-tomli-w = "^1.0.0"
-trio-util = "^0.7.0"
-trio-websocket = "^0.10.3"
-typer = "^0.9.0"
-rapidfuzz = "^3.5.2"
-pdbp = "^1.5.0"
-trio = "^0.24"
-pendulum = "^3.0.0"
-httpx = "^0.27.0"
-
-[tool.poetry.dependencies.tractor]
-develop = true
-git = 'https://github.com/goodboy/tractor.git'
-branch = 'asyncio_debugger_support'
-# path = "../tractor"
-
-[tool.poetry.dependencies.asyncvnc]
-git = 'https://github.com/pikers/asyncvnc.git'
-branch = 'main'
-
-[tool.poetry.dependencies.tomlkit]
-develop = true
-git = 'https://github.com/pikers/tomlkit.git'
-branch = 'piker_pin'
-# path = "../tomlkit/"
-
-[tool.poetry.group.uis]
-optional = true
-[tool.poetry.group.uis.dependencies]
-# https://python-poetry.org/docs/managing-dependencies/#dependency-groups
-# TODO: make sure the levenshtein shit compiles on nix..
-# rapidfuzz = {extras = ["speedup"], version = "^0.18.0"}
-rapidfuzz = "^3.2.0"
-qdarkstyle = ">=3.0.2"
-pyqtgraph = { git = 'https://github.com/pikers/pyqtgraph.git' }
-
-# ------ - ------
-pyqt6 = "^6.7.0"
-
-[tool.poetry.group.dev]
-optional = true
-[tool.poetry.group.dev.dependencies]
-# testing / CI
-pytest = "^6.0.0"
-elasticsearch = "^8.9.0"
-xonsh = "^0.14.2"
-prompt-toolkit = "3.0.40"
-
-# console ehancements and eventually remote debugging
-# extras/helpers.
-# TODO: add a toolset that makes debugging a `pikerd` service
-# (tree) easy to hack on directly using more or less the local env:
-# - xonsh + xxh
-# - rsyscall + pdbp
-# - actor runtime control console like BEAM/OTP
|
|
||||||
|
|
||||||
# ------ - ------
|
|
||||||
|
|
||||||
# TODO: add an `--only daemon` group for running non-ui / pikerd
|
|
||||||
# service tree in distributed mode B)
|
|
||||||
# https://python-poetry.org/docs/managing-dependencies/#installing-group-dependencies
|
|
||||||
# [tool.poetry.group.daemon.dependencies]
|
|
||||||
|
|
||||||
[tool.poetry.scripts]
|
|
||||||
piker = 'piker.cli:cli'
|
|
||||||
pikerd = 'piker.cli:pikerd'
|
|
||||||
ledger = 'piker.accounting.cli:ledger'
|
|
||||||
|
|
||||||
|
|
||||||
[project]
|
[project]
|
||||||
keywords=[
|
name = "piker"
|
||||||
"async",
|
version = "0.1.0a0dev0"
|
||||||
"trading",
|
description = "trading gear for hackers"
|
||||||
"finance",
|
authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }]
|
||||||
"quant",
|
requires-python = ">=3.12, <3.13"
|
||||||
"charting",
|
license = "AGPL-3.0-or-later"
|
||||||
|
readme = "README.rst"
|
||||||
|
keywords = [
|
||||||
|
"async",
|
||||||
|
"trading",
|
||||||
|
"finance",
|
||||||
|
"quant",
|
||||||
|
"charting",
|
||||||
]
|
]
|
||||||
classifiers=[
|
classifiers = [
|
||||||
'Development Status :: 3 - Alpha',
|
"Development Status :: 3 - Alpha",
|
||||||
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
|
"License :: OSI Approved :: GNU Affero General Public License v3 or later (AGPLv3+)",
|
||||||
'Operating System :: POSIX :: Linux',
|
"Operating System :: POSIX :: Linux",
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
"Programming Language :: Python :: 3 :: Only",
|
"Programming Language :: Python :: 3 :: Only",
|
||||||
"Programming Language :: Python :: 3.11",
|
"Programming Language :: Python :: 3.11",
|
||||||
"Programming Language :: Python :: 3.12",
|
"Programming Language :: Python :: 3.12",
|
||||||
'Intended Audience :: Financial and Insurance Industry',
|
"Intended Audience :: Financial and Insurance Industry",
|
||||||
'Intended Audience :: Science/Research',
|
"Intended Audience :: Science/Research",
|
||||||
'Intended Audience :: Developers',
|
"Intended Audience :: Developers",
|
||||||
'Intended Audience :: Education',
|
"Intended Audience :: Education",
|
||||||
]
|
]
|
||||||
|
dependencies = [
|
||||||
|
"async-generator >=1.10, <2.0.0",
|
||||||
|
"attrs >=23.1.0, <24.0.0",
|
||||||
|
"bidict >=0.22.1, <0.23.0",
|
||||||
|
"colorama >=0.4.6, <0.5.0",
|
||||||
|
"colorlog >=6.7.0, <7.0.0",
|
||||||
|
"ib-insync >=0.9.86, <0.10.0",
|
||||||
|
"numba >=0.59.0, <0.60.0",
|
||||||
|
"numpy >=1.25, <2.0",
|
||||||
|
"polars >=0.18.13, <0.19.0",
|
||||||
|
"pygments >=2.16.1, <3.0.0",
|
||||||
|
"rich >=13.5.2, <14.0.0",
|
||||||
|
"tomli >=2.0.1, <3.0.0",
|
||||||
|
"tomli-w >=1.0.0, <2.0.0",
|
||||||
|
"trio-util >=0.7.0, <0.8.0",
|
||||||
|
"trio-websocket >=0.10.3, <0.11.0",
|
||||||
|
"typer >=0.9.0, <1.0.0",
|
||||||
|
"rapidfuzz >=3.5.2, <4.0.0",
|
||||||
|
"pdbp >=1.5.0, <2.0.0",
|
||||||
|
"trio >=0.24, <0.25",
|
||||||
|
"pendulum >=3.0.0, <4.0.0",
|
||||||
|
"httpx >=0.27.0, <0.28.0",
|
||||||
|
"cryptofeed >=2.4.0, <3.0.0",
|
||||||
|
"pyarrow >=17.0.0, <18.0.0",
|
||||||
|
"websockets ==12.0",
|
||||||
|
"msgspec",
|
||||||
|
"tractor",
|
||||||
|
"asyncvnc",
|
||||||
|
"tomlkit",
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.optional-dependencies]
|
||||||
|
uis = [
|
||||||
|
# https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
|
||||||
|
# TODO: make sure the levenshtein shit compiles on nix..
|
||||||
|
# rapidfuzz = {extras = ["speedup"], version = "^0.18.0"}
|
||||||
|
"rapidfuzz >=3.2.0, <4.0.0",
|
||||||
|
"qdarkstyle >=3.0.2, <4.0.0",
|
||||||
|
"pyqt6 >=6.7.0, <7.0.0",
|
||||||
|
"pyqtgraph",
|
||||||
|
|
||||||
|
# ------ - ------
|
||||||
|
|
||||||
|
# TODO: add an `--only daemon` group for running non-ui / pikerd
|
||||||
|
# service tree in distributed mode B)
|
||||||
|
# https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
|
||||||
|
# [project.optional-dependencies]
|
||||||
|
]
|
||||||
|
|
||||||
|
[dependency-groups]
|
||||||
|
dev = [
|
||||||
|
"pytest >=6.0.0, <7.0.0",
|
||||||
|
"elasticsearch >=8.9.0, <9.0.0",
|
||||||
|
"xonsh >=0.14.2, <0.15.0",
|
||||||
|
"prompt-toolkit ==3.0.40",
|
||||||
|
"cython >=3.0.0, <4.0.0",
|
||||||
|
"greenback >=1.1.1, <2.0.0",
|
||||||
|
# console ehancements and eventually remote debugging
|
||||||
|
# extras/helpers.
|
||||||
|
# TODO: add a toolset that makes debugging a `pikerd` service
|
||||||
|
# (tree) easy to hack on directly using more or less the local env:
|
||||||
|
# - xonsh + xxh
|
||||||
|
# - rsyscall + pdbp
|
||||||
|
# - actor runtime control console like BEAM/OTP
|
||||||
|
]
|
||||||
|
|
||||||
|
[project.scripts]
|
||||||
|
piker = "piker.cli:cli"
|
||||||
|
pikerd = "piker.cli:pikerd"
|
||||||
|
ledger = "piker.accounting.cli:ledger"
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.sdist]
|
||||||
|
include = ["piker"]
|
||||||
|
|
||||||
|
[tool.hatch.build.targets.wheel]
|
||||||
|
include = ["piker"]
|
||||||
|
|
||||||
|
[tool.uv.sources]
|
||||||
|
pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
|
||||||
|
asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" }
|
||||||
|
tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" }
|
||||||
|
msgspec = { git = "https://github.com/jcrist/msgspec.git" }
|
||||||
|
tractor = { path = "../tractor" }
|
||||||
|
|
|
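Taken together this hunk retires the [tool.poetry] tables in favour of PEP 621 [project] metadata, hatchling as the build backend, PEP 735 [dependency-groups] for dev tooling, and [tool.uv.sources] for the git/path pins. A quick local smoke-test of the new layout (assuming a uv release with dependency-group support, and a sibling ../tractor checkout to satisfy the path source) would be `uv sync --extra uis` followed by `uv run piker --help` to exercise the [project.scripts] entry points. Worth flagging for review: `tractor = { path = "../tractor" }` makes resolution depend on a local checkout, which CI and fresh clones will need to provide.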
@@ -10,7 +10,7 @@ from piker import (
     config,
 )
 from piker.service import (
-    Services,
+    get_service_mngr,
 )
 from piker.log import get_console_log

@@ -129,7 +129,7 @@ async def _open_test_pikerd(
        ) as service_manager,
    ):
        # this proc/actor is the pikerd
-        assert service_manager is Services
+        assert service_manager is get_service_mngr()

        async with tractor.wait_for_actor(
            'pikerd',

@@ -26,7 +26,7 @@ import pytest
 import tractor
 from uuid import uuid4

-from piker.service import Services
+from piker.service import ServiceMngr
 from piker.log import get_logger
 from piker.clearing._messages import (
     Order,
@@ -158,7 +158,7 @@ def load_and_check_pos(


 def test_ems_err_on_bad_broker(
-    open_test_pikerd: Services,
+    open_test_pikerd: ServiceMngr,
     loglevel: str,
 ):
     async def load_bad_fqme():

@@ -15,7 +15,7 @@ import tractor

 from piker.service import (
     find_service,
-    Services,
+    ServiceMngr,
 )
 from piker.data import (
     open_feed,
@@ -44,7 +44,7 @@ def test_runtime_boot(
     async def main():
         port = 6666
         daemon_addr = ('127.0.0.1', port)
-        services: Services
+        services: ServiceMngr

         async with (
             open_test_pikerd(
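The test hunks above all track a single rename in piker.service: the class-level `Services` registry gives way to a `ServiceMngr` type retrieved via a `get_service_mngr()` accessor. A minimal sketch of the pattern the tests now rely on; only the two imported names are taken from this diff, any further `ServiceMngr` API is an assumption:

    # sketch: names as introduced by the hunks above; everything else assumed
    from piker.service import (
        get_service_mngr,
        ServiceMngr,
    )

    async def check_singleton() -> None:
        # conftest now asserts that the manager yielded by the test
        # ctx-manager is the same instance the accessor hands back:
        mngr: ServiceMngr = get_service_mngr()
        assert isinstance(mngr, ServiceMngr)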