subsys_refinery #29

Closed
goodboy wants to merge 50 commits from subsys_refinery into gitea_feats
31 changed files with 2037 additions and 1296 deletions

View File

@@ -88,22 +88,57 @@ a sane install with `uv`
 ************************
 bc why install with `python` when you can faster with `rust` ::

-    uv lock
+    uv sync
+    # ^ astral's docs,
+    # https://docs.astral.sh/uv/concepts/projects/sync/
+
+include all GUIs (ex. for charting)::
+
+    uv sync --extra uis
+
+AND with all our hacking tools and WIP integrations::
+
+    uv sync --dev --all-extras
+
+Ensure you can run the root-daemon::
+
+    uv run pikerd [-l info --pdb]

-hacky install on nixos
-**********************
+install on nix(os)
+******************
 ``NixOS`` is our core devs' distro of choice for which we offer
-a stringently defined development shell envoirment that can be loaded with::
+a stringently defined development shell envoirment that can currently
+be applied in one of 2 ways::

+    # ONLY if running on X11
     nix-shell default.nix

+Or if you prefer flakes style and a modern DE::
+
+    # ONLY if also running on Wayland
+    nix develop  # for default bash
+    nix develop -c uv run xonsh  # for @goodboy's preferred sh B)

 start a chart
 *************
 run a realtime OHLCV chart stand-alone::

-    piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken
+    [uv run] piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken
+    # ^^^ iff you haven't activated the py-env,
+    # - https://docs.astral.sh/uv/concepts/projects/run/
+    #
+    # in order to create an explicit virt-env see,
+    # - https://docs.astral.sh/uv/concepts/projects/layout/#the-project-environment
+    # - https://docs.astral.sh/uv/pip/environments/
+    #
+    # use $UV_PROJECT_ENVIRONMENT to select any non-`.venv/`
+    # as the venv sudir in the repo's root.
+    # - https://docs.astral.sh/uv/reference/environment/#uv_project_environment

 this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets from 2 diff cexes
 overlayed on the same graph. Use of `piker` without first starting

View File

@@ -1,7 +1,9 @@
 [network]
-tsdb.backend = 'marketstore'
-tsdb.host = 'localhost'
-tsdb.grpc_port = 5995
+pikerd = [
+    '/ipv4/127.0.0.1/tcp/6116',  # std localhost daemon-actor tree
+    # '/uds/6116',  # TODO std uds socket file
+]

 [ui]
 # set custom font + size which will scale entire UI
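The new `pikerd` value is a list of multiaddrs, one per transport endpoint the daemon-actor tree should bind. A minimal sketch (not piker's actual parser; the field names are illustrative assumptions only) of how such an entry decomposes:

    # hypothetical multiaddr unpacking sketch
    def parse_maddr(maddr: str) -> dict[str, str]:
        # '/ipv4/127.0.0.1/tcp/6116' -> layer/addr/transport/port
        _, layer, addr, transport, port = maddr.split('/')
        return {
            'layer': layer,          # 'ipv4'
            'addr': addr,            # '127.0.0.1'
            'transport': transport,  # 'tcp'
            'port': port,            # '6116'
        }

    assert parse_maddr('/ipv4/127.0.0.1/tcp/6116')['port'] == '6116'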

View File

@@ -11,11 +11,12 @@ let
   libxkbcommonStorePath = lib.getLib libxkbcommon;
   xcbutilcursorStorePath = lib.getLib xcb-util-cursor;

-  qtpyStorePath = lib.getLib python312Packages.qtpy;
-  pyqt6StorePath = lib.getLib python312Packages.pyqt6;
-  pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip;
-  rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz;
-  qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle;
+  pypkgs = python313Packages;
+  qtpyStorePath = lib.getLib pypkgs.qtpy;
+  pyqt6StorePath = lib.getLib pypkgs.pyqt6;
+  pyqt6SipStorePath = lib.getLib pypkgs.pyqt6-sip;
+  rapidfuzzStorePath = lib.getLib pypkgs.rapidfuzz;
+  qdarkstyleStorePath = lib.getLib pypkgs.qdarkstyle;

   xorgLibX11StorePath = lib.getLib xorg.libX11;
   xorgLibxcbStorePath = lib.getLib xorg.libxcb;
@@ -51,12 +52,12 @@ stdenv.mkDerivation {
     xorg.xcbutilrenderutil

     # Python requirements.
-    python312Full
-    python312Packages.uv
-    python312Packages.qdarkstyle
-    python312Packages.rapidfuzz
-    python312Packages.pyqt6
-    python312Packages.qtpy
+    python313
+    uv
+    pypkgs.qdarkstyle
+    pypkgs.rapidfuzz
+    pypkgs.pyqt6
+    pypkgs.qtpy
   ];

   src = null;
   shellHook = ''
@@ -113,11 +114,11 @@ stdenv.mkDerivation {
     export LD_LIBRARY_PATH

-    RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages"
-    QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages"
-    QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages"
-    PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages"
-    PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages"
+    RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.13/site-packages"
+    QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.13/site-packages"
+    QTPY_PATH="${qtpyStorePath}/lib/python3.13/site-packages"
+    PYQT6_PATH="${pyqt6StorePath}/lib/python3.13/site-packages"
+    PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.13/site-packages"

     PATCH="$PATCH:$RPDFUZZ_PATH"
     PATCH="$PATCH:$QDRKSTYLE_PATH"
@@ -127,8 +128,8 @@ stdenv.mkDerivation {
     export PATCH

-    # Install deps
-    uv lock
+    # install all dev and extras
+    uv sync --dev --all-extras
   '';
 }

View File

@@ -1,135 +1,24 @@
 {
   "nodes": {
-    "flake-utils": {
-      "inputs": {
-        "systems": "systems"
-      },
-      "locked": {
-        "lastModified": 1689068808,
-        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
-    "flake-utils_2": {
-      "inputs": {
-        "systems": "systems_2"
-      },
-      "locked": {
-        "lastModified": 1689068808,
-        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
-        "type": "github"
-      },
-      "original": {
-        "owner": "numtide",
-        "repo": "flake-utils",
-        "type": "github"
-      }
-    },
-    "nix-github-actions": {
-      "inputs": {
-        "nixpkgs": [
-          "poetry2nix",
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1688870561,
-        "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=",
-        "owner": "nix-community",
-        "repo": "nix-github-actions",
-        "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-community",
-        "repo": "nix-github-actions",
-        "type": "github"
-      }
-    },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1692174805,
-        "narHash": "sha256-xmNPFDi/AUMIxwgOH/IVom55Dks34u1g7sFKKebxUm0=",
-        "owner": "NixOS",
+        "lastModified": 1765779637,
+        "narHash": "sha256-KJ2wa/BLSrTqDjbfyNx70ov/HdgNBCBBSQP3BIzKnv4=",
+        "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "caac0eb6bdcad0b32cb2522e03e4002c8975c62e",
+        "rev": "1306659b587dc277866c7b69eb97e5f07864d8c4",
         "type": "github"
       },
       "original": {
-        "owner": "NixOS",
+        "owner": "nixos",
         "ref": "nixos-unstable",
         "repo": "nixpkgs",
         "type": "github"
       }
     },
-    "poetry2nix": {
-      "inputs": {
-        "flake-utils": "flake-utils_2",
-        "nix-github-actions": "nix-github-actions",
-        "nixpkgs": [
-          "nixpkgs"
-        ]
-      },
-      "locked": {
-        "lastModified": 1692048894,
-        "narHash": "sha256-cDw03rso2V4CDc3Mll0cHN+ztzysAvdI8pJ7ybbz714=",
-        "ref": "refs/heads/pyqt6",
-        "rev": "b059ad4c3051f45d6c912e17747aae37a9ec1544",
-        "revCount": 2276,
-        "type": "git",
-        "url": "file:///home/lord_fomo/repos/poetry2nix"
-      },
-      "original": {
-        "type": "git",
-        "url": "file:///home/lord_fomo/repos/poetry2nix"
-      }
-    },
     "root": {
       "inputs": {
-        "flake-utils": "flake-utils",
-        "nixpkgs": "nixpkgs",
-        "poetry2nix": "poetry2nix"
+        "nixpkgs": "nixpkgs"
       }
-    },
-    "systems": {
-      "locked": {
-        "lastModified": 1681028828,
-        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
-        "owner": "nix-systems",
-        "repo": "default",
-        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-systems",
-        "repo": "default",
-        "type": "github"
-      }
-    },
-    "systems_2": {
-      "locked": {
-        "lastModified": 1681028828,
-        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
-        "owner": "nix-systems",
-        "repo": "default",
-        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
-        "type": "github"
-      },
-      "original": {
-        "owner": "nix-systems",
-        "repo": "default",
-        "type": "github"
-      }
     }
   },

flake.nix
View File

@@ -1,180 +1,103 @@
-# NOTE: to convert to a poetry2nix env like this here are the
-# steps:
-# - install poetry in your system nix config
-# - convert the repo to use poetry using `poetry init`:
-#   https://python-poetry.org/docs/basic-usage/#initialising-a-pre-existing-project
-# - then manually ensuring all deps are converted over:
-#   - add this file to the repo and commit it
-#   -
-# GROKin tips:
-# - CLI eps are (ostensibly) added via an `entry_points.txt`:
-#   - https://packaging.python.org/en/latest/specifications/entry-points/#file-format
-#   - https://github.com/nix-community/poetry2nix/blob/master/editable.nix#L49
+# An "impure" template thx to `pyproject.nix`,
+# https://pyproject-nix.github.io/pyproject.nix/templates.html#impure
+# https://github.com/pyproject-nix/pyproject.nix/blob/master/templates/impure/flake.nix
 {
-  description = "piker: trading gear for hackers (pkged with poetry2nix)";
+  description = "An impure `piker` overlay using `uv` with Nix(OS)";

-  inputs.flake-utils.url = "github:numtide/flake-utils";
-  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
-  # see https://github.com/nix-community/poetry2nix/tree/master#api
-  inputs.poetry2nix = {
-    # url = "github:nix-community/poetry2nix";
-    # url = "github:K900/poetry2nix/qt5-explicit-deps";
-    url = "/home/lord_fomo/repos/poetry2nix";
-    inputs.nixpkgs.follows = "nixpkgs";
+  inputs = {
+    nixpkgs.url = "github:nixos/nixpkgs/nixos-unstable";
   };

-  outputs = {
-    self,
-    nixpkgs,
-    flake-utils,
-    poetry2nix,
-  }:
-    # TODO: build cross-OS and use the `${system}` var thingy..
-    flake-utils.lib.eachDefaultSystem (system:
+  outputs =
+    { nixpkgs, ... }:
     let
-      # use PWD as sources
-      projectDir = ./.;
-      pyproject = ./pyproject.toml;
-      poetrylock = ./poetry.lock;
-
-      # TODO: port to 3.11 and support both versions?
-      python = "python3.10";
-
-      # for more functions and examples.
-      # inherit
-      # (poetry2nix.legacyPackages.${system})
-      # mkPoetryApplication;
-      # pkgs = nixpkgs.legacyPackages.${system};
-      pkgs = nixpkgs.legacyPackages.x86_64-linux;
-      lib = pkgs.lib;
-      p2npkgs = poetry2nix.legacyPackages.x86_64-linux;
-
-      # define all pkg overrides per dep, see edgecases.md:
-      # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md
-      # TODO: add these into the json file:
-      # https://github.com/nix-community/poetry2nix/blob/master/overrides/build-systems.json
-      pypkgs-build-requirements = {
-        asyncvnc = [ "setuptools" ];
-        eventkit = [ "setuptools" ];
-        ib-insync = [ "setuptools" "flake8" ];
-        msgspec = [ "setuptools" ];
-        pdbp = [ "setuptools" ];
-        pyqt6-sip = [ "setuptools" ];
-        tabcompleter = [ "setuptools" ];
-        tractor = [ "setuptools" ];
-        tricycle = [ "setuptools" ];
-        trio-typing = [ "setuptools" ];
-        trio-util = [ "setuptools" ];
-        xonsh = [ "setuptools" ];
-      };
-
-      # auto-generate override entries
-      p2n-overrides = p2npkgs.defaultPoetryOverrides.extend (self: super:
-        builtins.mapAttrs (package: build-requirements:
-          (builtins.getAttr package super).overridePythonAttrs (old: {
-            buildInputs = (
-              old.buildInputs or [ ]
-            ) ++ (
-              builtins.map (
-                pkg: if builtins.isString pkg then builtins.getAttr pkg super else pkg
-              ) build-requirements
-            );
-          })
-        ) pypkgs-build-requirements
-      );
-
-      # override some ahead-of-time compiled extensions
-      # to be built with their wheels.
-      ahot_overrides = p2n-overrides.extend(
-        final: prev: {
-          # llvmlite = prev.llvmlite.override {
-          #   preferWheel = false;
-          # };
-
-          # TODO: get this workin with p2n and nixpkgs..
-          # pyqt6 = prev.pyqt6.override {
-          #   preferWheel = true;
-          # };
-
-          # NOTE: this DOESN'T work atm but after a fix
-          # to poetry2nix, it will and actually this line
-          # won't be needed - thanks @k900:
-          # https://github.com/nix-community/poetry2nix/pull/1257
-          pyqt5 = prev.pyqt5.override {
-            # withWebkit = false;
-            preferWheel = true;
-          };
-
-          # see PR from @k900:
-          # https://github.com/nix-community/poetry2nix/pull/1257
-          # pyqt5-qt5 = prev.pyqt5-qt5.override {
-          #   withWebkit = false;
-          #   preferWheel = true;
-          # };
-
-          # TODO: patch in an override for polars to build
-          # from src! See the details likely needed from
-          # the cryptography entry:
-          # https://github.com/nix-community/poetry2nix/blob/master/overrides/default.nix#L426-L435
-          polars = prev.polars.override {
-            preferWheel = true;
-          };
-        }
-      );
-
-      # WHY!? -> output-attrs that `nix develop` scans for:
-      # https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-develop.html#flake-output-attributes
+      inherit (nixpkgs) lib;
+      forAllSystems = lib.genAttrs lib.systems.flakeExposed;
     in
-    rec {
-      packages = {
-        # piker = poetry2nix.legacyPackages.x86_64-linux.mkPoetryEditablePackage {
-        #   editablePackageSources = { piker = ./piker; };
-
-        piker = p2npkgs.mkPoetryApplication {
-          projectDir = projectDir;
-
-          # SEE ABOVE for auto-genned input set, override
-          # buncha deps with extras.. like `setuptools` mostly.
-          # TODO: maybe propose a patch to p2n to show that you
-          # can even do this in the edgecases docs?
-          overrides = ahot_overrides;
-
-          # XXX: won't work on llvmlite..
-          # preferWheels = true;
-        };
-      };
-
-      # devShells.default = pkgs.mkShell {
-      #   projectDir = projectDir;
-      #   python = "python3.10";
-      #   overrides = ahot_overrides;
-      #   inputsFrom = [ self.packages.x86_64-linux.piker ];
-      #   packages = packages;
-      #   # packages = [ poetry2nix.packages.${system}.poetry ];
-      # };
-
-      # TODO: grok the difference here..
-      # - avoid re-cloning git repos on every develop entry..
-      # - ideally allow hacking on the src code of some deps
-      #   (tractor, pyqtgraph, tomlkit, etc.) WITHOUT having to
-      #   re-install them every time a change is made.
-      # - boot a usable xonsh inside the poetry virtualenv when
-      #   defined via a custom entry point?
-      devShells.default = p2npkgs.mkPoetryEnv {
-        # env = p2npkgs.mkPoetryEnv {
-        projectDir = projectDir;
-        python = pkgs.python310;
-        overrides = ahot_overrides;
-        editablePackageSources = packages;
-        # piker = "./";
-        # tractor = "../tractor/";
-        # };
-      };
-    }
-  );  # end of .outputs scope
+    {
+      devShells = forAllSystems (
+        system:
+        let
+          pkgs = nixpkgs.legacyPackages.${system};
+
+          # do store-path extractions
+          qt6baseStorePath = lib.getLib pkgs.qt6.qtbase;
+          # ?TODO? can remove below since manual linking not needed?
+          # qt6QtWaylandStorePath = lib.getLib pkgs.qt6.qtwayland;
+
+          # XXX NOTE XXX, for now we overlay specific pkgs via
+          # a major-version-pinned-`cpython`
+          cpython = "python313";
+          pypkgs = pkgs."${cpython}Packages";
+        in
+        {
+          default = pkgs.mkShell {
+            packages = with pkgs; [
+              # XXX, ensure sh completions active!
+              bashInteractive
+              bash-completion
+
+              # dev utils
+              ruff
+              pypkgs.ruff
+
+              qt6.qtwayland
+              qt6.qtbase
+
+              uv
+              python313  # ?TODO^ how to set from `cpython` above?
+              pypkgs.pyqt6
+              pypkgs.pyqt6-sip
+              pypkgs.qtpy
+              pypkgs.qdarkstyle
+              pypkgs.rapidfuzz
+            ];
+
+            shellHook = ''
+              # unmask to debug **this** dev-shell-hook
+              # set -e
+              # wut?
+
+              # set qt-base/plugin path(s)
+              QTBASE_PATH="${qt6baseStorePath}/lib"
+              QT_PLUGIN_PATH="${qt6baseStorePath}/lib/qt-6/plugins"
+              QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms"
+
+              # link in Qt cc lib paths from <nixpkgs>
+              LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH"
+              LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH"
+              LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH"
+
+              # link-in c++ stdlib for various AOT-ext-pkgs (numpy, etc.)
+              LD_LIBRARY_PATH="${pkgs.stdenv.cc.cc.lib}/lib:$LD_LIBRARY_PATH"
+              export LD_LIBRARY_PATH
+
+              # RUNTIME-SETTINGS
+              #
+              # ------ Qt ------
+              # XXX, unmask to debug qt .so linking/loading deats
+              # export QT_DEBUG_PLUGINS=1
+              #
+              # ALSO, for *modern linux* DEs,
+              # - maybe set wayland-mode (TODO, parametrtize this!)
+              #   * a chosen wayland-mode shell-integration
+              export QT_QPA_PLATFORM="wayland"
+              export QT_WAYLAND_SHELL_INTEGRATION="xdg-shell"
+
+              # ------ uv ------
+              # - always use the ./py313/ venv-subdir
+              export UV_PROJECT_ENVIRONMENT="py313"
+
+              # sync project-env with all extras
+              uv sync --dev --all-extras --no-group lint
+
+              # ------ TIPS ------
+              # NOTE, to launch the py-venv installed `xonsh` (like @goodboy)
+              # run the `nix develop` cmd with,
+              # >> nix develop -c uv run xonsh
+            '';
+          };
+        }
+      );
+    };
 }

View File

@@ -42,7 +42,6 @@ from ._mktinfo import (
     dec_digits,
     digits_to_dec,
     MktPair,
-    Symbol,
     unpack_fqme,
     _derivs as DerivTypes,
 )
@@ -60,7 +59,6 @@ __all__ = [
     'Asset',
     'MktPair',
     'Position',
-    'Symbol',
     'Transaction',
     'TransactionLedger',
     'dec_digits',

View File

@@ -40,7 +40,7 @@ import tomli_w  # for fast ledger writing

 from piker.types import Struct
 from piker import config
-from ..log import get_logger
+from piker.log import get_logger
 from .calc import (
     iter_by_dt,
 )
@@ -239,7 +239,9 @@ class TransactionLedger(UserDict):
         symcache: SymbologyCache = self._symcache
         towrite: dict[str, Any] = {}
-        for tid, txdict in self.tx_sort(self.data.copy()):
+        for tid, txdict in self.tx_sort(
+            self.data.copy()
+        ):
             # write blank-str expiry for non-expiring assets
             if (
                 'expiry' in txdict
@@ -377,7 +379,7 @@ def open_trade_ledger(
             account,
             dirpath=_fp,
         )
-        cpy = ledger_dict.copy()
+        cpy: dict = ledger_dict.copy()

         # XXX NOTE: if not provided presume we are being called from
         # sync code and need to maybe run `trio` to generate..
@@ -406,7 +408,13 @@ def open_trade_ledger(
             account=account,
             mod=mod,
             symcache=symcache,
-            tx_sort=getattr(mod, 'tx_sort', tx_sort),
+
+            # NOTE: allow backends to provide custom ledger sorting
+            tx_sort=getattr(
+                mod,
+                'tx_sort',
+                tx_sort,
+            ),
         )
         try:
             yield ledger
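Since `tx_sort` is now looked up on the backend `mod` first, a broker package can ship its own ledger ordering. A hedged sketch of what such a module-level hook could look like; the `'dt'` record field is an assumption based on `iter_by_dt()` further below:

    # hypothetical backend-module hook, picked up via the
    # `getattr(mod, 'tx_sort', tx_sort)` shown above
    def tx_sort(
        txdicts: dict[str, dict],
    ) -> list[tuple[str, dict]]:
        # deliver (tid, txdict) pairs ordered by each record's
        # (assumed) 'dt' field, matching the iteration protocol
        # used by `TransactionLedger.write_config()`
        return sorted(
            txdicts.items(),
            key=lambda item: item[1]['dt'],
        )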

View File

@@ -305,8 +305,8 @@ class MktPair(Struct, frozen=True):
     # config right?
     # src_type: AssetTypeName

-    # for derivs, info describing contract, egs.
-    # strike price, call or put, swap type, exercise model, etc.
+    # for derivs, info describing contract, egs. strike price, call
+    # or put, swap type, exercise model, etc.
     contract_info: list[str] | None = None

     # TODO: rename to sectype since all of these can
@@ -390,8 +390,8 @@ class MktPair(Struct, frozen=True):
         cls,
         fqme: str,

-        price_tick: float | str,
-        size_tick: float | str,
+        price_tick: float|str,
+        size_tick: float|str,
         bs_mktid: str,

         broker: str | None = None,
@@ -677,90 +677,3 @@ def unpack_fqme(
         # '.'.join([mkt_ep, venue]),
         suffix,
     )
-
-
-class Symbol(Struct):
-    '''
-    I guess this is some kinda container thing for dealing with
-    all the different meta-data formats from brokers?
-
-    '''
-    key: str
-
-    broker: str = ''
-    venue: str = ''
-
-    # precision descriptors for price and vlm
-    tick_size: Decimal = Decimal('0.01')
-    lot_tick_size: Decimal = Decimal('0.0')
-
-    suffix: str = ''
-    broker_info: dict[str, dict[str, Any]] = {}
-
-    @classmethod
-    def from_fqme(
-        cls,
-        fqsn: str,
-        info: dict[str, Any],
-    ) -> Symbol:
-        broker, mktep, venue, suffix = unpack_fqme(fqsn)
-        tick_size = info.get('price_tick_size', 0.01)
-        lot_size = info.get('lot_tick_size', 0.0)
-
-        return Symbol(
-            broker=broker,
-            key=mktep,
-            tick_size=tick_size,
-            lot_tick_size=lot_size,
-            venue=venue,
-            suffix=suffix,
-            broker_info={broker: info},
-        )
-
-    @property
-    def type_key(self) -> str:
-        return list(self.broker_info.values())[0]['asset_type']
-
-    @property
-    def tick_size_digits(self) -> int:
-        return float_digits(self.tick_size)
-
-    @property
-    def lot_size_digits(self) -> int:
-        return float_digits(self.lot_tick_size)
-
-    @property
-    def price_tick(self) -> Decimal:
-        return Decimal(str(self.tick_size))
-
-    @property
-    def size_tick(self) -> Decimal:
-        return Decimal(str(self.lot_tick_size))
-
-    @property
-    def broker(self) -> str:
-        return list(self.broker_info.keys())[0]
-
-    @property
-    def fqme(self) -> str:
-        return maybe_cons_tokens([
-            self.key,  # final "pair name" (eg. qqq[/usd], btcusdt)
-            self.venue,
-            self.suffix,  # includes expiry and other con info
-            self.broker,
-        ])
-
-    def quantize(
-        self,
-        size: float,
-    ) -> Decimal:
-        digits = float_digits(self.lot_tick_size)
-        return Decimal(size).quantize(
-            Decimal(f'1.{"0".ljust(digits, "0")}'),
-            rounding=ROUND_HALF_EVEN
-        )
-
-    # NOTE: when cast to `str` return fqme
-    def __str__(self) -> str:
-        return self.fqme
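For reference, the tick-quantization trick the removed `Symbol.quantize()` performed (and which `MktPair` retains) is plain std-lib `decimal` rounding; a standalone sketch, assuming a string-encoded lot tick size:

    from decimal import Decimal, ROUND_HALF_EVEN

    def quantize(size: float, lot_tick_size: str = '0.001') -> Decimal:
        # number of decimal places implied by the tick size
        digits = abs(Decimal(lot_tick_size).as_tuple().exponent)
        return Decimal(str(size)).quantize(
            Decimal(f'1.{"0" * digits}'),
            rounding=ROUND_HALF_EVEN,
        )

    assert str(quantize(0.12345)) == '0.123'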

View File

@@ -30,7 +30,8 @@ from types import ModuleType
 from typing import (
     Any,
     Iterator,
-    Generator
+    Generator,
+    TYPE_CHECKING,
 )

 import pendulum
@@ -59,8 +60,10 @@ from ..clearing._messages import (
     BrokerdPosition,
 )
 from piker.types import Struct
-from piker.data._symcache import SymbologyCache
-from ..log import get_logger
+from piker.log import get_logger
+
+if TYPE_CHECKING:
+    from piker.data._symcache import SymbologyCache

 log = get_logger(__name__)
@@ -493,6 +496,17 @@ class Account(Struct):
         _mktmap_table: dict[str, MktPair] | None = None,

+        only_require: list[str]|True = True,
+        # ^list of fqmes that are "required" to be processed from
+        # this ledger pass; we often don't care about others and
+        # definitely shouldn't always error in such cases.
+        # (eg. broker backend loaded that doesn't yet supsport the
+        # symcache but also, inside the paper engine we don't ad-hoc
+        # request `get_mkt_info()` for every symbol in the ledger,
+        # only the one for which we're simulating against).
+        # TODO, not sure if there's a better soln for this, ideally
+        # all backends get symcache support afap i guess..
+
     ) -> dict[str, Position]:
         '''
         Update the internal `.pps[str, Position]` table from input
@@ -535,11 +549,32 @@ class Account(Struct):
                     if _mktmap_table is None:
                         raise

+                    required: bool = (
+                        only_require is True
+                        or (
+                            only_require is not True
+                            and
+                            fqme in only_require
+                        )
+                    )
                     # XXX: caller is allowed to provide a fallback
                     # mktmap table for the case where a new position is
                     # being added and the preloaded symcache didn't
                     # have this entry prior (eg. with frickin IB..)
-                    mkt = _mktmap_table[fqme]
+                    if (
+                        not (mkt := _mktmap_table.get(fqme))
+                        and
+                        required
+                    ):
+                        raise
+
+                    elif not required:
+                        continue
+
+                    else:
+                        # should be an entry retreived somewhere
+                        assert mkt

             if not (pos := pps.get(bs_mktid)):
@@ -656,7 +691,7 @@ class Account(Struct):
     def write_config(self) -> None:
         '''
         Write the current account state to the user's account TOML file, normally
-        something like ``pps.toml``.
+        something like `pps.toml`.

         '''
         # TODO: show diff output?
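A hedged call-site sketch of the new `only_require` param (the keyword shapes are assumed from the paper-engine hunk further below): only fqmes we actually built `MktPair`s for must resolve, while any other ledger entries are skipped instead of raising:

    # assumed usage; mirrors `open_trade_dialog()` in the
    # paper engine diff later in this PR
    acnt.update_from_ledger(
        ledger,
        _mktmap_table=mkt_by_fqme,
        # only these fqmes are required to map; others are skipped
        only_require=list(mkt_by_fqme),
    )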

View File

@@ -251,10 +251,16 @@ def iter_by_dt(
             for k in parsers:
                 if (
                     isdict and k in tx
-                    or getattr(tx, k, None)
+                    or
+                    getattr(tx, k, None)
                 ):
-                    v = tx[k] if isdict else tx.dt
-                    assert v is not None, f'No valid value for `{k}`!?'
+                    v = (
+                        tx[k] if isdict
+                        else tx.dt
+                    )
+                    assert v is not None, (
+                        f'No valid value for `{k}`!?'
+                    )

                     # only call parser on the value if not None from
                     # the `parsers` table above (when NOT using
@@ -269,8 +275,21 @@ def iter_by_dt(
                     return v
             else:
-                # XXX: should never get here..
-                breakpoint()
+                # TODO: move to top?
+                from piker.log import get_logger
+                log = get_logger(__name__)
+
+                # XXX: we should really never get here..
+                # only if a ledger record has no expected sort(able)
+                # field will we likely hit this.. like with ze IB.
+
+                # if no sortable field just deliver epoch?
+                log.warning(
+                    'No (time) sortable field for TXN:\n'
+                    f'{tx}\n'
+                )
+                return from_timestamp(0)
+                # breakpoint()

     entry: tuple[str, dict] | Transaction
     for entry in sorted(
@@ -446,7 +465,7 @@ def ledger_to_dfs(

         df = dfs[key] = ldf.with_columns([
-            pl.cumsum('size').alias('cumsize'),
+            pl.cum_sum('size').alias('cumsize'),

             # amount of source asset "sent" (via buy txns in
             # the market) to acquire the dst asset, PER txn.
@@ -461,7 +480,7 @@ def ledger_to_dfs(
         ]).with_columns([

             # rolling balance in src asset units
-            (pl.col('dst_bot').cumsum() * -1).alias('src_balance'),
+            (pl.col('dst_bot').cum_sum() * -1).alias('src_balance'),

             # "position operation type" in terms of increasing the
             # amount in the dst asset (entering) or decreasing the
@@ -603,7 +622,7 @@ def ledger_to_dfs(
                     # cost that was included in the least-recently
                     # entered txn that is still part of the current CSi
                     # set.
-                    # => we look up the cost-per-unit cumsum and apply
+                    # => we look up the cost-per-unit cum_sum and apply
                     # if over the current txn size (by multiplication)
                     # and then reverse that previusly applied cost on
                     # the txn_cost for this record.
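The `cumsum` -> `cum_sum` renames above track polars' own snake_case API migration (hence the `polars >=0.20.6` bump in `pyproject.toml` further below); a quick self-contained check:

    import polars as pl

    df = pl.DataFrame({'size': [1.0, -0.5, 2.0]})
    out = df.with_columns(
        pl.col('size').cum_sum().alias('cumsize'),
    )
    assert out['cumsize'].to_list() == [1.0, 0.5, 2.5]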

View File

@@ -300,7 +300,8 @@ def disect(
     assert not df.is_empty()

     # muck around in pdbp REPL
-    breakpoint()
+    # tractor.devx.mk_pdb().set_trace()
+    # breakpoint()

     # TODO: we REALLY need a better console REPL for this
     # kinda thing..

View File

@@ -168,7 +168,6 @@ class OrderClient(Struct):

 async def relay_orders_from_sync_code(
     client: OrderClient,
-
     symbol_key: str,
     to_ems_stream: tractor.MsgStream,
@@ -242,6 +241,11 @@ async def open_ems(
     async with maybe_open_emsd(
         broker,
+
+        # XXX NOTE, LOL so this determines the daemon `emsd` loglevel
+        # then FYI.. that's kinda wrong no?
+        # -[ ] shouldn't it be set by `pikerd -l` or no?
+        # -[ ] would make a lot more sense to have a subsys ctl for
+        #      levels.. like `-l emsd.info` or something?
         loglevel=loglevel,
     ) as portal:

View File

@@ -653,7 +653,11 @@ class Router(Struct):
         flume = feed.flumes[fqme]
         first_quote: dict = flume.first_quote
         book: DarkBook = self.get_dark_book(broker)
-        book.lasts[fqme]: float = float(first_quote['last'])
+
+        if not (last := first_quote.get('last')):
+            last: float = flume.rt_shm.array[-1]['close']
+
+        book.lasts[fqme]: float = float(last)

         async with self.maybe_open_brokerd_dialog(
             brokermod=brokermod,
@@ -716,7 +720,7 @@ class Router(Struct):
         subs = self.subscribers[sub_key]

         sent_some: bool = False
-        for client_stream in subs:
+        for client_stream in subs.copy():
             try:
                 await client_stream.send(msg)
                 sent_some = True
@@ -1010,6 +1014,10 @@ async def translate_and_relay_brokerd_events(
             status_msg.brokerd_msg = msg
             status_msg.src = msg.broker_details['name']

-            await router.client_broadcast(
-                status_msg.req.symbol,
-                status_msg,
+            if not status_msg.req:
+                # likely some order change state?
+                await tractor.pause()
+            else:
+                await router.client_broadcast(
+                    status_msg.req.symbol,
+                    status_msg,
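The `subs.copy()` change matters because mutating a collection while iterating it raises a `RuntimeError` in Python; snapshotting presumably lets broken subscriber streams be pruned from the live set mid-broadcast. A minimal standalone illustration:

    subs = {'stream_a', 'stream_b'}
    for s in subs.copy():
        if s == 'stream_b':  # pretend this client's send failed
            subs.remove(s)   # safe: we iterate the snapshot
    assert subs == {'stream_a'}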

View File

@@ -297,6 +297,8 @@ class PaperBoi(Struct):

         # transmit pp msg to ems
         pp: Position = self.acnt.pps[bs_mktid]
+        # TODO, this will break if `require_only=True` was passed to
+        # `.update_from_ledger()`

         pp_msg = BrokerdPosition(
             broker=self.broker,
@@ -653,6 +655,7 @@ async def open_trade_dialog(
             # in) use manually constructed table from calling
             # the `.get_mkt_info()` provider EP above.
             _mktmap_table=mkt_by_fqme,
+            only_require=list(mkt_by_fqme),
         )

         pp_msgs: list[BrokerdPosition] = []

View File

@@ -30,6 +30,7 @@ subsys: str = 'piker.clearing'

 log = get_logger(subsys)

+# TODO, oof doesn't this ignore the `loglevel` then???
 get_console_log = partial(
     get_console_log,
     name=subsys,

View File

@@ -189,28 +189,9 @@ def pikerd(
             # AsyncExitStack() as stack,
         ):
-            # TODO: spawn all other sub-actor daemons according to
-            # multiaddress endpoint spec defined by user config
             assert service_mngr

-            # if tsdb:
-            #     dname, conf = await stack.enter_async_context(
-            #         service.marketstore.start_ahab_daemon(
-            #             service_mngr,
-            #             loglevel=loglevel,
-            #         )
-            #     )
-            #     log.info(f'TSDB `{dname}` up with conf:\n{conf}')
-
-            # if es:
-            #     dname, conf = await stack.enter_async_context(
-            #         service.elastic.start_ahab_daemon(
-            #             service_mngr,
-            #             loglevel=loglevel,
-            #         )
-            #     )
-            #     log.info(f'DB `{dname}` up with conf:\n{conf}')
+            # ?TODO? spawn all other sub-actor daemons according to
+            # multiaddress endpoint spec defined by user config

             await trio.sleep_forever()

     trio.run(main)

View File

@@ -41,10 +41,13 @@ from .log import get_logger

 log = get_logger('broker-config')

-# XXX NOTE: taken from ``click`` since apparently they have some
-# super weirdness with sigint and sudo..no clue
-# we're probably going to slowly just modify it to our own version over
-# time..
+# XXX NOTE: taken from `click`
+# |_https://github.com/pallets/click/blob/main/src/click/utils.py#L449
+#
+# (since apparently they have some super weirdness with SIGINT and
+# sudo.. no clue we're probably going to slowly just modify it to our
+# own version over time..)
+#
 def get_app_dir(
     app_name: str,
     roaming: bool = True,
@@ -261,7 +264,7 @@ def load(
         MutableMapping,
     ] = tomllib.loads,

-    touch_if_dne: bool = False,
+    touch_if_dne: bool = True,

     **tomlkws,
@@ -270,7 +273,7 @@ def load(
     Load config file by name.

     If desired config is not in the top level piker-user config path then
-    pass the ``path: Path`` explicitly.
+    pass the `path: Path` explicitly.

     '''
     # create the $HOME/.config/piker dir if dne
@@ -285,7 +288,8 @@ def load(
     if (
         not path.is_file()
-        and touch_if_dne
+        and
+        touch_if_dne
     ):
         # only do a template if no path provided,
         # just touch an empty file with same name.

View File

@@ -95,6 +95,12 @@ class Sampler:
     # history loading.
     incr_task_cs: trio.CancelScope | None = None

+    bcast_errors: tuple[Exception] = (
+        trio.BrokenResourceError,
+        trio.ClosedResourceError,
+        trio.EndOfChannel,
+    )
+
     # holds all the ``tractor.Context`` remote subscriptions for
     # a particular sample period increment event: all subscribers are
     # notified on a step.
@@ -258,14 +264,15 @@ class Sampler:
             subs: set
             last_ts, subs = pair

-            task = trio.lowlevel.current_task()
-            log.debug(
-                f'SUBS {self.subscribers}\n'
-                f'PAIR {pair}\n'
-                f'TASK: {task}: {id(task)}\n'
-                f'broadcasting {period_s} -> {last_ts}\n'
-                # f'consumers: {subs}'
-            )
+            # NOTE, for debugging pub-sub issues
+            # task = trio.lowlevel.current_task()
+            # log.debug(
+            #     f'AlL-SUBS@{period_s!r}: {self.subscribers}\n'
+            #     f'PAIR: {pair}\n'
+            #     f'TASK: {task}: {id(task)}\n'
+            #     f'broadcasting {period_s} -> {last_ts}\n'
+            #     f'consumers: {subs}'
+            # )
             borked: set[MsgStream] = set()
             sent: set[MsgStream] = set()
             while True:
@@ -282,12 +289,11 @@ class Sampler:
                         await stream.send(msg)
                         sent.add(stream)

-                    except (
-                        trio.BrokenResourceError,
-                        trio.ClosedResourceError
-                    ):
+                    except self.bcast_errors as err:
                         log.error(
-                            f'{stream._ctx.chan.uid} dropped connection'
+                            f'Connection dropped for IPC ctx\n'
+                            f'{stream._ctx}\n\n'
+                            f'Due to {type(err)}'
                         )
                         borked.add(stream)
                     else:
@@ -394,7 +400,8 @@ async def register_with_sampler(
         finally:
             if (
                 sub_for_broadcasts
-                and subs
+                and
+                subs
             ):
                 try:
                     subs.remove(stream)
@@ -561,8 +568,7 @@ async def open_sample_stream(

 async def sample_and_broadcast(
-
-    bus: _FeedsBus,  # noqa
+    bus: _FeedsBus,
     rt_shm: ShmArray,
     hist_shm: ShmArray,
     quote_stream: trio.abc.ReceiveChannel,
@@ -582,11 +588,33 @@ async def sample_and_broadcast(

     overruns = Counter()

+    # NOTE, only used for debugging live-data-feed issues, though
+    # this should be resolved more correctly in the future using the
+    # new typed-msgspec feats of `tractor`!
+    #
+    # XXX, a multiline nested `dict` formatter (since rn quote-msgs
+    # are just that).
+    # pfmt: Callable[[str], str] = mk_repr()
+
     # iterate stream delivered by broker
     async for quotes in quote_stream:
         # print(quotes)

-        # TODO: ``numba`` this!
+        # XXX WARNING XXX only enable for debugging bc ow can cost
+        # ALOT of perf with HF-feedz!!!
+        #
+        # log.info(
+        #     'Rx live quotes:\n'
+        #     f'{pfmt(quotes)}'
+        # )
+
+        # TODO,
+        # -[ ] `numba` or `cython`-nize this loop possibly?
+        #  |_alternatively could we do it in rust somehow by upacking
+        #    arrow msgs instead of using `msgspec`?
+        # -[ ] use `msgspec.Struct` support in new typed-msging from
+        #    `tractor` to ensure only allowed msgs are transmitted?
+        #
         for broker_symbol, quote in quotes.items():
             # TODO: in theory you can send the IPC msg *before* writing
             # to the sharedmem array to decrease latency, however, that
@@ -659,6 +687,21 @@ async def sample_and_broadcast(
             sub_key: str = broker_symbol.lower()
             subs: set[Sub] = bus.get_subs(sub_key)

+            # TODO, figure out how to make this useful whilst
+            # incoporating feed "pausing" ..
+            #
+            # if not subs:
+            #     all_bs_fqmes: list[str] = list(
+            #         bus._subscribers.keys()
+            #     )
+            #     log.warning(
+            #         f'No subscribers for {brokername!r} live-quote ??\n'
+            #         f'broker_symbol: {broker_symbol}\n\n'
+            #         f'Maybe the backend-sys symbol does not match one of,\n'
+            #         f'{pfmt(all_bs_fqmes)}\n'
+            #     )
+
             # NOTE: by default the broker backend doesn't append
             # it's own "name" into the fqme schema (but maybe it
             # should?) so we have to manually generate the correct
@@ -697,7 +740,7 @@ async def sample_and_broadcast(
                         log.warning(
                             f'Feed OVERRUN {sub_key}'
-                            '@{bus.brokername} -> \n'
+                            f'@{bus.brokername} -> \n'
                             f'feed @ {chan.uid}\n'
                             f'throttle = {throttle} Hz'
                         )
@@ -728,18 +771,14 @@ async def sample_and_broadcast(
                     if lags > 10:
                         await tractor.pause()

-            except (
-                trio.BrokenResourceError,
-                trio.ClosedResourceError,
-                trio.EndOfChannel,
-            ):
+            except Sampler.bcast_errors as ipc_err:
                 ctx: Context = ipc._ctx
                 chan: Channel = ctx.chan
                 if ctx:
                     log.warning(
-                        'Dropped `brokerd`-quotes-feed connection:\n'
-                        f'{broker_symbol}:'
-                        f'{ctx.cid}@{chan.uid}'
+                        f'Dropped `brokerd`-feed for {broker_symbol!r} due to,\n'
+                        f'x>) {ctx.cid}@{chan.uid}'
+                        f'|_{ipc_err!r}\n\n'
                     )
                 if sub.throttle_rate:
                     assert ipc._closed
@@ -756,12 +795,11 @@ async def sample_and_broadcast(

 async def uniform_rate_send(
-
     rate: float,
     quote_stream: trio.abc.ReceiveChannel,
     stream: MsgStream,

-    task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
+    task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,

 ) -> None:
     '''
@@ -779,13 +817,16 @@ async def uniform_rate_send(
     https://gist.github.com/njsmith/7ea44ec07e901cb78ebe1dd8dd846cb9

     '''
-    # TODO: compute the approx overhead latency per cycle
-    left_to_sleep = throttle_period = 1/rate - 0.000616
+    # ?TODO? dynamically compute the **actual** approx overhead latency per cycle
+    # instead of this magic # bidinezz?
+    throttle_period: float = 1/rate - 0.000616
+    left_to_sleep: float = throttle_period

     # send cycle state
+    first_quote: dict|None
     first_quote = last_quote = None
-    last_send = time.time()
-    diff = 0
+    last_send: float = time.time()
+    diff: float = 0

     task_status.started()
     ticks_by_type: dict[
@@ -796,22 +837,28 @@ async def uniform_rate_send(
     clear_types = _tick_groups['clears']

     while True:
-
         # compute the remaining time to sleep for this throttled cycle
-        left_to_sleep = throttle_period - diff
+        left_to_sleep: float = throttle_period - diff

         if left_to_sleep > 0:
+            cs: trio.CancelScope
             with trio.move_on_after(left_to_sleep) as cs:
+                sym: str
+                last_quote: dict
                 try:
                     sym, last_quote = await quote_stream.receive()
                 except trio.EndOfChannel:
-                    log.exception(f"feed for {stream} ended?")
+                    log.exception(
+                        f'Live stream for feed for ended?\n'
+                        f'<=c\n'
+                        f'  |_[{stream!r}\n'
+                    )
                     break

-                diff = time.time() - last_send
+                diff: float = time.time() - last_send
                 if not first_quote:
-                    first_quote = last_quote
+                    first_quote: float = last_quote
                     # first_quote['tbt'] = ticks_by_type

                 if (throttle_period - diff) > 0:
@@ -872,7 +919,9 @@ async def uniform_rate_send(
         # TODO: now if only we could sync this to the display
         # rate timing exactly lul
         try:
-            await stream.send({sym: first_quote})
+            await stream.send({
+                sym: first_quote
+            })
         except tractor.RemoteActorError as rme:
             if rme.type is not tractor._exceptions.StreamOverrun:
                 raise
@@ -883,19 +932,28 @@ async def uniform_rate_send(
                 f'{sym}:{ctx.cid}@{chan.uid}'
             )

+        # NOTE: any of these can be raised by `tractor`'s IPC
+        # transport-layer and we want to be highly resilient
+        # to consumers which crash or lose network connection.
+        # I.e. we **DO NOT** want to crash and propagate up to
+        # ``pikerd`` these kinds of errors!
         except (
-            # NOTE: any of these can be raised by ``tractor``'s IPC
-            # transport-layer and we want to be highly resilient
-            # to consumers which crash or lose network connection.
-            # I.e. we **DO NOT** want to crash and propagate up to
-            # ``pikerd`` these kinds of errors!
-            trio.ClosedResourceError,
-            trio.BrokenResourceError,
             ConnectionResetError,
-        ):
-            # if the feed consumer goes down then drop
-            # out of this rate limiter
-            log.warning(f'{stream} closed')
+        ) + Sampler.bcast_errors as ipc_err:
+            match ipc_err:
+                case trio.EndOfChannel():
+                    log.info(
+                        f'{stream} terminated by peer,\n'
+                        f'{ipc_err!r}'
+                    )
+                case _:
+                    # if the feed consumer goes down then drop
+                    # out of this rate limiter
+                    log.warning(
+                        f'{stream} closed due to,\n'
+                        f'{ipc_err!r}'
+                    )
+
             await stream.aclose()
             return
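Worth noting on the `except (...) + Sampler.bcast_errors` clause: Python accepts any expression after `except` as long as it evaluates to an exception type or a tuple of them, so class-level error tuples compose cleanly. A minimal demo:

    bcast_errors: tuple[type[Exception], ...] = (
        BrokenPipeError,
        EOFError,
    )

    try:
        raise EOFError('peer went away')
    except (ConnectionResetError,) + bcast_errors as err:
        assert isinstance(err, EOFError)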

View File

@@ -31,6 +31,7 @@ from pathlib import Path
 from pprint import pformat
 from typing import (
     Any,
+    Callable,
     Sequence,
     Hashable,
     TYPE_CHECKING,
@@ -56,7 +57,7 @@ from piker.brokers import (
 )

 if TYPE_CHECKING:
-    from ..accounting import (
+    from piker.accounting import (
         Asset,
         MktPair,
     )
@@ -90,6 +91,18 @@ class SymbologyCache(Struct):
     # provided by the backend pkg.
     mktmaps: dict[str, MktPair] = field(default_factory=dict)

+    def pformat(self) -> str:
+        return (
+            f'<{type(self).__name__}(\n'
+            f' .mod: {self.mod!r}\n'
+            f' .assets: {len(self.assets)!r}\n'
+            f' .pairs: {len(self.pairs)!r}\n'
+            f' .mktmaps: {len(self.mktmaps)!r}\n'
+            f')>'
+        )
+
+    __repr__ = pformat
+
     def write_config(self) -> None:

         # put the backend's pair-struct type ref at the top
@@ -149,19 +162,36 @@ class SymbologyCache(Struct):
                 'Implement `Client.get_assets()`!'
             )

-        if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None):
-            pairs: dict[str, Struct] = await get_mkt_pairs()
-            for bs_fqme, pair in pairs.items():
-
-                # NOTE: every backend defined pair should
-                # declare it's ns path for roundtrip
-                # serialization lookup.
-                if not getattr(pair, 'ns_path', None):
-                    raise TypeError(
-                        f'Pair-struct for {self.mod.name} MUST define a '
-                        '`.ns_path: str`!\n'
-                        f'{pair}'
-                    )
-
-                entry = await self.mod.get_mkt_info(pair.bs_fqme)
+        get_mkt_pairs: Callable|None = getattr(
+            client,
+            'get_mkt_pairs',
+            None,
+        )
+        if not get_mkt_pairs:
+            log.warning(
+                'No symbology cache `Pair` support for `{provider}`..\n'
+                'Implement `Client.get_mkt_pairs()`!'
+            )
+            return self
+
+        pairs: dict[str, Struct] = await get_mkt_pairs()
+        if not pairs:
+            log.warning(
+                'No pairs from intial {provider!r} sym-cache request?\n\n'
+                '`Client.get_mkt_pairs()` -> {pairs!r} ?'
+            )
+            return self
+
+        for bs_fqme, pair in pairs.items():
+            if not getattr(pair, 'ns_path', None):
+                # XXX: every backend defined pair must declare
+                # a `.ns_path: tractor.NamespacePath` to enable
+                # roundtrip serialization lookup from a local
+                # cache file.
+                raise TypeError(
+                    f'Pair-struct for {self.mod.name} MUST define a '
+                    '`.ns_path: str`!\n\n'
+                    f'{pair!r}'
+                )
+
+            entry = await self.mod.get_mkt_info(pair.bs_fqme)
@@ -195,12 +225,6 @@ class SymbologyCache(Struct):
                 pair,
             )

-        else:
-            log.warning(
-                'No symbology cache `Pair` support for `{provider}`..\n'
-                'Implement `Client.get_mkt_pairs()`!'
-            )
-
         return self

     @classmethod
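For backend authors, the contract implied above is just two `Client` methods plus an `ns_path` on the pair struct. A hypothetical minimal sketch; everything besides `get_assets()`, `get_mkt_pairs()`, `ns_path` and `bs_fqme` (which come from the diff) is an illustrative assumption:

    from piker.types import Struct

    class Pair(Struct):
        symbol: str
        # required for roundtrip (de)serialization from the
        # on-disk symcache file
        ns_path: str = 'piker.brokers.mybackend:Pair'

        @property
        def bs_fqme(self) -> str:
            return self.symbol

    class Client:
        async def get_assets(self) -> dict:
            ...

        async def get_mkt_pairs(self) -> dict[str, Pair]:
            # normally an HTTP/WS request to the venue
            return {'btcusdt': Pair(symbol='btcusdt')}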

View File

@@ -352,7 +352,9 @@ async def allocate_persistent_feed(

     # yield back control to starting nursery once we receive either
     # some history or a real-time quote.
-    log.info(f'loading OHLCV history: {fqme}')
+    log.info(
+        f'loading OHLCV history: {fqme!r}\n'
+    )
     await some_data_ready.wait()

     flume = Flume(
@@ -786,7 +788,6 @@ async def install_brokerd_search(

 @acm
 async def maybe_open_feed(
-
     fqmes: list[str],
     loglevel: str | None = None,
@@ -840,13 +841,12 @@ async def maybe_open_feed(

 @acm
 async def open_feed(
-
     fqmes: list[str],

-    loglevel: str | None = None,
+    loglevel: str|None = None,
     allow_overruns: bool = True,
     start_stream: bool = True,
-    tick_throttle: float | None = None,  # Hz
+    tick_throttle: float|None = None,  # Hz

     allow_remote_ctl_ui: bool = False,

View File

@@ -36,10 +36,10 @@ from ._sharedmem import (
     ShmArray,
     _Token,
 )
+from piker.accounting import MktPair

 if TYPE_CHECKING:
-    from ..accounting import MktPair
-    from .feed import Feed
+    from piker.data.feed import Feed


 class Flume(Struct):
@@ -82,7 +82,7 @@ class Flume(Struct):

     # TODO: do we need this really if we can pull the `Portal` from
     # ``tractor``'s internals?
-    feed: Feed | None = None
+    feed: Feed|None = None

     @property
     def rt_shm(self) -> ShmArray:

View File

@@ -113,9 +113,9 @@ def validate_backend(
         )
         if ep is None:
             log.warning(
-                f'Provider backend {mod.name} is missing '
-                f'{daemon_name} support :(\n'
-                f'The following endpoint is missing: {name}'
+                f'Provider backend {mod.name!r} is missing '
+                f'{daemon_name!r} support?\n'
+                f'|_module endpoint-func missing: {name!r}\n'
             )

     inits: list[

View File

@@ -19,6 +19,10 @@ Log like a forester!
 """
 import logging
 import json
+import reprlib
+from typing import (
+    Callable,
+)

 import tractor
 from pygments import (
@@ -84,3 +88,29 @@ def colorize_json(
         # likeable styles: algol_nu, tango, monokai
         formatters.TerminalTrueColorFormatter(style=style)
     )
+
+
+# TODO, eventually defer to the version in `modden` once
+# it becomes a dep!
+def mk_repr(
+    **repr_kws,
+) -> Callable[[str], str]:
+    '''
+    Allocate and deliver a `repr.Repr` instance with provided input
+    settings using the std-lib's `reprlib` mod,
+     * https://docs.python.org/3/library/reprlib.html
+
+    ------ Ex. ------
+    An up to 6-layer-nested `dict` as multi-line:
+    - https://stackoverflow.com/a/79102479
+    - https://docs.python.org/3/library/reprlib.html#reprlib.Repr.maxlevel
+
+    '''
+    def_kws: dict[str, int] = dict(
+        indent=2,
+        maxlevel=6,  # recursion levels
+        maxstring=66,  # match editor line-len limit
+    )
+    def_kws |= repr_kws
+    reprr = reprlib.Repr(**def_kws)
+    return reprr.repr
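Usage sketch for `mk_repr()` (the commented `pfmt` call-sites in `_sampling.py` above are the intended consumers): collapse a nested quote-msg style `dict` into a bounded, indented repr for logging. Note the keyword-arg `reprlib.Repr(...)` constructor requires Python 3.12+, which matches the `requires-python` bump below:

    nested = {'l1': {'l2': {'l3': ['x' * 100]}}}
    pfmt = mk_repr(maxstring=24)  # tighten the per-string cap
    print(pfmt(nested))
    # strings longer than `maxstring` are elided and nesting
    # deeper than `maxlevel` collapses to '...'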

View File

@@ -517,7 +517,7 @@ def with_dts(

     '''
     return df.with_columns([
-        pl.col(time_col).shift(1).suffix('_prev'),
+        pl.col(time_col).shift(1).name.suffix('_prev'),
         pl.col(time_col).diff().alias('s_diff'),
         pl.from_epoch(pl.col(time_col)).alias('dt'),
     ]).with_columns([
@@ -623,7 +623,7 @@ def detect_vlm_gaps(

 ) -> pl.DataFrame:

-    vnull: pl.DataFrame = w_dts.filter(
+    vnull: pl.DataFrame = df.filter(
         pl.col(col) == 0
     )
     return vnull
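Similarly, `.suffix()` moved under the `Expr.name` namespace in newer polars; the `with_dts()` change in a nutshell:

    import polars as pl

    df = pl.DataFrame({'time': [1, 2, 3]})
    out = df.with_columns(
        pl.col('time').shift(1).name.suffix('_prev'),
    )
    assert out.columns == ['time', 'time_prev']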

View File

@@ -269,6 +269,8 @@ def hcolor(name: str) -> str:

         # default ohlc-bars/curve gray
         'bracket': '#666666',  # like the logo
+        'pikers': '#616161',  # a trader shade of..
+        'beast': '#161616',  # in the dark alone.

         # bluish
         'charcoal': '#36454F',

View File

@@ -23,7 +23,7 @@ name = "piker"
 version = "0.1.0a0dev0"
 description = "trading gear for hackers"
 authors = [{ name = "Tyler Goodlet", email = "goodboy_foss@protonmail.com" }]
-requires-python = ">=3.12, <3.13"
+requires-python = ">=3.12"
 license = "AGPL-3.0-or-later"
 readme = "README.rst"
 keywords = [
@@ -39,8 +39,8 @@ classifiers = [
   "Operating System :: POSIX :: Linux",
   "Programming Language :: Python :: Implementation :: CPython",
   "Programming Language :: Python :: 3 :: Only",
-  "Programming Language :: Python :: 3.11",
   "Programming Language :: Python :: 3.12",
+  "Programming Language :: Python :: 3.13",
   "Intended Audience :: Financial and Insurance Industry",
   "Intended Audience :: Science/Research",
   "Intended Audience :: Developers",
@@ -49,13 +49,13 @@ classifiers = [
 dependencies = [
   "async-generator >=1.10, <2.0.0",
   "attrs >=23.1.0, <24.0.0",
-  "bidict >=0.22.1, <0.23.0",
+  "bidict >=0.23.1",
   "colorama >=0.4.6, <0.5.0",
   "colorlog >=6.7.0, <7.0.0",
   "ib-insync >=0.9.86, <0.10.0",
-  "numba >=0.59.0, <0.60.0",
-  "numpy >=1.25, <2.0",
-  "polars >=0.18.13, <0.19.0",
+  "numpy>=2.0",
+  "polars >=0.20.6",
+  "polars-fuzzy-match>=0.1.5",
   "pygments >=2.16.1, <3.0.0",
   "rich >=13.5.2, <14.0.0",
   "tomli >=2.0.1, <3.0.0",
@@ -63,21 +63,27 @@ dependencies = [
   "trio-util >=0.7.0, <0.8.0",
   "trio-websocket >=0.10.3, <0.11.0",
   "typer >=0.9.0, <1.0.0",
-  "rapidfuzz >=3.5.2, <4.0.0",
-  "pdbp >=1.5.0, <2.0.0",
-  "trio >=0.24, <0.25",
-  "pendulum >=3.0.0, <4.0.0",
+  "trio >=0.27",
+  "pendulum",
   "httpx >=0.27.0, <0.28.0",
   "cryptofeed >=2.4.0, <3.0.0",
-  "pyarrow >=17.0.0, <18.0.0",
+  "pyarrow>=18.0.0",
   "websockets ==12.0",
-  "msgspec",
+  "msgspec>=0.19.0,<0.20",
   "tractor",
-  "asyncvnc",
   "tomlkit",
+  "trio-typing>=0.10.0",
+  "numba>=0.61.0",
+  "pyvnc",
 ]
+# ------ dependencies ------

-[project.optional-dependencies]
+# TODO: add an `--only daemon` group for running non-ui / pikerd
+# service tree in distributed mode B)
+# https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
+[dependency-groups]
 uis = [
   # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
   # TODO: make sure the levenshtein shit compiles on nix..
@@ -90,12 +96,11 @@ uis = [
   # for consideration,
   # - 'visidata'

-  # TODO: add an `--only daemon` group for running non-ui / pikerd
-  # service tree in distributed mode B)
-  # https://docs.astral.sh/uv/concepts/projects/dependencies/#optional-dependencies
+  "qdarkstyle >=3.0.2, <4.0.0",
+  "pyqt6 >=6.7.0, <7.0.0",
+  "pyqtgraph",
 ]

-[dependency-groups]
 # TODO: a toolset that makes debugging a `pikerd` service (tree) easy
 # to hack on directly using more or less the local env:
 # - xonsh + xxh
@@ -104,30 +109,92 @@ uis = [
 #
 # console ehancements and eventually remote debugging extras/helpers.
 # use `uv --dev` to enable
-dev = [
-  "pytest >=6.0.0, <7.0.0",
-  "elasticsearch >=8.9.0, <9.0.0",
-  "xonsh >=0.14.2, <0.15.0",
-  "prompt-toolkit ==3.0.40",
-  "cython >=3.0.0, <4.0.0",
+repl = [
+  # debug
+  "pdbp >=1.5.0, <2.0.0",
   "greenback >=1.1.1, <2.0.0",
-  "ruff>=0.9.6",
+  "xonsh",
+  "prompt-toolkit ==3.0.40",
+  "pyperclip>=1.9.0",
 ]
+testing = [
+  "pytest",
+]
+de = [
+  # DE-specific
+  "i3ipc>=2.2.1",
+]
+dev = [
+  # https://docs.astral.sh/uv/concepts/projects/dependencies/#development-dependencies
+  "cython >=3.0.0, <4.0.0",
+  # nested deps-groups
+  # https://docs.astral.sh/uv/concepts/projects/dependencies/#nesting-groups
+  {include-group = 'uis'},
+  {include-group = 'repl'},
+  {include-group = 'testing'},
+  {include-group = 'de'},
+]
+lint = [
+  # XXX, with flake.nix needs to be from nixpkgs
+  "ruff>=0.9.6"
+  #
+  # ^TODO? these markers don't work; use deps-flags for now?
+  # ; os_name != 'nixos' and platform_system != 'NixOS'",
+  # ; defined('IN_NIX_SHELL')",
+]
+dbs = [
+  "elasticsearch >=8.9.0, <9.0.0",
+]
+# ------ dependency-groups ------
+
+[tool.pytest.ini_options]
+# https://docs.pytest.org/en/stable/reference/reference.html#configuration-options
+testpaths = [
+  "tests",
+]
+# https://docs.pytest.org/en/stable/reference/reference.html#confval-console_output_style
+console_output_style = 'progress'
+# https://docs.pytest.org/en/stable/how-to/plugins.html#disabling-plugins-from-autoloading
+# https://docs.pytest.org/en/stable/how-to/plugins.html#deactivating-unregistering-a-plugin-by-name
+addopts = '-p no:xonsh'
+# ------ tool.pytest ------

 [project.scripts]
 piker = "piker.cli:cli"
 pikerd = "piker.cli:pikerd"
 ledger = "piker.accounting.cli:ledger"
+# ------ project.scripts ------

 [tool.hatch.build.targets.sdist]
 include = ["piker"]

 [tool.hatch.build.targets.wheel]
 include = ["piker"]
+# ------ tool.hatch ------
+
+# TODO? move to a `uv.toml`?
+[tool.uv]
+python-preference = 'system'
+python-downloads = 'manual'
+# https://docs.astral.sh/uv/concepts/projects/dependencies/#default-groups
+default-groups = ['uis', 'dev']
+# ------ tool.uv ------

 [tool.uv.sources]
 pyqtgraph = { git = "https://github.com/pikers/pyqtgraph.git" }
-asyncvnc = { git = "https://github.com/pikers/asyncvnc.git", branch = "main" }
 tomlkit = { git = "https://github.com/pikers/tomlkit.git", branch ="piker_pin" }
-msgspec = { git = "https://github.com/jcrist/msgspec.git" }
-tractor = { path = "../tractor", editable = true }
+pyvnc = { git = "https://github.com/regulad/pyvnc.git" }
+
+# XXX since, we're like, always hacking new shite all-the-time. Bp
+tractor = { git = "https://github.com/goodboy/tractor.git", branch ="piker_pin" }
+# tractor = { git = "https://pikers.dev/goodboy/tractor", branch = "piker_pin" }
+# tractor = { git = "https://pikers.dev/goodboy/tractor", branch = "main" }
+# ------ goodboy ------
+# hackin dev-envs, usually there's something new he's hackin in..
+# tractor = { path = "../tractor", editable = true }

View File

@@ -62,8 +62,9 @@ ignore-init-module-imports = false
 fixable = ["ALL"]
 unfixable = []

+# TODO? uhh why no work!?
 # Allow unused variables when underscore-prefixed.
-dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+# dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"

 [format]
 # Use single quotes in `ruff format`.

View File

@@ -1,4 +1,22 @@
-"""
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+'''
+A per-display, DPI (scaling) info dumper.
+
 Resource list for mucking with DPIs on multiple screens:

 - https://stackoverflow.com/questions/42141354/convert-pixel-size-to-point-size-for-fonts-on-multiple-platforms
@@ -12,89 +30,86 @@ Resource list for mucking with DPIs on multiple screens:
 - https://stackoverflow.com/questions/16561879/what-is-the-difference-between-logicaldpix-and-physicaldpix-in-qt
 - https://doc.qt.io/qt-5/qguiapplication.html#screenAt

-"""
+'''
 from pyqtgraph import QtGui
-from PyQt5.QtCore import (
-    Qt, QCoreApplication
+from PyQt6 import (
+    QtCore,
+    QtWidgets,
+)
+from PyQt6.QtCore import (
+    Qt,
+    QCoreApplication,
+    QSize,
+    QRect,
 )

 # Proper high DPI scaling is available in Qt >= 5.6.0. This attibute
 # must be set before creating the application
 if hasattr(Qt, 'AA_EnableHighDpiScaling'):
-    QCoreApplication.setAttribute(Qt.AA_EnableHighDpiScaling, True)
+    QCoreApplication.setAttribute(
+        Qt.AA_EnableHighDpiScaling,
+        True,
+    )
 if hasattr(Qt, 'AA_UseHighDpiPixmaps'):
-    QCoreApplication.setAttribute(Qt.AA_UseHighDpiPixmaps, True)
+    QCoreApplication.setAttribute(
+        Qt.AA_UseHighDpiPixmaps,
+        True,
+    )

-app = QtGui.QApplication([])
-window = QtGui.QMainWindow()
-main_widget = QtGui.QWidget()
+app = QtWidgets.QApplication([])
+window = QtWidgets.QMainWindow()
+main_widget = QtWidgets.QWidget()
 window.setCentralWidget(main_widget)
 window.show()

-pxr = main_widget.devicePixelRatioF()
+pxr: float = main_widget.devicePixelRatioF()

-# screen_num = app.desktop().screenNumber()
-# screen = app.screens()[screen_num]
-screen = app.screenAt(main_widget.geometry().center())
-
-name = screen.name()
-size = screen.size()
-geo = screen.availableGeometry()
-phydpi = screen.physicalDotsPerInch()
-logdpi = screen.logicalDotsPerInch()
-
-print(
-    # f'screen number: {screen_num}\n',
-    f'screen name: {name}\n'
-    f'screen size: {size}\n'
-    f'screen geometry: {geo}\n\n'
-    f'devicePixelRationF(): {pxr}\n'
-    f'physical dpi: {phydpi}\n'
-    f'logical dpi: {logdpi}\n'
-)
-
-print('-'*50)
-
-screen = app.primaryScreen()
-
-name = screen.name()
-size = screen.size()
-geo = screen.availableGeometry()
-phydpi = screen.physicalDotsPerInch()
-logdpi = screen.logicalDotsPerInch()
-
-print(
-    # f'screen number: {screen_num}\n',
-    f'screen name: {name}\n'
-    f'screen size: {size}\n'
-    f'screen geometry: {geo}\n\n'
-    f'devicePixelRationF(): {pxr}\n'
-    f'physical dpi: {phydpi}\n'
-    f'logical dpi: {logdpi}\n'
-)
+# explicitly get main widget and primary displays
+current_screen: QtGui.QScreen = app.screenAt(
+    main_widget.geometry().center()
+)
+primary_screen: QtGui.QScreen = app.primaryScreen()
+
+screen: QtGui.QScreen
+for screen in app.screens():
+    name: str = screen.name()
+    model: str = screen.model().rstrip()
+    size: QSize = screen.size()
+    geo: QRect = screen.availableGeometry()
+    phydpi: float = screen.physicalDotsPerInch()
+    logdpi: float = screen.logicalDotsPerInch()
+    is_primary: bool = screen is primary_screen
+    is_current: bool = screen is current_screen
+
+    print(
+        f'------ screen name: {name} ------\n'
+        f'|_primary: {is_primary}\n'
+        f'  _current: {is_current}\n'
+        f'  _model: {model}\n'
+        f'  _screen size: {size}\n'
+        f'  _screen geometry: {geo}\n'
+        f'  _devicePixelRationF(): {pxr}\n'
+        f'  _physical dpi: {phydpi}\n'
+        f'  _logical dpi: {logdpi}\n'
+    )

-# app-wide font
+# app-wide font info
 font = QtGui.QFont("Hack")
 # use pixel size to be cross-resolution compatible?
 font.setPixelSize(6)

 fm = QtGui.QFontMetrics(font)
-fontdpi = fm.fontDpi()
-font_h = fm.height()
-
-string = '10000'
-str_br = fm.boundingRect(string)
-str_w = str_br.width()
+fontdpi: float = fm.fontDpi()
+font_h: int = fm.height()
+
+string: str = '10000'
+str_br: QtCore.QRect = fm.boundingRect(string)
+str_w: int = str_br.width()

 print(
-    # f'screen number: {screen_num}\n',
+    f'------ global font settings ------\n'
     f'font dpi: {fontdpi}\n'
     f'font height: {font_h}\n'
     f'string bounding rect: {str_br}\n'
tags (new file, mode 100644)
View File

@@ -0,0 +1 @@
+TAG_feed_status_update ./piker/data/feed.py /TAG_feed_status_update/

uv.lock

File diff suppressed because it is too large