Compare commits

...

6 Commits

Author SHA1 Message Date
Tyler Goodlet eb06fc79f1 `.accounting._ledger`: typing and more multiline styling 2025-02-19 17:56:10 -05:00
Tyler Goodlet 76ca316b9d Drop some bps and style logic to multiline 2025-02-19 17:56:10 -05:00
Tyler Goodlet 3e8481978b `.accounting` add synopsis section to readme 2025-02-19 17:56:10 -05:00
Tyler Goodlet 9e6bfa0926 Teensie `piker.data` styling tweaks
- use more compact optional value style with `|`-union
- fix `.flows` typing-only import since we need `MktPair` to be
  immediately defined for use on a `msgspec.Struct` field.
- more "tree-like" warning msg in `.validate()` reporting.
2025-02-19 17:52:14 -05:00
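For reference, a minimal sketch of the two patterns mentioned in the commit above (assumed `msgspec` usage; the field layout is illustrative, not the actual `piker.data.flows` definitions):

```python
from msgspec import Struct

class MktPair(Struct, frozen=True):
    fqme: str

class Flume(Struct):
    # compact optional-value style using a `|`-union
    mkt: MktPair | None = None
```

Since `msgspec` needs to resolve field annotations when the struct class is built, `MktPair` must be importable at runtime in `.flows`; a `TYPE_CHECKING`-only import leaves the name undefined when the `Flume` class body is evaluated.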
Tyler Goodlet a945bb33f3 Invert `getattr()` check for `get_mkt_pairs()` ep
Such that we `return` early when not defined by the provider backend to
reduce an indent level in `SymbologyCache.load()`.
2025-02-19 17:31:10 -05:00
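Roughly, the inversion turns the positive check into a guard clause (a sketch of the shape only, not the real `SymbologyCache.load()` body; see the actual hunk further below):

```python
from typing import Any

class SymbologyCacheSketch:
    async def load(self, client: Any) -> 'SymbologyCacheSketch':
        # return early when the backend doesn't define the EP, so the
        # main pair-loading loop sits one indent level shallower than
        # with the old `if get_mkt_pairs := getattr(...):` nesting.
        get_mkt_pairs = getattr(client, 'get_mkt_pairs', None)
        if not get_mkt_pairs:
            return self

        pairs: dict[str, Any] = await get_mkt_pairs()
        for bs_fqme, pair in pairs.items():
            ...  # per-pair processing stays at this level
        return self
```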
Tyler Goodlet 850cdbfe59 Allow ledger passes to ignore (symcache) unknown fqmes
For example in the paper-eng, if you have a backend that doesn't fully
support a symcache (yet) it's handy to be able to ignore processing
other paper-eng txns when all you care about at the moment is the
simulated symbol.

NOTE, that currently this will still result in a key-error when you load
more than one mkt with the paper engine (for which the backend does not
have the symcache implemented) since no fqme ad-hoc query was made for
the 2nd symbol (and i'm not sure we should support that kinda hackery
over just encouraging the sym-cache being added?). Def needs a little
more thought depending on how many backends are never going to be able
to (easily) support caching..
2025-02-19 17:31:10 -05:00
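A minimal sketch of the skip-unknown-fqmes semantics described above (illustrative names and a standalone function, not the actual `Account.update_from_ledger()` body; see the hunks below for the real change):

```python
def filter_txns(
    txns_by_fqme: dict[str, list[dict]],
    mktmap_table: dict[str, object],
    only_require: list[str] | bool = True,
) -> dict[str, list[dict]]:
    '''
    Keep only txns whose fqme is known to the (sym)cache table,
    erroring only for fqmes the caller explicitly requires.
    '''
    out: dict[str, list[dict]] = {}
    for fqme, txns in txns_by_fqme.items():
        required: bool = (
            only_require is True
            or (
                isinstance(only_require, list)
                and fqme in only_require
            )
        )
        if fqme not in mktmap_table:
            if required:
                raise KeyError(fqme)
            # unknown to the symcache and not explicitly required:
            # skip instead of blowing up the whole ledger pass.
            continue
        out[fqme] = txns
    return out
```

The paper engine then passes `only_require=list(mkt_by_fqme)` (see the `open_trade_dialog()` hunk below) so only the simulated symbol(s) are treated as required.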
11 changed files with 183 additions and 77 deletions

View File

@@ -1,8 +1,40 @@
.accounting
-----------
piker.accounting
________________
A subsystem for transaction processing, storage and historical
measurement.
synopsis
--------
The big question for any trader is this:
*what is the price that determines whether i take a loss or a gain on my
trade?*
In other words, given the current state of your accounts, what is the
price between any 2 assets you've transacted at which **the next**
transaction tells you whether you are making or losing more (or less) of
the *source* asset versus the *destination* asset?
Let's do a very simple example:
> Joe wants to buy some tacos bc they're super hungo.
> Joe has a friend who also likes tacos but doesn't care whether they're fresh; he doesn't mind day-old tacos.
> Inflation is rampant and taco prices are trending up for no good reason besides that everyone thinks prices are going up.
> Joe goes to the taco stand and buys 4 tacos at 25 mxn each.
> This makes Joe's net cost `4 * 25 = 100` mxn.
> Joe eats 3 tacos and realizes that he can't finish the last, so he puts it in the fridge to save for the next day (since he owns a comal).
> The next day the price of tacos goes up to 30 mxn (for no good reason besides the taco stand noticing Joe is a tourist and that "inflation" is something that's used as an excuse for price changes).
> Joe's friend from before got lit up (like he does every morning) and msgs Joe to buy him 2 tacos for when he shows up in the late morning.
> Joe says "sure, but i also have a leftover if you want it, and I'm fasting today so you can have my sobras and i'll buy you a new one".
> The friend coughs a couple times, and says "yee no problem man, just make sure you get them".
>
Prior *suit* definitions:
- the canucks equiv of the IRS call this idea ["Adjusted cost base"](https://www.canada.ca/en/revenue-agency/services/tax/individuals/topics/about-your-tax-return/tax-return/completing-a-tax-return/personal-income/line-12700-capital-gains/definitions-capital-gains.html#Adjustedcostbase)
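To put numbers on it, here is a minimal sketch (plain Python, not the `piker.accounting` API; quantities and prices taken from the taco example above) of the running cost-basis / breakeven arithmetic:

```python
def breakeven_ppu(txns: list[tuple[float, float]]) -> float:
    '''
    Given (size, price) clears in the *destination* asset, return the
    per-unit price at which the *next* transaction neither gains nor
    loses the *source* asset.
    '''
    total_size = sum(size for size, _ in txns)
    total_cost = sum(size * price for size, price in txns)
    return total_cost / total_size

# Joe's first buy: 4 tacos @ 25 mxn -> net cost 100 mxn
ppu = breakeven_ppu([(4, 25.0)])
assert ppu == 25.0
# with tacos now quoted at 30 mxn, anything "sold" (or valued) above
# 25 mxn/taco is a gain in mxn terms; below it, a loss.
```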
.pnl
----

View File

@@ -40,7 +40,7 @@ import tomli_w # for fast ledger writing
from piker.types import Struct
from piker import config
from ..log import get_logger
from piker.log import get_logger
from .calc import (
iter_by_dt,
)
@@ -239,7 +239,9 @@ class TransactionLedger(UserDict):
symcache: SymbologyCache = self._symcache
towrite: dict[str, Any] = {}
for tid, txdict in self.tx_sort(self.data.copy()):
for tid, txdict in self.tx_sort(
self.data.copy()
):
# write blank-str expiry for non-expiring assets
if (
'expiry' in txdict
@@ -377,7 +379,7 @@ def open_trade_ledger(
account,
dirpath=_fp,
)
cpy = ledger_dict.copy()
cpy: dict = ledger_dict.copy()
# XXX NOTE: if not provided presume we are being called from
# sync code and need to maybe run `trio` to generate..
@@ -406,7 +408,13 @@ def open_trade_ledger(
account=account,
mod=mod,
symcache=symcache,
tx_sort=getattr(mod, 'tx_sort', tx_sort),
# NOTE: allow backends to provide custom ledger sorting
tx_sort=getattr(
mod,
'tx_sort',
tx_sort,
),
)
try:
yield ledger

View File

@@ -305,8 +305,8 @@ class MktPair(Struct, frozen=True):
# config right?
# src_type: AssetTypeName
# for derivs, info describing contract, egs.
# strike price, call or put, swap type, exercise model, etc.
# for derivs, info describing contract, egs. strike price, call
# or put, swap type, exercise model, etc.
contract_info: list[str] | None = None
# TODO: rename to sectype since all of these can

View File

@@ -30,7 +30,8 @@ from types import ModuleType
from typing import (
Any,
Iterator,
Generator
Generator,
TYPE_CHECKING,
)
import pendulum
@@ -59,8 +60,10 @@ from ..clearing._messages import (
BrokerdPosition,
)
from piker.types import Struct
from piker.log import get_logger
if TYPE_CHECKING:
from piker.data._symcache import SymbologyCache
from ..log import get_logger
log = get_logger(__name__)
@@ -493,6 +496,17 @@ class Account(Struct):
_mktmap_table: dict[str, MktPair] | None = None,
only_require: list[str]|True = True,
# ^list of fqmes that are "required" to be processed from
# this ledger pass; we often don't care about others and
# definitely shouldn't always error in such cases.
# (eg. broker backend loaded that doesn't yet support the
# symcache but also, inside the paper engine we don't ad-hoc
# request `get_mkt_info()` for every symbol in the ledger,
# only the one we're simulating against).
# TODO, not sure if there's a better soln for this, ideally
# all backends get symcache support afap i guess..
) -> dict[str, Position]:
'''
Update the internal `.pps[str, Position]` table from input
@@ -535,11 +549,32 @@
if _mktmap_table is None:
raise
required: bool = (
only_require is True
or (
only_require is not True
and
fqme in only_require
)
)
# XXX: caller is allowed to provide a fallback
# mktmap table for the case where a new position is
# being added and the preloaded symcache didn't
# have this entry prior (eg. with frickin IB..)
mkt = _mktmap_table[fqme]
if (
not (mkt := _mktmap_table.get(fqme))
and
required
):
raise
elif not required:
continue
else:
# should be an entry retrieved somewhere
assert mkt
if not (pos := pps.get(bs_mktid)):
@@ -656,7 +691,7 @@
def write_config(self) -> None:
'''
Write the current account state to the user's account TOML file, normally
something like ``pps.toml``.
something like `pps.toml`.
'''
# TODO: show diff output?

View File

@@ -251,10 +251,16 @@ def iter_by_dt(
for k in parsers:
if (
isdict and k in tx
or getattr(tx, k, None)
or
getattr(tx, k, None)
):
v = tx[k] if isdict else tx.dt
assert v is not None, f'No valid value for `{k}`!?'
v = (
tx[k] if isdict
else tx.dt
)
assert v is not None, (
f'No valid value for `{k}`!?'
)
# only call parser on the value if not None from
# the `parsers` table above (when NOT using
@@ -269,8 +275,21 @@
return v
else:
# XXX: should never get here..
breakpoint()
# TODO: move to top?
from piker.log import get_logger
log = get_logger(__name__)
# XXX: we should really never get here..
# only if a ledger record has no expected sort(able)
# field will we likely hit this.. like with ze IB.
# if no sortable field just deliver epoch?
log.warning(
'No (time) sortable field for TXN:\n'
f'{tx}\n'
)
return from_timestamp(0)
# breakpoint()
entry: tuple[str, dict] | Transaction
for entry in sorted(

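Roughly, the no-sortable-field fallback added above behaves like this sketch (hypothetical txn dicts and field names; not the real `iter_by_dt()` signature):

```python
from pendulum import DateTime, from_timestamp, parse

def txn_sort_key(txdict: dict) -> DateTime:
    # prefer any known datetime-ish field on the record..
    for k in ('dt', 'time', 'date'):
        if (v := txdict.get(k)) is not None:
            return parse(v) if isinstance(v, str) else v
    # ..otherwise deliver the epoch (with a warning in the real code)
    # so the record sorts first instead of crashing the ledger pass.
    return from_timestamp(0)

records = [
    {'tid': 'a', 'dt': '2025-02-19T17:31:10-05:00'},
    {'tid': 'b'},  # no (time) sortable field, eg. some IB records
]
assert [r['tid'] for r in sorted(records, key=txn_sort_key)] == ['b', 'a']
```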
View File

@@ -300,7 +300,8 @@ def disect(
assert not df.is_empty()
# muck around in pdbp REPL
breakpoint()
# tractor.devx.mk_pdb().set_trace()
# breakpoint()
# TODO: we REALLY need a better console REPL for this
# kinda thing..

View File

@@ -653,6 +653,7 @@ async def open_trade_dialog(
# in) use manually constructed table from calling
# the `.get_mkt_info()` provider EP above.
_mktmap_table=mkt_by_fqme,
only_require=list(mkt_by_fqme),
)
pp_msgs: list[BrokerdPosition] = []

View File

@@ -31,6 +31,7 @@ from pathlib import Path
from pprint import pformat
from typing import (
Any,
Callable,
Sequence,
Hashable,
TYPE_CHECKING,
@@ -56,7 +57,7 @@ from piker.brokers import (
)
if TYPE_CHECKING:
from ..accounting import (
from piker.accounting import (
Asset,
MktPair,
)
@@ -149,19 +150,36 @@ class SymbologyCache(Struct):
'Implement `Client.get_assets()`!'
)
if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None):
get_mkt_pairs: Callable|None = getattr(
client,
'get_mkt_pairs',
None,
)
if not get_mkt_pairs:
log.warning(
'No symbology cache `Pair` support for `{provider}`..\n'
'Implement `Client.get_mkt_pairs()`!'
)
return self
pairs: dict[str, Struct] = await get_mkt_pairs()
for bs_fqme, pair in pairs.items():
if not pairs:
log.warning(
'No pairs from initial {provider!r} sym-cache request?\n\n'
'`Client.get_mkt_pairs()` -> {pairs!r} ?'
)
return self
# NOTE: every backend defined pair should
# declare its ns path for roundtrip
# serialization lookup.
for bs_fqme, pair in pairs.items():
if not getattr(pair, 'ns_path', None):
# XXX: every backend defined pair must declare
# a `.ns_path: tractor.NamespacePath` to enable
# roundtrip serialization lookup from a local
# cache file.
raise TypeError(
f'Pair-struct for {self.mod.name} MUST define a '
'`.ns_path: str`!\n'
f'{pair}'
'`.ns_path: str`!\n\n'
f'{pair!r}'
)
entry = await self.mod.get_mkt_info(pair.bs_fqme)
@@ -195,12 +213,6 @@
pair,
)
else:
log.warning(
'No symbology cache `Pair` support for `{provider}`..\n'
'Implement `Client.get_mkt_pairs()`!'
)
return self
@classmethod

View File

@@ -786,7 +786,6 @@ async def install_brokerd_search(
@acm
async def maybe_open_feed(
fqmes: list[str],
loglevel: str | None = None,
@@ -840,7 +839,6 @@ async def maybe_open_feed(
@acm
async def open_feed(
fqmes: list[str],
loglevel: str|None = None,

View File

@@ -36,10 +36,10 @@ from ._sharedmem import (
ShmArray,
_Token,
)
from piker.accounting import MktPair
if TYPE_CHECKING:
from ..accounting import MktPair
from .feed import Feed
from piker.data.feed import Feed
class Flume(Struct):

View File

@@ -113,9 +113,9 @@ def validate_backend(
)
if ep is None:
log.warning(
f'Provider backend {mod.name} is missing '
f'{daemon_name} support :(\n'
f'The following endpoint is missing: {name}'
f'Provider backend {mod.name!r} is missing '
f'{daemon_name!r} support?\n'
f'|_module endpoint-func missing: {name!r}\n'
)
inits: list[