Use `datetime` sorting on clears table appends

In order to avoid issues with reloading ledger and API trades after an
existing `pps.toml` exists we have to make sure we not only avoid
duplicate entries but also avoid re-adding entries that would have been
removed during a prior call to the `Position.minimize_clears()` filter.
The easiest way to do this is to sort on timestamps and avoid adding any
record that pre-existed the last net-zero position ledger event that
`.minimize_clears()` discarded. Implementing this means parsing the
config file clears table's timestamps into `datetime` objects for
inequality checks, and we add a `Position.first_clear_dt` attr for
storing this value when managing pps in object form but never store it
in the config (since it should be obvious from the sorted clear event
table).
doin_the_splits
Tyler Goodlet 2022-08-15 10:42:58 -04:00
parent 7bec989eed
commit 0cf4e07b84
1 changed files with 40 additions and 20 deletions

View File

@ -141,6 +141,7 @@ class Position(Struct):
Union[str, int, Status], # trade id Union[str, int, Status], # trade id
dict[str, Any], # transaction history summaries dict[str, Any], # transaction history summaries
] = {} ] = {}
first_clear_dt: Optional[datetime] = None
expiry: Optional[datetime] = None expiry: Optional[datetime] = None
@ -164,6 +165,9 @@ class Position(Struct):
if self.split_ratio is None: if self.split_ratio is None:
d.pop('split_ratio') d.pop('split_ratio')
# should be obvious from clears/event table
d.pop('first_clear_dt')
# TODO: we need to figure out how to have one top level # TODO: we need to figure out how to have one top level
# listing venue here even when the backend isn't providing # listing venue here even when the backend isn't providing
# it via the trades ledger.. # it via the trades ledger..
@ -189,7 +193,8 @@ class Position(Struct):
): ):
inline_table = toml.TomlDecoder().get_empty_inline_table() inline_table = toml.TomlDecoder().get_empty_inline_table()
inline_table['dt'] = data['dt'] # serialize datetime to parsable `str`
inline_table['dt'] = str(data['dt'])
# insert optional clear fields in column order # insert optional clear fields in column order
for k in ['ppu', 'accum_size']: for k in ['ppu', 'accum_size']:
@ -236,6 +241,10 @@ class Position(Struct):
) )
ppu = cppu ppu = cppu
self.first_clear_dt = min(
list(entry['dt'] for entry in self.clears.values())
)
return size, ppu return size, ppu
def update_from_msg( def update_from_msg(
@ -449,15 +458,16 @@ class Position(Struct):
'cost': t.cost, 'cost': t.cost,
'price': t.price, 'price': t.price,
'size': t.size, 'size': t.size,
'dt': str(t.dt), 'dt': t.dt,
} }
# TODO: compute these incrementally instead # TODO: compute these incrementally instead
# of re-looping through each time resulting in O(n**2) # of re-looping through each time resulting in O(n**2)
# behaviour.. # behaviour..?
# compute these **after** adding the entry
# in order to make the recurrence relation math work # NOTE: we compute these **after** adding the entry in order to
# inside ``.calc_size()``. # make the recurrence relation math work inside
# ``.calc_size()``.
self.size = clear['accum_size'] = self.calc_size() self.size = clear['accum_size'] = self.calc_size()
self.ppu = clear['ppu'] = self.calc_ppu() self.ppu = clear['ppu'] = self.calc_ppu()
@ -499,16 +509,22 @@ class PpTable(Struct):
expiry=t.expiry, expiry=t.expiry,
) )
) )
clears = pp.clears
if clears:
first_clear_dt = pp.first_clear_dt
# don't do updates for ledger records we already have # don't do updates for ledger records we already have
# included in the current pps state. # included in the current pps state.
if t.tid in pp.clears: if (
# NOTE: likely you'll see repeats of the same t.tid in clears
# ``Transaction`` passed in here if/when you are restarting or first_clear_dt and t.dt < first_clear_dt
# a ``brokerd.ib`` where the API will re-report trades from ):
# the current session, so we need to make sure we don't # NOTE: likely you'll see repeats of the same
# "double count" these in pp calculations. # ``Transaction`` passed in here if/when you are restarting
continue # a ``brokerd.ib`` where the API will re-report trades from
# the current session, so we need to make sure we don't
# "double count" these in pp calculations.
continue
# update clearing table # update clearing table
pp.add_clear(t) pp.add_clear(t)
@ -850,17 +866,21 @@ def open_pps(
# index clears entries in "object" form by tid in a top # index clears entries in "object" form by tid in a top
# level dict instead of a list (as is presented in our # level dict instead of a list (as is presented in our
# ``pps.toml``). # ``pps.toml``).
pp = pp_objs.get(bsuid) clears = pp_objs.setdefault(bsuid, {})
if pp:
clears = pp.clears
else:
clears = {}
# TODO: should be make a ``Struct`` for clear/event entries?
# convert "clear events table" from the toml config (list of
# a dicts) and load it into object form for use in position
# processing of new clear events.
for clears_table in clears_list: for clears_table in clears_list:
tid = clears_table.pop('tid') tid = clears_table.pop('tid')
dtstr = clears_table['dt']
dt = pendulum.parse(dtstr)
clears_table['dt'] = dt
clears[tid] = clears_table clears[tid] = clears_table
size = entry['size'] size = entry['size']
# TODO: remove but, handle old field name for now # TODO: remove but, handle old field name for now
ppu = entry.get('ppu', entry.get('be_price', 0)) ppu = entry.get('ppu', entry.get('be_price', 0))
split_ratio = entry.get('split_ratio') split_ratio = entry.get('split_ratio')