From d7179d47b025f71e9f055eb189df25a27c50f7a6 Mon Sep 17 00:00:00 2001
From: Tyler Goodlet
Date: Thu, 13 Feb 2025 11:36:59 -0500
Subject: [PATCH] `.tsp._anal`: add (unused) `detect_vlm_gaps()`

---
 piker/tsp/_anal.py | 30 ++++++++++++++++++++++--------
 1 file changed, 22 insertions(+), 8 deletions(-)

diff --git a/piker/tsp/_anal.py b/piker/tsp/_anal.py
index c34a0c3a..ea78c46a 100644
--- a/piker/tsp/_anal.py
+++ b/piker/tsp/_anal.py
@@ -616,6 +616,18 @@ def detect_price_gaps(
 #     ])
     ...
 
+# TODO: probably just use the null_segs impl above?
+def detect_vlm_gaps(
+    df: pl.DataFrame,
+    col: str = 'volume',
+
+) -> pl.DataFrame:
+
+    vnull: pl.DataFrame = df.filter(
+        pl.col(col) == 0
+    )
+    return vnull
+
 
 def dedupe(
     src_df: pl.DataFrame,
@@ -626,7 +638,6 @@
 
 ) -> tuple[
     pl.DataFrame,  # with dts
-    pl.DataFrame,  # gaps
     pl.DataFrame,  # with deduplicated dts (aka gap/repeat removal)
     int,  # len diff between input and deduped
 ]:
@@ -639,19 +650,22 @@
 
     '''
     wdts: pl.DataFrame = with_dts(src_df)
-    # maybe sort on any time field
-    if sort:
-        wdts = wdts.sort(by='time')
-        # TODO: detect out-of-order segments which were corrected!
-        # -[ ] report in log msg
-        # -[ ] possibly return segment sections which were moved?
+    deduped = wdts
 
     # remove duplicated datetime samples/sections
     deduped: pl.DataFrame = wdts.unique(
-        subset=['dt'],
+        # subset=['dt'],
+        subset=['time'],
        maintain_order=True,
     )
+    # maybe sort on any time field
+    if sort:
+        deduped = deduped.sort(by='time')
+        # TODO: detect out-of-order segments which were corrected!
+        # -[ ] report in log msg
+        # -[ ] possibly return segment sections which were moved?
+
     diff: int = (
         wdts.height
         -