To avoid feed breakage, just give up on history after too many throttles for now

symbol_search
Tyler Goodlet 2021-05-21 12:51:06 -04:00
parent 82cdb176e1
commit 27d704b32e
1 changed file with 13 additions and 6 deletions

View File

@ -833,6 +833,7 @@ async def get_bars(
_err = None _err = None
fails = 0
for _ in range(2): for _ in range(2):
try: try:
@ -847,7 +848,7 @@ async def get_bars(
next_dt = bars[0].date next_dt = bars[0].date
return bars, bars_array, next_dt return (bars, bars_array, next_dt), fails
except RequestError as err: except RequestError as err:
_err = err _err = err
@ -871,10 +872,13 @@ async def get_bars(
# and then somehow get that to trigger an event here # and then somehow get that to trigger an event here
# that restarts/resumes this task? # that restarts/resumes this task?
await tractor.breakpoint() await tractor.breakpoint()
fails += 1
continue continue
else: # throttle wasn't fixed so error out immediately return (None, None)
raise _err
# else: # throttle wasn't fixed so error out immediately
# raise _err
async def backfill_bars( async def backfill_bars(
@ -892,7 +896,7 @@ async def backfill_bars(
https://github.com/pikers/piker/issues/128 https://github.com/pikers/piker/issues/128
""" """
first_bars, bars_array, next_dt = await get_bars(sym) (first_bars, bars_array, next_dt), fails = await get_bars(sym)
# write historical data to buffer # write historical data to buffer
shm.push(bars_array) shm.push(bars_array)
@ -904,9 +908,12 @@ async def backfill_bars(
i = 0 i = 0
while i < count: while i < count:
out = await get_bars(sym, end_dt=next_dt) out, fails = await get_bars(sym, end_dt=next_dt)
if out is None: if fails is None or fails > 1:
break
if out is (None, None):
# could be trying to retreive bars over weekend # could be trying to retreive bars over weekend
# TODO: add logic here to handle tradable hours and only grab # TODO: add logic here to handle tradable hours and only grab
# valid bars in the range # valid bars in the range