Invert data provider's OHLCV field defs

Turns out the reason we were originally making the `time: float` column in our
ohlcv arrays is that **only** ib uses float time stamps XD (and/or 🤦)

Instead we changed the default field type to `int` (which is also
more correct since it avoids `float` rounding/precision discrepancies) and thus
**do not need to override it** in any of the other (crypto) backends. The
`float` customization (via a backend-local `._ohlc_dtype`) is now applied
only for `ib` (and we're pretty sure even that can go away eventually..)!
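
To make the change concrete, here's a minimal sketch of the idea (the exact
contents of `piker.data.def_iohlcv_fields` and the `ib_ohlc_dtype` name are
illustrative stand-ins, not the real definitions):

```python
import numpy as np

# illustrative stand-in for the shared default field defs exported
# from `piker.data`; the key point is that `time` defaults to an `int`:
def_iohlcv_fields: list[tuple[str, type]] = [
    ('index', int),
    ('time', int),  # int epoch stamps by default now
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),
]

# only `ib` overrides the `time` field back to `float` (via its
# backend-local `._ohlc_dtype`); roughly:
ib_ohlc_dtype = np.dtype(
    [('index', int), ('time', float)] + def_iohlcv_fields[2:]
)

assert np.dtype(def_iohlcv_fields)['time'] == np.dtype(int)
assert ib_ohlc_dtype['time'] == np.dtype(float)
```
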
basic_buy_bot
Tyler Goodlet 2023-05-31 18:07:00 -04:00
parent af64152640
commit 9859f601ca
8 changed files with 82 additions and 94 deletions


@@ -58,9 +58,10 @@ from ._util import (
log,
get_console_log,
)
from ..data.types import Struct
from ..data.validate import FeedInit
from ..data._web_bs import (
from piker.data.types import Struct
from piker.data.validate import FeedInit
from piker.data import def_iohlcv_fields
from piker.data._web_bs import (
open_autorecon_ws,
NoBsWs,
)
@@ -70,30 +71,21 @@ _url = 'https://api.binance.com'
# Broker specific ohlc schema (rest)
_ohlc_dtype = [
('index', int),
('time', int),
('open', float),
('high', float),
('low', float),
('close', float),
('volume', float),
('bar_wap', float), # will be zeroed by sampler if not filled
# XXX: some additional fields are defined in the docs:
# https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
# XXX TODO? some additional fields are defined in the docs:
# https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
# _ohlc_dtype = [
# ('close_time', int),
# ('quote_vol', float),
# ('num_trades', int),
# ('buy_base_vol', float),
# ('buy_quote_vol', float),
# ('ignore', float),
]
# ]
# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)
# ohlc_dtype = np.dtype(_ohlc_dtype)
_show_wap_in_history = False
@@ -330,7 +322,7 @@ class Client:
bar.typecast()
row = []
for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
for j, (name, ftype) in enumerate(def_iohlcv_fields[1:]):
# TODO: maybe we should go nanoseconds on all
# history time stamps?
@@ -343,7 +335,10 @@ class Client:
new_bars.append((i,) + tuple(row))
array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
array = np.array(
new_bars,
dtype=def_iohlcv_fields,
) if as_np else bars
return array
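
Aside: the row-building pattern that binance here (and kraken further down)
now share is just "zip each raw bar against the `(name, ftype)` pairs,
skipping the leading `index` field, and let the types do the casting". A
stripped-down, self-contained version of that loop (with made-up field defs
and values, not the real `piker.data.def_iohlcv_fields`) looks like:

```python
import numpy as np

# stand-in field defs; the real ones come from `piker.data`
fields: list[tuple[str, type]] = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('close', float),
]

# fake raw bars as string columns, provider-API style
raw_bars: list[list[str]] = [
    ['1685577600', '27000.1', '27050.2'],
    ['1685577660', '27050.2', '27075.0'],
]

new_bars = []
for i, raw in enumerate(raw_bars):
    # cast each raw string to its declared field type (skipping `index`)
    row = tuple(
        ftype(raw[j]) for j, (name, ftype) in enumerate(fields[1:])
    )
    new_bars.append((i,) + row)

array = np.array(new_bars, dtype=fields)
assert array['time'].dtype == np.dtype(int)
```
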


@@ -18,43 +18,33 @@
Deribit backend.
'''
import json
import time
import asyncio
from contextlib import asynccontextmanager as acm, AsyncExitStack
from functools import partial
from contextlib import (
asynccontextmanager as acm,
)
from datetime import datetime
from typing import Any, Optional, Iterable, Callable
import pendulum
import asks
import trio
from trio_typing import Nursery, TaskStatus
from fuzzywuzzy import process as fuzzy
import numpy as np
from piker.data.types import Struct
from piker.data._web_bs import (
NoBsWs,
open_autorecon_ws,
open_jsonrpc_session
from functools import partial
import time
from typing import (
Any,
Optional,
Callable,
)
from .._util import resproc
from piker import config
from piker.log import get_logger
import pendulum
import trio
from trio_typing import TaskStatus
from fuzzywuzzy import process as fuzzy
import numpy as np
from tractor.trionics import (
broadcast_receiver,
BroadcastReceiver,
maybe_open_context
)
from tractor import to_asyncio
# XXX WOOPS XD
# yeah you'll need to install it since it was removed in #489 by
# accident; well i thought we had removed all usage..
from cryptofeed import FeedHandler
from cryptofeed.defines import (
DERIBIT,
L1_BOOK, TRADES,
@@ -62,6 +52,17 @@ from cryptofeed.defines import (
)
from cryptofeed.symbols import Symbol
from piker.data.types import Struct
from piker.data import def_iohlcv_fields
from piker.data._web_bs import (
open_jsonrpc_session
)
from piker import config
from piker.log import get_logger
log = get_logger(__name__)
@@ -75,19 +76,6 @@ _ws_url = 'wss://www.deribit.com/ws/api/v2'
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'
# Broker specific ohlc schema (rest)
_ohlc_dtype = [
('index', int),
('time', int),
('open', float),
('high', float),
('low', float),
('close', float),
('volume', float),
('bar_wap', float), # will be zeroed by sampler if not filled
]
class JSONRPCResult(Struct):
jsonrpc: str = '2.0'
id: int
@@ -405,7 +393,7 @@ class Client:
new_bars.append((i,) + tuple(row))
array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else klines
array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else klines
return array
async def last_trades(


@@ -39,7 +39,6 @@ from piker.brokers._util import (
)
from cryptofeed import FeedHandler
from cryptofeed.defines import (
DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
)


@@ -73,12 +73,34 @@ from ib_insync.wrapper import (
from ib_insync.client import Client as ib_Client
import numpy as np
# TODO: in hindsight, probably all imports should be
# non-relative for backends so that non-builtin backends
# can be easily modelled after this style B)
from piker import config
from piker.brokers._util import (
log,
get_logger,
)
from piker.data._source import base_ohlc_dtype
# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype: list[tuple[str, type]] = [
('index', int),
# NOTE XXX: only part that's diff
# from our default fields where
# time is normally an int.
# TODO: can we just cast to this
# at np.ndarray load time?
('time', float),
('open', float),
('high', float),
('low', float),
('close', float),
('volume', float),
('count', int),
('bar_wap', float), # Wait do we need this?
]
_time_units = {
@@ -295,7 +317,7 @@ def bars_to_np(bars: list) -> np.ndarray:
nparr = np.array(
np_ready,
dtype=base_ohlc_dtype,
dtype=_ohlc_dtype,
)
assert nparr['time'][0] == bars[0].date.timestamp()
assert nparr['time'][-1] == bars[-1].date.timestamp()
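
Re the `TODO` in the ib dtype above ("can we just cast to this at np.ndarray
load time?"): a structured-to-structured `.astype()` does cast field-by-field,
so something like the following would likely work (dtypes trimmed down and
made up purely for illustration; the real ib dtype also carries a `count`
field, so the field lists would need to line up):

```python
import numpy as np

# toy dtypes differing only in the `time` field type:
float_time = np.dtype([('time', float), ('open', float), ('close', float)])
int_time = np.dtype([('time', int), ('open', float), ('close', float)])

bars = np.array(
    [(1685577600.0, 27000.1, 27050.2)],
    dtype=float_time,
)

# field-wise cast: the float epoch stamps are truncated down to ints
casted = bars.astype(int_time)
assert casted['time'].dtype == np.dtype(int)
assert casted['time'][0] == 1685577600
```
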


@@ -25,11 +25,6 @@ Sub-modules within break into the core functionalities:
wrapping around ``ib_insync``.
'''
from piker.log import get_logger
log = get_logger(__name__)
from .api import (
get_client,
)
@@ -44,8 +39,10 @@ from .broker import (
norm_trade_records,
)
__all__ = [
'get_client',
'get_mkt_info',
'trades_dialogue',
'open_history_client',
'open_symbol_search',


@@ -41,6 +41,7 @@ import trio
from piker import config
from piker.data.types import Struct
from piker.data import def_iohlcv_fields
from piker.accounting._mktinfo import (
Asset,
digits_to_dec,
@@ -52,29 +53,15 @@ from piker.brokers._util import (
DataThrottle,
)
from piker.accounting import Transaction
from . import log
from piker.log import get_logger
log = get_logger('piker.brokers.kraken')
# <uri>/<version>/
_url = 'https://api.kraken.com/0'
# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
('index', int),
('time', int),
('open', float),
('high', float),
('low', float),
('close', float),
('volume', float),
('count', int),
('bar_wap', float),
]
# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)
# TODO: this is the only backend providing this right?
# in which case we should drop it from the defaults and
# instead make a custom fields descr in this module!
_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
'tick_decimals': 'pair_decimals',
@@ -622,11 +609,11 @@ class Client:
new_bars.append(
(i,) + tuple(
ftype(bar[j]) for j, (name, ftype) in enumerate(
_ohlc_dtype[1:]
def_iohlcv_fields[1:]
)
)
)
array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else bars
return array
except KeyError:
errmsg = json['error'][0]


@@ -63,8 +63,8 @@ from piker.clearing._messages import (
BrokerdPosition,
BrokerdStatus,
)
from . import log
from .api import (
log,
Client,
BrokerError,
get_client,


@@ -54,8 +54,8 @@ from piker.brokers._util import (
from piker.data.types import Struct
from piker.data.validate import FeedInit
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (
log,
Client,
Pair,
)