Invert data provider's OHLCV field defs

Turns out the reason we were originally making the `time: float` column in our
ohlcv arrays was because that's what **only** `ib` uses XD (and/or 🤦)

Instead we changed the default field type to an `int` (which is also
more correct since it avoids `float` rounding/precision discrepancies) and
thus **no longer need to override it** in any of the other (crypto)
backends. The only remaining customization (via `._ohlc_dtype`) back to
`float` is for `ib`, and pretty sure we can eventually drop even that..
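
For reference, a minimal sketch of what the inversion amounts to; the
`def_iohlcv_fields` layout below is *assumed* from the diffs that follow,
not copied from `piker.data`:

```python
# NOT the actual `piker.data` defs; a sketch assuming the field
# layout shown in the diffs below.
import numpy as np

# default OHLCV fields, now with integer epoch-seconds `time`:
def_iohlcv_fields: list[tuple[str, type]] = [
    ('index', int),
    ('time', int),  # int avoids float rounding/precision fuzz
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),
]

# only `ib` now inverts the `time` field back to `float`:
ib_ohlc_dtype: list[tuple[str, type]] = [
    (name, float) if name == 'time' else (name, ftype)
    for name, ftype in def_iohlcv_fields
]

arr = np.zeros(2, dtype=np.dtype(def_iohlcv_fields))
assert np.issubdtype(arr['time'].dtype, np.integer)
```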
branch: basic_buy_bot
Tyler Goodlet, 2023-05-31 18:07:00 -04:00
parent af64152640, commit 9859f601ca
8 changed files with 82 additions and 94 deletions

```diff
@@ -58,9 +58,10 @@ from ._util import (
     log,
     get_console_log,
 )
-from ..data.types import Struct
-from ..data.validate import FeedInit
-from ..data._web_bs import (
+from piker.data.types import Struct
+from piker.data.validate import FeedInit
+from piker.data import def_iohlcv_fields
+from piker.data._web_bs import (
     open_autorecon_ws,
     NoBsWs,
 )
@@ -70,30 +71,21 @@ _url = 'https://api.binance.com'
 
 # Broker specific ohlc schema (rest)
-_ohlc_dtype = [
-    ('index', int),
-    ('time', int),
-    ('open', float),
-    ('high', float),
-    ('low', float),
-    ('close', float),
-    ('volume', float),
-    ('bar_wap', float),  # will be zeroed by sampler if not filled
-
-    # XXX: some additional fields are defined in the docs:
-    # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
-
-    # ('close_time', int),
-    # ('quote_vol', float),
-    # ('num_trades', int),
-    # ('buy_base_vol', float),
-    # ('buy_quote_vol', float),
-    # ('ignore', float),
-]
+# XXX TODO? some additional fields are defined in the docs:
+# https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
+# _ohlc_dtype = [
+#     ('close_time', int),
+#     ('quote_vol', float),
+#     ('num_trades', int),
+#     ('buy_base_vol', float),
+#     ('buy_quote_vol', float),
+#     ('ignore', float),
+# ]
 
 # UI components allow this to be declared such that additional
 # (historical) fields can be exposed.
-ohlc_dtype = np.dtype(_ohlc_dtype)
+# ohlc_dtype = np.dtype(_ohlc_dtype)
 
 _show_wap_in_history = False
@@ -330,7 +322,7 @@ class Client:
             bar.typecast()
 
             row = []
-            for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
+            for j, (name, ftype) in enumerate(def_iohlcv_fields[1:]):
 
                 # TODO: maybe we should go nanoseconds on all
                 # history time stamps?
@@ -343,7 +335,10 @@ class Client:
             new_bars.append((i,) + tuple(row))
 
-        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
+        array = np.array(
+            new_bars,
+            dtype=def_iohlcv_fields,
+        ) if as_np else bars
         return array
```
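
The `def_iohlcv_fields[1:]` loop above is the cast-per-field pattern now
shared across backends: coerce each raw (string) kline field with the
dtype-declared type, then prepend the locally generated `index`. A
self-contained sketch, with the raw bar shape and field layout assumed:

```python
# sketch only: field layout + raw kline shape assumed, not the
# actual binance wire format.
import numpy as np

def_iohlcv_fields: list[tuple[str, type]] = [
    ('index', int), ('time', int), ('open', float), ('high', float),
    ('low', float), ('close', float), ('volume', float), ('bar_wap', float),
]

# raw bars as (str) tuples: time, open, high, low, close, volume, bar_wap
raw_bars = [
    ('1685577600', '27000.1', '27100.0', '26950.0', '27050.5', '12.3', '0'),
    ('1685577660', '27050.5', '27080.0', '27000.0', '27010.0', '8.1', '0'),
]

new_bars = []
for i, bar in enumerate(raw_bars):
    # cast each raw field using the dtype-declared type, skipping
    # the leading `index` field which is generated locally:
    row = tuple(
        ftype(bar[j])
        for j, (name, ftype) in enumerate(def_iohlcv_fields[1:])
    )
    new_bars.append((i,) + row)

array = np.array(new_bars, dtype=def_iohlcv_fields)
assert array['time'].dtype.kind == 'i'  # int epoch stamps by default
```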

```diff
@@ -18,43 +18,33 @@
 Deribit backend.
 
 '''
-import json
-import time
 import asyncio
-from contextlib import asynccontextmanager as acm, AsyncExitStack
-from functools import partial
+from contextlib import (
+    asynccontextmanager as acm,
+)
 from datetime import datetime
-from typing import Any, Optional, Iterable, Callable
+from functools import partial
+import time
+from typing import (
+    Any,
+    Optional,
+    Callable,
+)
 
 import pendulum
-import asks
 import trio
-from trio_typing import Nursery, TaskStatus
+from trio_typing import TaskStatus
 from fuzzywuzzy import process as fuzzy
 import numpy as np
-
-from piker.data.types import Struct
-from piker.data._web_bs import (
-    NoBsWs,
-    open_autorecon_ws,
-    open_jsonrpc_session
-)
-from .._util import resproc
-
-from piker import config
-from piker.log import get_logger
-
 from tractor.trionics import (
     broadcast_receiver,
-    BroadcastReceiver,
     maybe_open_context
 )
 from tractor import to_asyncio
+# XXX WOOPS XD
+# yeah you'll need to install it since it was removed in #489 by
+# accident; well i thought we had removed all usage..
 from cryptofeed import FeedHandler
 from cryptofeed.defines import (
     DERIBIT,
     L1_BOOK, TRADES,
@@ -62,6 +52,17 @@ from cryptofeed.defines import (
 )
 from cryptofeed.symbols import Symbol
 
+from piker.data.types import Struct
+from piker.data import def_iohlcv_fields
+from piker.data._web_bs import (
+    open_jsonrpc_session
+)
+from piker import config
+from piker.log import get_logger
+
 
 log = get_logger(__name__)
@@ -75,19 +76,6 @@ _ws_url = 'wss://www.deribit.com/ws/api/v2'
 _testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'
 
-# Broker specific ohlc schema (rest)
-_ohlc_dtype = [
-    ('index', int),
-    ('time', int),
-    ('open', float),
-    ('high', float),
-    ('low', float),
-    ('close', float),
-    ('volume', float),
-    ('bar_wap', float),  # will be zeroed by sampler if not filled
-]
-
 
 class JSONRPCResult(Struct):
     jsonrpc: str = '2.0'
     id: int
@@ -405,7 +393,7 @@ class Client:
             new_bars.append((i,) + tuple(row))
 
-        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else klines
+        array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else klines
         return array
 
     async def last_trades(
```

```diff
@@ -39,7 +39,6 @@ from piker.brokers._util import (
 )
 
 from cryptofeed import FeedHandler
-
 from cryptofeed.defines import (
     DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
 )
```

```diff
@@ -73,12 +73,34 @@ from ib_insync.wrapper import (
 from ib_insync.client import Client as ib_Client
 import numpy as np
 
+# TODO: in hindsight, probably all imports should be
+# non-relative for backends so that non-builting backends
+# can be easily modelled after this style B)
 from piker import config
 from piker.brokers._util import (
     log,
     get_logger,
 )
-from piker.data._source import base_ohlc_dtype
+
+# Broker specific ohlc schema which includes a vwap field
+_ohlc_dtype: list[tuple[str, type]] = [
+    ('index', int),
+
+    # NOTE XXX: only part that's diff
+    # from our default fields where
+    # time is normally an int.
+    # TODO: can we just cast to this
+    # at np.ndarray load time?
+    ('time', float),
+
+    ('open', float),
+    ('high', float),
+    ('low', float),
+    ('close', float),
+    ('volume', float),
+    ('count', int),
+    ('bar_wap', float),  # Wait do we need this?
+]
 
 
 _time_units = {
@@ -295,7 +317,7 @@ def bars_to_np(bars: list) -> np.ndarray:
     nparr = np.array(
         np_ready,
-        dtype=base_ohlc_dtype,
+        dtype=_ohlc_dtype,
     )
     assert nparr['time'][0] == bars[0].date.timestamp()
     assert nparr['time'][-1] == bars[-1].date.timestamp()
```
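
Re the TODO in the new `_ohlc_dtype` above ("can we just cast to this at
np.ndarray load time?"): one plausible answer is a structured `astype()`
cast between the float-stamped `ib` layout and the int-time default,
which numpy applies field by field. A sketch under that assumption (toy
two-field dtypes, not the real field set):

```python
# toy 2-field layouts, not the real piker dtypes.
import numpy as np

ib_ohlc_dtype = np.dtype([('time', float), ('close', float)])
def_dtype = np.dtype([('time', int), ('close', float)])

ib_arr = np.array([(1685577600.0, 27050.5)], dtype=ib_ohlc_dtype)

# structured `astype()` casts field-by-field, so float epoch
# stamps truncate to int seconds in a single call:
cast = ib_arr.astype(def_dtype)
assert cast['time'][0] == 1685577600
assert cast['close'][0] == 27050.5
```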

```diff
@@ -25,11 +25,6 @@ Sub-modules within break into the core functionalities:
   wrapping around ``ib_insync``.
 
 '''
-from piker.log import get_logger
-log = get_logger(__name__)
-
-
 from .api import (
     get_client,
 )
@@ -44,8 +39,10 @@ from .broker import (
     norm_trade_records,
 )
 
+
 __all__ = [
     'get_client',
+    'get_mkt_info',
     'trades_dialogue',
     'open_history_client',
     'open_symbol_search',
```

```diff
@@ -41,6 +41,7 @@ import trio
 
 from piker import config
 from piker.data.types import Struct
+from piker.data import def_iohlcv_fields
 from piker.accounting._mktinfo import (
     Asset,
     digits_to_dec,
@@ -52,29 +53,15 @@ from piker.brokers._util import (
     DataThrottle,
 )
 from piker.accounting import Transaction
-from . import log
+from piker.log import get_logger
+
+log = get_logger('piker.brokers.kraken')
 
 # <uri>/<version>/
 _url = 'https://api.kraken.com/0'
 
-
-# Broker specific ohlc schema which includes a vwap field
-_ohlc_dtype = [
-    ('index', int),
-    ('time', int),
-    ('open', float),
-    ('high', float),
-    ('low', float),
-    ('close', float),
-    ('volume', float),
-    ('count', int),
-    ('bar_wap', float),
-]
-
-# UI components allow this to be declared such that additional
-# (historical) fields can be exposed.
-ohlc_dtype = np.dtype(_ohlc_dtype)
+# TODO: this is the only backend providing this right?
+# in which case we should drop it from the defaults and
+# instead make a custom fields descr in this module!
 _show_wap_in_history = True
 _symbol_info_translation: dict[str, str] = {
     'tick_decimals': 'pair_decimals',
@@ -622,11 +609,11 @@ class Client:
                 new_bars.append(
                     (i,) + tuple(
                         ftype(bar[j]) for j, (name, ftype) in enumerate(
-                            _ohlc_dtype[1:]
+                            def_iohlcv_fields[1:]
                         )
                     )
                 )
-            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
+            array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else bars
             return array
         except KeyError:
             errmsg = json['error'][0]
```
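
Per the new TODO above about making "a custom fields descr in this
module": a hypothetical kraken-local extension of the defaults that
re-adds the `count` column (names and splice point assumed, nothing like
this is in the patch yet):

```python
# hypothetical kraken-local fields descr; `def_iohlcv_fields`
# layout assumed from the other diffs in this commit.
def_iohlcv_fields: list[tuple[str, type]] = [
    ('index', int), ('time', int), ('open', float), ('high', float),
    ('low', float), ('close', float), ('volume', float), ('bar_wap', float),
]

# splice kraken's extra `count` column in before `bar_wap`,
# matching the old `_ohlc_dtype` ordering removed above:
kraken_iohlcv_fields: list[tuple[str, type]] = (
    def_iohlcv_fields[:-1]
    + [('count', int)]
    + def_iohlcv_fields[-1:]
)
assert [f[0] for f in kraken_iohlcv_fields][-2:] == ['count', 'bar_wap']
```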

```diff
@@ -63,8 +63,8 @@ from piker.clearing._messages import (
     BrokerdPosition,
     BrokerdStatus,
 )
-from . import log
 from .api import (
+    log,
     Client,
     BrokerError,
     get_client,
```

```diff
@@ -54,8 +54,8 @@ from piker.brokers._util import (
 from piker.data.types import Struct
 from piker.data.validate import FeedInit
 from piker.data._web_bs import open_autorecon_ws, NoBsWs
-from . import log
 from .api import (
+    log,
     Client,
     Pair,
 )
```