commit a9185e7d6f

@@ -33,7 +33,6 @@ import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np
 import tractor
-from pydantic.dataclasses import dataclass
 import wsproto

 from .._cacheables import open_cached_client

@@ -106,14 +105,14 @@ class Pair(Struct, frozen=True):
     permissions: list[str]


-@dataclass
-class OHLC:
-    """Description of the flattened OHLC quote format.
+class OHLC(Struct):
+    '''
+    Description of the flattened OHLC quote format.

     For schema details see:
     https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

-    """
+    '''
     time: int

     open: float

@@ -262,6 +261,7 @@ class Client:
         for i, bar in enumerate(bars):

             bar = OHLC(*bar)
+            bar.typecast()

             row = []
             for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
@@ -0,0 +1,64 @@
+``kraken`` backend
+------------------
+though they don't have the most liquidity of all the cexes they sure are
+accommodating to those of us who appreciate a little ``xmr``.
+
+status
+******
+current support is *production grade* and both real-time data and order
+management should be correct and fast. this backend is used by core devs
+for live trading.
+
+
+config
+******
+In order to get order mode support your ``brokers.toml``
+needs to have something like the following:
+
+.. code:: toml
+
+    [kraken]
+    accounts.spot = 'spot'
+    key_descr = "spot"
+    api_key = "69696969696969696696969696969696969696969696969696969696"
+    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"
+
+
+If everything works correctly you should see any current positions
+loaded in the pps pane on chart load and you should also be able to
+check your trade records in the file::
+
+    <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml
+
+
+An example ledger file will have entries written verbatim from the
+trade events schema:
+
+.. code:: toml
+
+    [TFJBKK-SMBZS-VJ4UWS]
+    ordertxid = "SMBZSA-7CNQU-3HWLNJ"
+    postxid = "SMBZSE-M7IF5-CFI7LT"
+    pair = "XXMRZEUR"
+    time = 1655691993.4133966
+    type = "buy"
+    ordertype = "limit"
+    price = "103.97000000"
+    cost = "499.99999977"
+    fee = "0.80000000"
+    vol = "4.80907954"
+    margin = "0.00000000"
+    misc = ""
+
+
+your ``pps.toml`` file will have position entries like,
+
+.. code:: toml
+
+    [kraken.spot."xmreur.kraken"]
+    size = 4.80907954
+    ppu = 103.97000000
+    bsuid = "XXMRZEUR"
+    clears = [
+        { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
+    ]
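Reading that ``[kraken]`` section back out of ``brokers.toml`` is just a toml load; piker does it through its own ``config.load()`` (see ``get_config()`` in the api module below). A rough stdlib-only sketch, with the config path hard-coded as an assumption:

.. code:: python

    # rough sketch (not piker's loader): pull the [kraken] section
    # straight from brokers.toml with stdlib tomllib (3.11+). the
    # path below is an assumption; piker resolves its own config dir.
    import tomllib
    from pathlib import Path

    path = Path('~/.config/piker/brokers.toml').expanduser()
    with path.open('rb') as f:
        conf = tomllib.load(f)

    section = conf.get('kraken', {})
    print(section.get('key_descr'), bool(section.get('api_key')))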
@@ -19,7 +19,6 @@ Kraken web API wrapping.

 '''
 from contextlib import asynccontextmanager as acm
-from dataclasses import field
 from datetime import datetime
 import itertools
 from typing import (

@@ -29,17 +28,16 @@ from typing import (
 )
 import time

-# import trio
-# import tractor
+from bidict import bidict
 import pendulum
 import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np
-from pydantic.dataclasses import dataclass
 import urllib.parse
 import hashlib
 import hmac
 import base64
+import trio

 from piker import config
 from piker.brokers._util import (

@@ -48,6 +46,7 @@ from piker.brokers._util import (
     BrokerError,
     DataThrottle,
 )
+from piker.pp import Transaction
 from . import log

 # <uri>/<version>/

@@ -77,31 +76,6 @@ _symbol_info_translation: dict[str, str] = {
 }


-@dataclass
-class OHLC:
-    '''
-    Description of the flattened OHLC quote format.
-
-    For schema details see:
-    https://docs.kraken.com/websockets/#message-ohlc
-
-    '''
-    chan_id: int  # internal kraken id
-    chan_name: str  # eg. ohlc-1 (name-interval)
-    pair: str  # fx pair
-    time: float  # Begin time of interval, in seconds since epoch
-    etime: float  # End time of interval, in seconds since epoch
-    open: float  # Open price of interval
-    high: float  # High price within interval
-    low: float  # Low price within interval
-    close: float  # Close price of interval
-    vwap: float  # Volume weighted average price within interval
-    volume: float  # Accumulated volume **within interval**
-    count: int  # Number of trades within interval
-    # (sampled) generated tick data
-    ticks: list[Any] = field(default_factory=list)
-
-
 def get_config() -> dict[str, Any]:

     conf, path = config.load()

@@ -141,8 +115,13 @@ class Client:

 class Client:

+    # global symbol normalization table
+    _ntable: dict[str, str] = {}
+    _atable: bidict[str, str] = bidict()
+
     def __init__(
         self,
+        config: dict[str, str],
         name: str = '',
         api_key: str = '',
         secret: str = ''

@@ -153,6 +132,7 @@ class Client:
             'User-Agent':
                 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
         })
+        self.conf: dict[str, str] = config
         self._pairs: list[str] = []
         self._name = name
         self._api_key = api_key

@@ -212,8 +192,36 @@ class Client:
         data['nonce'] = str(int(1000*time.time()))
         return await self._private(method, data, uri_path)

+    async def get_balances(
+        self,
+    ) -> dict[str, float]:
+        '''
+        Return the set of asset balances for this account
+        by symbol.
+
+        '''
+        resp = await self.endpoint(
+            'Balance',
+            {},
+        )
+        by_bsuid = resp['result']
+        return {
+            self._atable[sym].lower(): float(bal)
+            for sym, bal in by_bsuid.items()
+        }
+
+    async def get_assets(self) -> dict[str, dict]:
+        resp = await self._public('Assets', {})
+        return resp['result']
+
+    async def cache_assets(self) -> None:
+        assets = self.assets = await self.get_assets()
+        for bsuid, info in assets.items():
+            self._atable[bsuid] = info['altname']
+
     async def get_trades(
         self,
+        fetch_limit: int = 10,

     ) -> dict[str, Any]:
         '''
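A rough sketch of the normalization ``get_balances()`` performs; the table values below are made up for illustration, while the real ``_atable`` is filled by ``cache_assets()`` from the ``Assets`` endpoint:

.. code:: python

    # rough sketch: kraken's Balance resp is keyed by internal asset
    # codes; the bidict asset table maps those to alt names which are
    # lowered for piker-internal use.
    from bidict import bidict

    _atable: bidict[str, str] = bidict({
        'XXBT': 'XBT',   # made-up subset of a real Assets resp
        'ZEUR': 'EUR',
    })

    resp_result = {'XXBT': '0.00300726', 'ZEUR': '120.51'}

    balances = {
        _atable[sym].lower(): float(bal)
        for sym, bal in resp_result.items()
    }
    assert balances == {'xbt': 0.00300726, 'eur': 120.51}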
@@ -225,6 +233,8 @@ class Client:
         trades_by_id: dict[str, Any] = {}

         for i in itertools.count():
+            if i >= fetch_limit:
+                break
+
             # increment 'ofs' pagination offset
             ofs = i*50
@@ -254,6 +264,61 @@ class Client:
         assert count == len(trades_by_id.values())
         return trades_by_id

+    async def get_xfers(
+        self,
+        asset: str,
+        src_asset: str = '',
+
+    ) -> dict[str, Transaction]:
+        '''
+        Get asset balance transfer transactions.
+
+        Currently only withdrawals are supported.
+
+        '''
+        xfers: list[dict] = (await self.endpoint(
+            'WithdrawStatus',
+            {'asset': asset},
+        ))['result']
+
+        # eg. resp schema:
+        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
+        # 'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
+        # 'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
+        # 'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
+        # 'amount': '0.00300726', 'fee': '0.00001000', 'time':
+        # 1658347714, 'status': 'Success'}]}
+
+        trans: dict[str, Transaction] = {}
+        for entry in xfers:
+            # look up the normalized name
+            asset = self._atable[entry['asset']].lower()
+
+            # XXX: this is in the asset units (likely) so it isn't
+            # quite the same as a commisions cost necessarily..)
+            cost = float(entry['fee'])
+
+            tran = Transaction(
+                fqsn=asset + '.kraken',
+                tid=entry['txid'],
+                dt=pendulum.from_timestamp(entry['time']),
+                bsuid=f'{asset}{src_asset}',
+                size=-1*(
+                    float(entry['amount'])
+                    +
+                    cost
+                ),
+                # since this will be treated as a "sell" it
+                # shouldn't be needed to compute the be price.
+                price='NaN',
+
+                # XXX: see note above
+                cost=0,
+            )
+            trans[tran.tid] = tran
+
+        return trans
+
     async def submit_limit(
         self,
         symbol: str,
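A quick sketch of the size math in ``get_xfers()``: a withdrawal is booked like a sell of the on-chain amount *plus* the network fee, so the resulting ledger size is negative by both. The values below are the ones from the example resp schema in the comment above:

.. code:: python

    # rough sketch of the withdrawal -> Transaction size arithmetic
    entry = {
        'asset': 'XXBT',
        'amount': '0.00300726',
        'fee': '0.00001000',
        'time': 1658347714,
    }

    cost = float(entry['fee'])
    size = -1 * (float(entry['amount']) + cost)
    # position shrinks by amount + fee
    assert round(size, 8) == -0.00301726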
@@ -282,6 +347,7 @@ class Client:
                 "volume": str(size),
             }
             return await self.endpoint('AddOrder', data)
+
         else:
             # Edit order data for kraken api
             data["txid"] = reqid

@@ -301,7 +367,9 @@ class Client:
     async def symbol_info(
         self,
         pair: Optional[str] = None,
-    ):
+    ) -> dict[str, dict[str, str]]:
+
         if pair is not None:
             pairs = {'pair': pair}
         else:

@@ -327,6 +395,12 @@ class Client:
         if not self._pairs:
             self._pairs = await self.symbol_info()

+            ntable = {}
+            for restapikey, info in self._pairs.items():
+                ntable[restapikey] = ntable[info['wsname']] = info['altname']
+
+            self._ntable.update(ntable)
+
         return self._pairs

     async def search_symbols(

@@ -424,45 +498,43 @@ class Client:
         else:
             raise BrokerError(errmsg)

+    @classmethod
+    def normalize_symbol(
+        cls,
+        ticker: str
+    ) -> str:
+        '''
+        Normalize symbol names to to a 3x3 pair from the global
+        definition map which we build out from the data retreived from
+        the 'AssetPairs' endpoint, see methods above.
+
+        '''
+        ticker = cls._ntable[ticker]
+        symlen = len(ticker)
+        if symlen != 6:
+            raise ValueError(f'Unhandled symbol: {ticker}')
+
+        return ticker.lower()
+

 @acm
 async def get_client() -> Client:

-    section = get_config()
-    if section:
+    conf = get_config()
+    if conf:
         client = Client(
-            name=section['key_descr'],
-            api_key=section['api_key'],
-            secret=section['secret']
+            conf,
+            name=conf['key_descr'],
+            api_key=conf['api_key'],
+            secret=conf['secret']
         )
     else:
-        client = Client()
+        client = Client({})

-    # at startup, load all symbols locally for fast search
-    await client.cache_symbols()
+    # at startup, load all symbols, and asset info in
+    # batch requests.
+    async with trio.open_nursery() as nurse:
+        nurse.start_soon(client.cache_assets)
+        await client.cache_symbols()

     yield client


-def normalize_symbol(
-    ticker: str
-) -> str:
-    '''
-    Normalize symbol names to to a 3x3 pair.
-
-    '''
-    remap = {
-        'XXBTZEUR': 'XBTEUR',
-        'XXMRZEUR': 'XMREUR',
-
-        # ws versions? pretty weird..
-        'XBT/EUR': 'XBTEUR',
-        'XMR/EUR': 'XMREUR',
-    }
-    symlen = len(ticker)
-    if symlen != 6:
-        ticker = remap[ticker]
-    else:
-        raise ValueError(f'Unhandled symbol: {ticker}')
-
-    return ticker.lower()
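The module-level ``normalize_symbol()`` with its hard-coded ``remap`` dict is replaced by a classmethod backed by the ``_ntable`` built in ``cache_symbols()``. A rough self-contained sketch of the lookup; the two table entries are just examples taken from the deleted remap:

.. code:: python

    # rough sketch: both REST pair keys and ws names point at the
    # same 6-char 'altname', which is lowered for internal use.
    _ntable: dict[str, str] = {
        'XXMRZEUR': 'XMREUR',   # REST AssetPairs key
        'XMR/EUR': 'XMREUR',    # ws name
    }

    def normalize_symbol(ticker: str) -> str:
        ticker = _ntable[ticker]
        if len(ticker) != 6:
            raise ValueError(f'Unhandled symbol: {ticker}')
        return ticker.lower()

    assert normalize_symbol('XXMRZEUR') == normalize_symbol('XMR/EUR') == 'xmreur'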
File diff suppressed because it is too large
@@ -19,7 +19,6 @@ Real-time and historical data feed endpoints.

 '''
 from contextlib import asynccontextmanager as acm
-from dataclasses import asdict
 from datetime import datetime
 from typing import (
     Any,

@@ -28,6 +27,7 @@ from typing import (
 )
 import time

+from async_generator import aclosing
 from fuzzywuzzy import process as fuzzy
 import numpy as np
 import pendulum

@@ -49,7 +49,6 @@ from piker.data._web_bs import open_autorecon_ws, NoBsWs
 from . import log
 from .api import (
     Client,
-    OHLC,
 )


@@ -87,6 +86,30 @@ class Pair(Struct):
     ordermin: float  # minimum order volume for pair


+class OHLC(Struct):
+    '''
+    Description of the flattened OHLC quote format.
+
+    For schema details see:
+    https://docs.kraken.com/websockets/#message-ohlc
+
+    '''
+    chan_id: int  # internal kraken id
+    chan_name: str  # eg. ohlc-1 (name-interval)
+    pair: str  # fx pair
+    time: float  # Begin time of interval, in seconds since epoch
+    etime: float  # End time of interval, in seconds since epoch
+    open: float  # Open price of interval
+    high: float  # High price within interval
+    low: float  # Low price within interval
+    close: float  # Close price of interval
+    vwap: float  # Volume weighted average price within interval
+    volume: float  # Accumulated volume **within interval**
+    count: int  # Number of trades within interval
+    # (sampled) generated tick data
+    ticks: list[Any] = []
+
+
 async def stream_messages(
     ws: NoBsWs,
 ):
@@ -117,9 +140,8 @@ async def stream_messages(
             too_slow_count = 0
             continue

-        if isinstance(msg, dict):
-            if msg.get('event') == 'heartbeat':
+        match msg:
+            case {'event': 'heartbeat'}:
                 now = time.time()
                 delay = now - last_hb
                 last_hb = now

@@ -130,11 +152,9 @@ async def stream_messages(

                 continue

-            err = msg.get('errorMessage')
-            if err:
-                raise BrokerError(err)
-            else:
-                yield msg
+            case _:
+                # passthrough sub msgs
+                yield msg


 async def process_data_feed_msgs(

@@ -145,44 +165,69 @@ async def process_data_feed_msgs(

     '''
     async for msg in stream_messages(ws):
-
-        chan_id, *payload_array, chan_name, pair = msg
-
-        if 'ohlc' in chan_name:
-
-            yield 'ohlc', OHLC(chan_id, chan_name, pair, *payload_array[0])
-
-        elif 'spread' in chan_name:
-
-            bid, ask, ts, bsize, asize = map(float, payload_array[0])
-
-            # TODO: really makes you think IB has a horrible API...
-            quote = {
-                'symbol': pair.replace('/', ''),
-                'ticks': [
-                    {'type': 'bid', 'price': bid, 'size': bsize},
-                    {'type': 'bsize', 'price': bid, 'size': bsize},
-
-                    {'type': 'ask', 'price': ask, 'size': asize},
-                    {'type': 'asize', 'price': ask, 'size': asize},
-                ],
-            }
-            yield 'l1', quote
-
-        # elif 'book' in msg[-2]:
-        #     chan_id, *payload_array, chan_name, pair = msg
-        #     print(msg)
-
-        else:
-            print(f'UNHANDLED MSG: {msg}')
-            yield msg
+        match msg:
+            case {
+                'errorMessage': errmsg
+            }:
+                raise BrokerError(errmsg)
+
+            case {
+                'event': 'subscriptionStatus',
+            } as sub:
+                log.info(
+                    'WS subscription is active:\n'
+                    f'{sub}'
+                )
+                continue
+
+            case [
+                chan_id,
+                *payload_array,
+                chan_name,
+                pair
+            ]:
+                if 'ohlc' in chan_name:
+                    ohlc = OHLC(
+                        chan_id,
+                        chan_name,
+                        pair,
+                        *payload_array[0]
+                    )
+                    ohlc.typecast()
+                    yield 'ohlc', ohlc
+
+                elif 'spread' in chan_name:
+                    bid, ask, ts, bsize, asize = map(
+                        float, payload_array[0])
+
+                    # TODO: really makes you think IB has a horrible API...
+                    quote = {
+                        'symbol': pair.replace('/', ''),
+                        'ticks': [
+                            {'type': 'bid', 'price': bid, 'size': bsize},
+                            {'type': 'bsize', 'price': bid, 'size': bsize},
+
+                            {'type': 'ask', 'price': ask, 'size': asize},
+                            {'type': 'asize', 'price': ask, 'size': asize},
+                        ],
+                    }
+                    yield 'l1', quote
+
+                # elif 'book' in msg[-2]:
+                #     chan_id, *payload_array, chan_name, pair = msg
+                #     print(msg)
+
+            case _:
+                print(f'UNHANDLED MSG: {msg}')
+                # yield msg


 def normalize(
     ohlc: OHLC,

 ) -> dict:
-    quote = asdict(ohlc)
+    quote = ohlc.to_dict()
     quote['broker_ts'] = quote['time']
     quote['brokerd_ts'] = time.time()
     quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
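The feed parser now leans on 3.10 structural pattern matching to split kraken's WS traffic: admin/error messages are dicts while data messages are lists. A rough standalone sketch of that routing, with message shapes simplified rather than the full piker handler:

.. code:: python

    # rough sketch (python 3.10+): route kraken-style ws msgs by shape
    def route(msg):
        match msg:
            case {'errorMessage': errmsg}:
                return ('error', errmsg)

            case {'event': 'subscriptionStatus'} as sub:
                return ('sub', sub)

            case [chan_id, *payload_array, chan_name, pair]:
                return ('data', chan_name, pair, payload_array)

            case _:
                return ('unhandled', msg)

    assert route({'event': 'subscriptionStatus', 'status': 'subscribed'})[0] == 'sub'
    assert route([42, [['103.97', '4.8']], 'ohlc-1', 'XMR/EUR'])[0] == 'data'
    assert route({'errorMessage': 'EQuery:Unknown asset pair'})[0] == 'error'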
@@ -376,17 +421,15 @@ async def stream_quotes(
     # see the tips on reconnection logic:
     # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
     ws: NoBsWs
-    async with open_autorecon_ws(
-        'wss://ws.kraken.com/',
-        fixture=subscribe,
-    ) as ws:
+    async with (
+        open_autorecon_ws(
+            'wss://ws.kraken.com/',
+            fixture=subscribe,
+        ) as ws,
+        aclosing(process_data_feed_msgs(ws)) as msg_gen,
+    ):
         # pull a first quote and deliver
-        msg_gen = process_data_feed_msgs(ws)
-
-        # TODO: use ``anext()`` when it lands in 3.10!
-        typ, ohlc_last = await msg_gen.__anext__()
+        typ, ohlc_last = await anext(msg_gen)

         topic, quote = normalize(ohlc_last)

         task_status.started((init_msgs, quote))
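Wrapping the message generator in ``aclosing()`` ties its cleanup to the ``async with`` block, so the generator's ``finally`` runs as soon as the feed task exits or is cancelled rather than whenever it happens to be garbage collected. A minimal sketch of that behaviour, using stdlib ``contextlib.aclosing`` (3.10+) in place of the ``async_generator`` backport imported above:

.. code:: python

    # minimal sketch: aclosing() guarantees the generator's cleanup
    # (its `finally`) runs when the block exits, even on early break.
    from contextlib import aclosing
    import trio

    async def ticks():
        try:
            n = 0
            while True:
                yield n
                n += 1
        finally:
            print('generator closed cleanly')

    async def main():
        async with aclosing(ticks()) as gen:
            async for n in gen:
                if n == 3:
                    break  # gen.aclose() still runs on block exit

    trio.run(main)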
@@ -88,7 +88,8 @@ def mk_check(

 @dataclass
 class _DarkBook:
-    '''EMS-trigger execution book.
+    '''
+    EMS-trigger execution book.

     Contains conditions for executions (aka "orders" or "triggers")
     which are not exposed to brokers and thus the market; i.e. these are

@@ -653,6 +654,13 @@ async def translate_and_relay_brokerd_events(
         else:
             # check for existing live flow entry
             entry = book._ems_entries.get(oid)
+            old_reqid = entry.reqid
+
+            if old_reqid and old_reqid != reqid:
+                log.warning(
+                    f'Brokerd order id change for {oid}:\n'
+                    f'{old_reqid} -> {reqid}'
+                )

         # initial response to brokerd order request
         if name == 'ack':

@@ -663,6 +671,10 @@ async def translate_and_relay_brokerd_events(
             # a ``BrokerdOrderAck`` **must** be sent after an order
             # request in order to establish this id mapping.
             book._ems2brokerd_ids[oid] = reqid
+            log.info(
+                'Rx ACK for order\n'
+                f'oid: {oid} -> reqid: {reqid}'
+            )

             # new order which has not yet be registered into the
             # local ems book, insert it now and handle 2 cases:

@@ -690,6 +702,9 @@ async def translate_and_relay_brokerd_events(
             # a live flow now exists
             oid = entry.oid

+            # TODO: instead this should be our status set.
+            # ack, open, fill, closed, cancelled'
+
             resp = None
             broker_details = {}
@@ -186,6 +186,7 @@ class BrokerdStatus(Struct):
     # XXX: should be best effort set for every update
     account: str = ''

+    # TODO: instead (ack, pending, open, fill, clos(ed), cancelled)
     # {
     # 'submitted',
     # 'cancelled',
@@ -39,7 +39,11 @@ from docker.errors import (
     APIError,
     # ContainerError,
 )
-from requests.exceptions import ConnectionError, ReadTimeout
+import requests
+from requests.exceptions import (
+    ConnectionError,
+    ReadTimeout,
+)

 from ..log import get_logger, get_console_log
 from .. import config

@@ -188,13 +192,12 @@ class Container:

     def hard_kill(self, start: float) -> None:
         delay = time.time() - start
-        log.error(
-            f'Failed to kill container {self.cntr.id} after {delay}s\n'
-            'sending SIGKILL..'
-        )
         # get out the big guns, bc apparently marketstore
         # doesn't actually know how to terminate gracefully
         # :eyeroll:...
+        log.error(
+            f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
+        )
         self.try_signal('SIGKILL')
         self.cntr.wait(
             timeout=3,

@@ -218,20 +221,25 @@ class Container:
         self.try_signal('SIGINT')

         start = time.time()
-        for _ in range(30):
+        for _ in range(6):

             with trio.move_on_after(0.5) as cs:
-                cs.shield = True
                 log.cancel('polling for CNTR logs...')

                 try:
                     await self.process_logs_until(stop_msg)
                 except ApplicationLogError:
                     hard_kill = True
+                else:
+                    # if we aren't cancelled on above checkpoint then we
+                    # assume we read the expected stop msg and
+                    # terminated.
+                    break

-                # if we aren't cancelled on above checkpoint then we
-                # assume we read the expected stop msg and terminated.
-                break
+            if cs.cancelled_caught:
+                # on timeout just try a hard kill after
+                # a quick container sync-wait.
+                hard_kill = True

             try:
                 log.info(f'Polling for container shutdown:\n{cid}')

@@ -254,9 +262,16 @@ class Container:
             except (
                 docker.errors.APIError,
                 ConnectionError,
+                requests.exceptions.ConnectionError,
+                trio.Cancelled,
             ):
                 log.exception('Docker connection failure')
                 self.hard_kill(start)
+                raise
+
+            except trio.Cancelled:
+                log.exception('trio cancelled...')
+                self.hard_kill(start)
             else:
                 hard_kill = True
@@ -305,16 +320,13 @@ async def open_ahabd(
     ))

     try:

         # TODO: we might eventually want a proxy-style msg-prot here
         # to allow remote control of containers without needing
         # callers to have root perms?
         await trio.sleep_forever()

     finally:
-        # needed?
-        with trio.CancelScope(shield=True):
-            await cntr.cancel(stop_msg)
+        await cntr.cancel(stop_msg)


 async def start_ahab(
@@ -66,3 +66,10 @@ class Struct(
         ).decode(
             msgspec.msgpack.Encoder().encode(self)
         )
+
+    def typecast(
+        self,
+        # fields: Optional[list[str]] = None,
+    ) -> None:
+        for fname, ftype in self.__annotations__.items():
+            setattr(self, fname, ftype(getattr(self, fname)))
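``typecast()`` is what the kraken and binance OHLC handling above call right after construction: the exchange payload fields arrive as strings and this coerces each field to its annotated type in place. A rough standalone sketch of the idea on a plain ``msgspec.Struct``; piker's own ``Struct`` base adds this as a method:

.. code:: python

    # rough sketch: annotation-driven coercion of string payload fields
    import msgspec

    class OHLC(msgspec.Struct):
        time: float
        open: float
        close: float

    def typecast(struct: msgspec.Struct) -> None:
        for fname, ftype in type(struct).__annotations__.items():
            setattr(struct, fname, ftype(getattr(struct, fname)))

    # ws payloads deliver numbers as strings; no validation happens
    # on construction, so coerce in a second pass.
    bar = OHLC('1655691993.4', '103.97', '104.01')
    typecast(bar)
    assert isinstance(bar.close, float) and bar.close == 104.01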
@@ -63,7 +63,7 @@ from ..log import get_logger
 log = get_logger(__name__)

 # TODO: load this from a config.toml!
-_quote_throttle_rate: int = 60  # Hz
+_quote_throttle_rate: int = 22  # Hz


 # a working tick-type-classes template
@@ -794,15 +794,11 @@ async def process_trades_and_update_ui(
             pp_msg_symbol = msg['symbol'].lower()
             fqsn = sym.front_fqsn()
             broker, key = sym.front_feed()
-            # print(
-            #     f'pp msg symbol: {pp_msg_symbol}\n',
-            #     f'fqsn: {fqsn}\n',
-            #     f'front key: {key}\n',
-            # )

             if (
-                pp_msg_symbol == fqsn.replace(f'.{broker}', '')
+                pp_msg_symbol == fqsn
+                or pp_msg_symbol == fqsn.removesuffix(f'.{broker}')
             ):
+                log.info(f'{fqsn} matched pp msg: {fmsg}')
                 tracker = mode.trackers[msg['account']]
                 tracker.live_pp.update_from_msg(msg)
                 # update order pane widgets

@@ -843,16 +839,25 @@ async def process_trades_and_update_ui(
         # resp to 'cancel' request or error condition
         # for action request
         elif resp in (
-            'broker_cancelled',
             'broker_inactive',
             'broker_errored',
+        ):
+            # delete level line from view
+            mode.on_cancel(oid)
+            broker_msg = msg['brokerd_msg']
+            log.error(
+                f'Order {oid}->{resp} with:\n{pformat(broker_msg)}'
+            )
+
+        elif resp in (
+            'broker_cancelled',
             'dark_cancelled'
         ):
             # delete level line from view
             mode.on_cancel(oid)
             broker_msg = msg['brokerd_msg']
-            log.warning(
-                f'Order {oid} failed with:\n{pformat(broker_msg)}'
+            log.cancel(
+                f'Order {oid}->{resp} with:\n{pformat(broker_msg)}'
             )

         elif resp in (