Compare commits: 310_plus...kraken_use

225 Commits
Author | SHA1 | Date |
---|---|---|
Tyler Goodlet | 9d0589f8d5 | |
Tyler Goodlet | 1a291939c3 | |
Tyler Goodlet | 69e501764a | |
Tyler Goodlet | 1cbf45b4c4 | |
Tyler Goodlet | 227a80469e | |
Tyler Goodlet | dc8072c6db | |
Tyler Goodlet | 808dbb12e6 | |
Tyler Goodlet | 44e21b1de9 | |
Tyler Goodlet | b3058b8c78 | |
Tyler Goodlet | db564d7977 | |
Tyler Goodlet | e6a3e8b65a | |
Tyler Goodlet | d43ba47ebe | |
Tyler Goodlet | 168c9863cb | |
Tyler Goodlet | 0fb31586fd | |
Tyler Goodlet | 8b609f531b | |
Tyler Goodlet | d502274eb9 | |
Tyler Goodlet | b1419c850d | |
Tyler Goodlet | aa7f24b6db | |
Tyler Goodlet | 319e68c855 | |
Tyler Goodlet | 64f920d7e5 | |
Tyler Goodlet | 3b79743c7b | |
Tyler Goodlet | 54008a1976 | |
Tyler Goodlet | b96b7a8b9c | |
Tyler Goodlet | 0fca1b3e1a | |
Tyler Goodlet | 2386270cad | |
Tyler Goodlet | 5b135fad61 | |
Tyler Goodlet | abb6854e74 | |
Tyler Goodlet | 22f9b2552c | |
Tyler Goodlet | 57f2478dc7 | |
Tyler Goodlet | 5dc9a61ec4 | |
Tyler Goodlet | b0d3d9bb01 | |
Tyler Goodlet | caecbaa231 | |
Tyler Goodlet | a20a8d95d5 | |
Tyler Goodlet | ba93f96c71 | |
Tyler Goodlet | 804e9afdde | |
Tyler Goodlet | 89bcaed15e | |
Tyler Goodlet | bb2f8e4304 | |
Tyler Goodlet | 8ab8268edc | |
Tyler Goodlet | bbcc55b24c | |
Tyler Goodlet | 9fa9c27e4d | |
Tyler Goodlet | d9b4c4a413 | |
Tyler Goodlet | 84cab1327d | |
Tyler Goodlet | df4cec930b | |
Tyler Goodlet | ab08dc582d | |
Tyler Goodlet | f79d9865a0 | |
Tyler Goodlet | 00378c330c | |
goodboy | 180b97b180 | |
Tyler Goodlet | f0b3a4d5c0 | |
goodboy | e2e66324cc | |
Tyler Goodlet | d950c78b81 | |
Tyler Goodlet | 7dbcbfdcd5 | |
Tyler Goodlet | 279c899de5 | |
Tyler Goodlet | db5aacdb9c | |
Tyler Goodlet | c7b84ab500 | |
Tyler Goodlet | 9967adb371 | |
Tyler Goodlet | 30ff793a22 | |
Tyler Goodlet | 666587991a | |
goodboy | 01005e40a8 | |
goodboy | d81e629c29 | |
Tyler Goodlet | 2766fad719 | |
Tyler Goodlet | ae71168216 | |
Tyler Goodlet | a0c238daa7 | |
Tyler Goodlet | 7cbdc6a246 | |
Tyler Goodlet | 2ff8be71aa | |
Tyler Goodlet | ddffaa952d | |
Tyler Goodlet | 5520e9ef21 | |
Tyler Goodlet | 958e542f7d | |
goodboy | 927bbc7258 | |
Tyler Goodlet | 45bef0cea9 | |
goodboy | a3d46f713e | |
Tyler Goodlet | 5684120c11 | |
Tyler Goodlet | ddb195ed2c | |
Tyler Goodlet | 6747831677 | |
Tyler Goodlet | 9326379b04 | |
Tyler Goodlet | 09d9a7ea2b | |
Tyler Goodlet | 45871d5846 | |
goodboy | bf7a49c19b | |
goodboy | 0a7fce087c | |
Tyler Goodlet | d3130ca04c | |
Tyler Goodlet | e30a3c5b54 | |
Tyler Goodlet | 2393965e83 | |
Tyler Goodlet | fb39da19f4 | |
Tyler Goodlet | a27431c34f | |
Tyler Goodlet | 070b9f3dc1 | |
goodboy | f2dba44169 | |
Tyler Goodlet | 0ef5da0881 | |
Tyler Goodlet | 0580b204a3 | |
Tyler Goodlet | 6ce699ae1f | |
Tyler Goodlet | 3aa72abacf | |
Tyler Goodlet | 04004525c1 | |
Tyler Goodlet | a7f0adf1cf | |
Tyler Goodlet | cef511092d | |
Tyler Goodlet | 4e5df973a9 | |
Tyler Goodlet | 6a1a62d8c0 | |
Tyler Goodlet | e0491cf2e7 | |
Tyler Goodlet | 90bc9b9730 | |
goodboy | f449672c68 | |
Tyler Goodlet | fd22f45178 | |
goodboy | 37f634a2ed | |
Tyler Goodlet | dfee9dd97e | |
Tyler Goodlet | 2a99f7a4d7 | |
Tyler Goodlet | b44e2d9ed9 | |
Tyler Goodlet | 795d4d76f4 | |
Tyler Goodlet | c26acb1fa8 | |
Tyler Goodlet | 11b6699a54 | |
Tyler Goodlet | f9bdd643cf | |
Tyler Goodlet | 2baea21c7d | |
Tyler Goodlet | bea0111753 | |
Tyler Goodlet | c870665be0 | |
Tyler Goodlet | 4ff1090284 | |
Tyler Goodlet | f22461a844 | |
Tyler Goodlet | 458c7211ee | |
Tyler Goodlet | 5cc4b19a7c | |
goodboy | f5236f658b | |
goodboy | a360b66cc0 | |
Tyler Goodlet | 4bcb791161 | |
Tyler Goodlet | 4c7c78c815 | |
Tyler Goodlet | 019867b413 | |
Tyler Goodlet | f356fb0a68 | |
goodboy | 756249ff70 | |
goodboy | 419ebebe72 | |
goodboy | a229996ebe | |
Tyler Goodlet | af01e89612 | |
Tyler Goodlet | 609034c634 | |
Tyler Goodlet | 95dd0e6bd6 | |
goodboy | 479ad1bb15 | |
Tyler Goodlet | d506235a8b | |
Tyler Goodlet | 7846446a44 | |
Tyler Goodlet | 214f864dcf | |
Tyler Goodlet | 4c0f2099aa | |
Tyler Goodlet | aea7bec2c3 | |
Tyler Goodlet | 47777e4192 | |
Tyler Goodlet | f6888057c3 | |
Tyler Goodlet | f65f56ec75 | |
Tyler Goodlet | 5d39b04552 | |
Tyler Goodlet | 735fbc6259 | |
Tyler Goodlet | fcd7e0f3f3 | |
Tyler Goodlet | 9106d13dfe | |
Tyler Goodlet | d3caad6e11 | |
Tyler Goodlet | f87a2a810a | |
Tyler Goodlet | 208e2e9e97 | |
Tyler Goodlet | 90cc6eb317 | |
Tyler Goodlet | b118becc84 | |
Tyler Goodlet | 7442d68ecf | |
Tyler Goodlet | 076c167d6e | |
Tyler Goodlet | 64d8cd448f | |
Tyler Goodlet | ec6a28a8b1 | |
Tyler Goodlet | cc15d02488 | |
goodboy | d5bc43e8dd | |
Tyler Goodlet | 287a2c8396 | |
Tyler Goodlet | 453ebdfe30 | |
Tyler Goodlet | 2b1fb90e03 | |
Tyler Goodlet | 695ba5288d | |
Tyler Goodlet | d6c32bba86 | |
Tyler Goodlet | fa89207583 | |
Tyler Goodlet | 557562e25c | |
Tyler Goodlet | c6efa2641b | |
Tyler Goodlet | 8a7e391b4e | |
Tyler Goodlet | aec48a1dd5 | |
Tyler Goodlet | 87f301500d | |
Tyler Goodlet | 566a54ffb6 | |
Tyler Goodlet | f9c4b3cc96 | |
Tyler Goodlet | a12e6800ff | |
Tyler Goodlet | cc68501c7a | |
Tyler Goodlet | 7ebf8a8dc0 | |
Tyler Goodlet | 4475823e48 | |
Tyler Goodlet | 3713288b48 | |
Tyler Goodlet | 4fdfb81876 | |
Tyler Goodlet | f32b4d37cb | |
Tyler Goodlet | 2063b9d8bb | |
Tyler Goodlet | fe14605034 | |
Tyler Goodlet | 68b32208de | |
Tyler Goodlet | f1fe369bbf | |
Tyler Goodlet | 16b2937d23 | |
Tyler Goodlet | bfad676b7c | |
Tyler Goodlet | c617a06905 | |
Tyler Goodlet | ff74f4302a | |
Tyler Goodlet | 21153a0e1e | |
Tyler Goodlet | b6f344f34a | |
Tyler Goodlet | ecdc747ced | |
Tyler Goodlet | 5147cd7be0 | |
Tyler Goodlet | 3dcb72d429 | |
Tyler Goodlet | fbee33b00d | |
Tyler Goodlet | 3991d8f911 | |
Tyler Goodlet | 7b2e8f1ba5 | |
Tyler Goodlet | cbcbb2b243 | |
Tyler Goodlet | cd3bfb1ea4 | |
Tyler Goodlet | 82b718d5a3 | |
Tyler Goodlet | 05a1a4e3d8 | |
Tyler Goodlet | 412138a75b | |
Tyler Goodlet | c1b63f4757 | |
Tyler Goodlet | 5d774bef90 | |
Tyler Goodlet | de77c7d209 | |
Tyler Goodlet | ce1eb11b59 | |
Tyler Goodlet | b629ce177d | |
Tyler Goodlet | 73fa320917 | |
Tyler Goodlet | dd05ed1371 | |
Tyler Goodlet | 2a641ab8b4 | |
Tyler Goodlet | f8f7ca350c | |
Tyler Goodlet | 88b4ccc768 | |
Tyler Goodlet | eb2bad5138 | |
Tyler Goodlet | f768576060 | |
Tyler Goodlet | add0e92335 | |
Tyler Goodlet | 1eb7e109e6 | |
Tyler Goodlet | 725909a94c | |
Tyler Goodlet | 050aa7594c | |
Tyler Goodlet | 450009ff9c | |
goodboy | b2d5892010 | |
goodboy | 5a3b465ac0 | |
Tyler Goodlet | be7afdaa89 | |
Tyler Goodlet | 1c561207f5 | |
Tyler Goodlet | ed2c962bb9 | |
Tyler Goodlet | 147ceca016 | |
Tyler Goodlet | 03a7940f83 | |
Tyler Goodlet | dd2a9f74f1 | |
Tyler Goodlet | 49c720af3c | |
Tyler Goodlet | c620517543 | |
Tyler Goodlet | a425c29ef1 | |
Tyler Goodlet | 783914c7fe | |
Tyler Goodlet | 920a394539 | |
Tyler Goodlet | e977597cd0 | |
Tyler Goodlet | 7a33ba64f1 | |
Tyler Goodlet | 191b94b67c | |
Tyler Goodlet | 4ad7b073c3 | |
Tyler Goodlet | d92ff9c7a0 |
Diff: x11vnc startup script

```diff
@@ -2,15 +2,19 @@

 # start VNC server
 x11vnc \
-    -ncache_cr \
-    -listen localhost \
+    -listen 127.0.0.1 \
+    -allow 127.0.0.1 \
+    -autoport 3003 \
+    -no6 \
+    -noipv6 \
     -display :1 \
+    -bg \
     -forever \
     -shared \
     -logappend /var/log/x11vnc.log \
-    -bg \
-    -noipv6 \
-    -autoport 3003 \
+    -ncache_cr \
+    -ncache \

 # can't use this because of ``asyncvnc`` issue:
 # https://github.com/barneygale/asyncvnc/issues/1
 # -passwd 'ibcansmbz'
```
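The flag shuffle above serves piker's programmatic VNC clicking hack (see the ``data_reset_hack()`` changes in the ``ib`` feed diff further down). A rough client-side sketch using ``asyncvnc``, assuming the port configured above; this is illustrative only, since piker actually drives this from ``trio`` via ``tractor.to_asyncio``:

```python
import asyncio

import asyncvnc  # https://github.com/barneygale/asyncvnc


async def vnc_click_hack(key: str = 'f') -> None:
    # connect to the x11vnc server started above (no password,
    # per the ``asyncvnc`` issue referenced in the script)
    async with asyncvnc.connect('127.0.0.1', 3003) as client:
        # move to mid-screen and click to focus the IB window
        client.mouse.move(x=500, y=500)
        client.mouse.click()
        # send the data-reset key chord; keys are stacked
        client.keyboard.press('Ctrl', 'Alt', key)


asyncio.run(vnc_click_hack())
```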
Diff: the piker daemon/services module

```diff
@@ -22,10 +22,10 @@ from typing import Optional, Union, Callable, Any
 from contextlib import asynccontextmanager as acm
 from collections import defaultdict
 
-from pydantic import BaseModel
+from msgspec import Struct
+import tractor
 import trio
 from trio_typing import TaskStatus
-import tractor
 
 from .log import get_logger, get_console_log
 from .brokers import get_brokermod
@@ -47,16 +47,13 @@ _root_modules = [
 ]
 
 
-class Services(BaseModel):
+class Services(Struct):
 
     actor_n: tractor._supervise.ActorNursery
     service_n: trio.Nursery
     debug_mode: bool  # tractor sub-actor debug mode flag
    service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}
 
-    class Config:
-        arbitrary_types_allowed = True
-
     async def start_service_task(
         self,
         name: str,
```
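Why the swap works here, in a minimal self-contained sketch (assuming only the ``msgspec`` package; the fields below are a trimmed stand-in, not piker's full ``Services`` class): ``msgspec.Struct`` fields are plain attributes with no per-assignment validation, so arbitrary runtime objects like nurseries and portals can be stored without pydantic's ``arbitrary_types_allowed`` escape hatch.

```python
from typing import Any

from msgspec import Struct


class Services(Struct):
    debug_mode: bool
    # mutable defaults are copied per-instance by msgspec
    service_tasks: dict[str, Any] = {}


s = Services(debug_mode=True)
s.service_tasks['quoter'] = object()  # no coercion, no validation
```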
Diff: the ``binance`` backend

```diff
@@ -33,14 +33,13 @@ import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np
 import tractor
-from pydantic.dataclasses import dataclass
-from pydantic import BaseModel
 import wsproto
 
 from .._cacheables import open_cached_client
 from ._util import resproc, SymbolNotFound
 from ..log import get_logger, get_console_log
 from ..data import ShmArray
+from ..data.types import Struct
 from ..data._web_bs import open_autorecon_ws, NoBsWs
 
 log = get_logger(__name__)
@@ -79,12 +78,14 @@ _show_wap_in_history = False
 
 
 # https://binance-docs.github.io/apidocs/spot/en/#exchange-information
-class Pair(BaseModel):
+class Pair(Struct, frozen=True):
     symbol: str
     status: str
 
     baseAsset: str
     baseAssetPrecision: int
+    cancelReplaceAllowed: bool
+    allowTrailingStop: bool
     quoteAsset: str
     quotePrecision: int
     quoteAssetPrecision: int
@@ -104,14 +105,14 @@ class Pair(BaseModel):
     permissions: list[str]
 
 
-@dataclass
-class OHLC:
-    """Description of the flattened OHLC quote format.
+class OHLC(Struct):
+    '''
+    Description of the flattened OHLC quote format.
 
     For schema details see:
     https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams
 
-    """
+    '''
     time: int
 
     open: float
@@ -260,6 +261,7 @@ class Client:
         for i, bar in enumerate(bars):
 
             bar = OHLC(*bar)
+            bar.typecast()
 
             row = []
             for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
@@ -287,7 +289,7 @@ async def get_client() -> Client:
 
 
 # validation type
-class AggTrade(BaseModel):
+class AggTrade(Struct):
     e: str   # Event type
     E: int   # Event time
     s: str   # Symbol
@@ -341,7 +343,9 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
 
         elif msg.get('e') == 'aggTrade':
 
-            # validate
+            # NOTE: this is purely for a definition, ``msgspec.Struct``
+            # does not runtime-validate until you decode/encode.
+            # see: https://jcristharif.com/msgspec/structs.html#type-validation
             msg = AggTrade(**msg)
 
             # TODO: type out and require this quote format
@@ -352,8 +356,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
                 'brokerd_ts': time.time(),
                 'ticks': [{
                     'type': 'trade',
-                    'price': msg.p,
-                    'size': msg.q,
+                    'price': float(msg.p),
+                    'size': float(msg.q),
                     'broker_ts': msg.T,
                 }],
             }
@@ -448,7 +452,7 @@ async def stream_quotes(
         d = cache[sym.upper()]
         syminfo = Pair(**d)  # validation
 
-        si = sym_infos[sym] = syminfo.dict()
+        si = sym_infos[sym] = syminfo.to_dict()
 
         # XXX: after manually inspecting the response format we
         # just directly pick out the info we need
```
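The NOTE added above deserves a concrete example: instantiating a ``msgspec.Struct`` does not type-check its fields; only decoding (or an encode/decode round-trip) does. A self-contained sketch assuming ``msgspec`` >= 0.12 (older releases raise ``DecodeError`` rather than ``ValidationError``); the fields mirror binance's ``aggTrade`` schema:

```python
import msgspec
from msgspec import Struct


class AggTrade(Struct):
    e: str  # event type
    p: str  # price (binance sends numeric strings)
    q: str  # quantity


# no error here even though the types are wrong:
bad = AggTrade(e='aggTrade', p=1.0, q=2.0)  # type: ignore

# decoding, however, enforces the annotations:
try:
    msgspec.json.decode(
        b'{"e": "aggTrade", "p": 1.0, "q": 2.0}',
        type=AggTrade,
    )
except msgspec.ValidationError as err:
    print(err)  # Expected `str`, got `float` - at `$.p`
```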
Diff: the ``ib`` backend package ``__init__``

```diff
@@ -20,15 +20,10 @@ Interactive Brokers API backend.
 Sub-modules within break into the core functionalities:
 
 - ``broker.py`` part for orders / trading endpoints
-- ``data.py`` for real-time data feed endpoints
-- ``client.py`` for the core API machinery which is ``trio``-ized
+- ``feed.py`` for real-time data feed endpoints
+- ``api.py`` for the core API machinery which is ``trio``-ized
   wrapping around ``ib_insync``.
 
-- ``report.py`` for the hackery to build manual pp calcs
-  to avoid ib's absolute bullshit FIFO style position
-  tracking..
-
 """
 from .api import (
     get_client,
@@ -38,7 +33,10 @@ from .feed import (
     open_symbol_search,
     stream_quotes,
 )
-from .broker import trades_dialogue
+from .broker import (
+    trades_dialogue,
+    norm_trade_records,
+)
 
 __all__ = [
     'get_client',
```
Diff: the ``ib`` backend ``api`` module

```diff
@@ -29,6 +29,7 @@ import itertools
 from math import isnan
 from typing import (
     Any,
+    Optional,
     Union,
 )
 import asyncio
@@ -38,16 +39,28 @@ import time
 from types import SimpleNamespace
 
 
+from bidict import bidict
 import trio
 import tractor
 from tractor import to_asyncio
-from ib_insync.wrapper import RequestError
-from ib_insync.contract import Contract, ContractDetails
+import ib_insync as ibis
+from ib_insync.contract import (
+    Contract,
+    ContractDetails,
+    Option,
+)
 from ib_insync.order import Order
 from ib_insync.ticker import Ticker
-from ib_insync.objects import Position
-import ib_insync as ibis
-from ib_insync.wrapper import Wrapper
+from ib_insync.objects import (
+    Position,
+    Fill,
+    Execution,
+    CommissionReport,
+)
+from ib_insync.wrapper import (
+    Wrapper,
+    RequestError,
+)
 from ib_insync.client import Client as ib_Client
 import numpy as np
 
@@ -155,60 +168,93 @@ class NonShittyIB(ibis.IB):
         self.client.apiEnd += self.disconnectedEvent
 
 
-# map of symbols to contract ids
-_adhoc_cmdty_data_map = {
-    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
-
-    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
-    # https://groups.io/g/twsapi/message/44174
-    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
-}
-
 _futes_venues = (
     'GLOBEX',
     'NYMEX',
     'CME',
     'CMECRYPTO',
+    'COMEX',
+    'CMDTY',  # special name case..
 )
 
 _adhoc_futes_set = {
 
     # equities
     'nq.globex',
-    'mnq.globex',
+    'mnq.globex',  # micro
 
     'es.globex',
-    'mes.globex',
+    'mes.globex',  # micro
 
     # cypto$
     'brr.cmecrypto',
     'ethusdrr.cmecrypto',
 
     # agriculture
-    'he.globex',  # lean hogs
-    'le.globex',  # live cattle (geezers)
-    'gf.globex',  # feeder cattle (younguns)
+    'he.nymex',  # lean hogs
+    'le.nymex',  # live cattle (geezers)
+    'gf.nymex',  # feeder cattle (younguns)
 
     # raw
-    'lb.globex',  # random len lumber
+    'lb.nymex',  # random len lumber
 
     # metals
     'xauusd.cmdty',  # gold spot
     'gc.nymex',
-    'mgc.nymex',
+    'mgc.nymex',  # micro
+
+    # oil & gas
+    'cl.nymex',
 
     'xagusd.cmdty',  # silver spot
     'ni.nymex',  # silver futes
     'qi.comex',  # mini-silver futes
 }
 
+
+# taken from list here:
+# https://www.interactivebrokers.com/en/trading/products-spot-currencies.php
+_adhoc_fiat_set = set((
+    'USD, AED, AUD, CAD,'
+    'CHF, CNH, CZK, DKK,'
+    'EUR, GBP, HKD, HUF,'
+    'ILS, JPY, MXN, NOK,'
+    'NZD, PLN, RUB, SAR,'
+    'SEK, SGD, TRY, ZAR'
+    ).split(' ,')
+)
+
+
+# map of symbols to contract ids
+_adhoc_symbol_map = {
+    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
+
+    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
+    # https://groups.io/g/twsapi/message/44174
+    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
+}
+for qsn in _adhoc_futes_set:
+    sym, venue = qsn.split('.')
+    assert venue.upper() in _futes_venues, f'{venue}'
+    _adhoc_symbol_map[sym.upper()] = (
+        {'exchange': venue},
+        {},
+    )
+
 
 # exchanges we don't support at the moment due to not knowing
 # how to do symbol-contract lookup correctly likely due
 # to not having the data feeds subscribed.
 _exch_skip_list = {
 
     'ASX',  # aussie stocks
     'MEXI',  # mexican stocks
-    'VALUE',  # no idea
+
+    # no idea
+    'VALUE',
+    'FUNDSERV',
+    'SWB2',
+    'PSE',
 }
 
 # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
```
```diff
@@ -261,27 +307,29 @@ class Client:
 
     # NOTE: the ib.client here is "throttled" to 45 rps by default
 
-    async def trades(
-        self,
-        # api_only: bool = False,
-
-    ) -> dict[str, Any]:
-
-        # orders = await self.ib.reqCompletedOrdersAsync(
-        #     apiOnly=api_only
-        # )
-        fills = await self.ib.reqExecutionsAsync()
-        norm_fills = []
+    async def trades(self) -> dict[str, Any]:
+        '''
+        Return list of trade-fills from current session in ``dict``.
+
+        '''
+        fills: list[Fill] = self.ib.fills()
+        norm_fills: list[dict] = []
+
         for fill in fills:
             fill = fill._asdict()  # namedtuple
-            for key, val in fill.copy().items():
-                if isinstance(val, Contract):
-                    fill[key] = asdict(val)
+            for key, val in fill.items():
+                match val:
+                    case Contract() | Execution() | CommissionReport():
+                        fill[key] = asdict(val)
 
             norm_fills.append(fill)
 
         return norm_fills
 
+    async def orders(self) -> list[Order]:
+        return await self.ib.reqAllOpenOrdersAsync(
+            apiOnly=False,
+        )
+
     async def bars(
         self,
         fqsn: str,
```
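Two details in the ``trades()`` rewrite above are worth unpacking: the ``match``/``case`` with an or-pattern replaces an ``isinstance`` chain (Python >= 3.10 structural pattern matching), and dropping ``.copy()`` is safe because the loop only reassigns values for keys that already exist. A standalone sketch of the same normalization pattern, with stand-in dataclasses rather than ``ib_insync``'s real types:

```python
from dataclasses import asdict, dataclass


@dataclass
class Contract:
    symbol: str


@dataclass
class Execution:
    execId: str


fill = {
    'contract': Contract('mnq'),
    'execution': Execution('0001'),
    'time': 1655691993.0,
}

# reassigning values for existing keys is fine while iterating
for key, val in fill.items():
    match val:
        case Contract() | Execution():
            fill[key] = asdict(val)  # flatten for IPC transport

print(fill)  # nested objects are now plain dicts
```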
```diff
@@ -309,7 +357,7 @@ class Client:
 
         _enters += 1
 
-        contract = await self.find_contract(fqsn)
+        contract = (await self.find_contracts(fqsn))[0]
         bars_kwargs.update(getattr(contract, 'bars_kwargs', {}))
 
         # _min = min(2000*100, count)
@@ -364,7 +412,15 @@ class Client:
             futs.append(self.ib.reqContractDetailsAsync(con))
 
         # batch request all details
-        results = await asyncio.gather(*futs)
+        try:
+            results = await asyncio.gather(*futs)
+        except RequestError as err:
+            msg = err.message
+            if (
+                'No security definition' in msg
+            ):
+                log.warning(f'{msg}: {contracts}')
+                return {}
 
         # one set per future result
         details = {}
@@ -373,20 +429,11 @@ class Client:
         # XXX: if there is more then one entry in the details list
         # then the contract is so called "ambiguous".
         for d in details_set:
-            con = d.contract
-
-            key = '.'.join([
-                con.symbol,
-                con.primaryExchange or con.exchange,
-            ])
-            expiry = con.lastTradeDateOrContractMonth
-            if expiry:
-                key += f'.{expiry}'
-
-            # nested dataclass we probably don't need and that
-            # won't IPC serialize..
+            # nested dataclass we probably don't need and that won't
+            # IPC serialize..
             d.secIdList = ''
+            key, calc_price = con2fqsn(d.contract)
             details[key] = d
 
         return details
@@ -416,7 +463,7 @@ class Client:
         self,
         pattern: str,
         # how many contracts to search "up to"
-        upto: int = 3,
+        upto: int = 6,
         asdicts: bool = True,
 
     ) -> dict[str, ContractDetails]:
@@ -427,7 +474,6 @@ class Client:
             pattern,
             upto=upto,
         )
-
        for key, deats in results.copy().items():
 
             tract = deats.contract
@@ -437,21 +483,44 @@ class Client:
             if sectype == 'IND':
                 results[f'{sym}.IND'] = tract
                 results.pop(key)
-                exch = tract.exchange
-
-                if exch in _futes_venues:
-                    # try get all possible contracts for symbol as per,
-                    # https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut
-                    con = ibis.Future(
-                        symbol=sym,
-                        exchange=exch,
-                    )
-                    try:
-                        all_deats = await self.con_deats([con])
-                        results |= all_deats
-
-                    except RequestError as err:
-                        log.warning(err.message)
+                # exch = tract.exchange
+
+                # XXX: add back one of these to get the weird deadlock
+                # on the debugger from root without the latest
+                # maybe_wait_for_debugger() fix in the `open_context()`
+                # exit.
+                # assert 0
+                # if con.exchange not in _exch_skip_list:
+
+            exch = tract.exchange
+            if exch not in _exch_skip_list:
+
+                # try get all possible contracts for symbol as per,
+                # https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut
+                con = ibis.Future(
+                    symbol=sym,
+                    exchange=exch,
+                )
+                # TODO: make this work, think it's something to do
+                # with the qualify flag.
+                # cons = await self.find_contracts(
+                #     contract=con,
+                #     err_on_qualify=False,
+                # )
+                # if cons:
+                all_deats = await self.con_deats([con])
+                results |= all_deats
+
+            # forex pairs
+            elif sectype == 'CASH':
+                dst, src = tract.localSymbol.split('.')
+                pair_key = "/".join([dst, src])
+                exch = tract.exchange.lower()
+                results[f'{pair_key}.{exch}'] = tract
+                results.pop(key)
+
+                # XXX: again seems to trigger the weird tractor
+                # bug with the debugger..
+                # assert 0
 
         return results
 
@@ -483,13 +552,19 @@ class Client:
 
         return con
 
-    async def find_contract(
+    async def get_con(
+        self,
+        conid: int,
+    ) -> Contract:
+        return await self.ib.qualifyContractsAsync(
+            ibis.Contract(conId=conid)
+        )
+
+    def parse_patt2fqsn(
         self,
         pattern: str,
-        currency: str = 'USD',
-        **kwargs,
 
-    ) -> Contract:
+    ) -> tuple[str, str, str, str]:
 
         # TODO: we can't use this currently because
         # ``wrapper.starTicker()`` currently cashes ticker instances
@@ -502,12 +577,30 @@ class Client:
         # XXX UPDATE: we can probably do the tick/trades scraping
         # inside our eventkit handler instead to bypass this entirely?
 
+        currency = ''
+
+        # fqsn parsing stage
+        # ------------------
         if '.ib' in pattern:
             from ..data._source import unpack_fqsn
-            broker, symbol, expiry = unpack_fqsn(pattern)
+            _, symbol, expiry = unpack_fqsn(pattern)
 
         else:
             symbol = pattern
+            expiry = ''
+
+        # another hack for forex pairs lul.
+        if (
+            '.idealpro' in symbol
+            # or '/' in symbol
+        ):
+            exch = 'IDEALPRO'
+            symbol = symbol.removesuffix('.idealpro')
+            if '/' in symbol:
+                symbol, currency = symbol.split('/')
+
+        else:
+            # TODO: yes, a cache..
             # try:
             #     # give the cache a go
             #     return self._contracts[symbol]
@@ -518,42 +611,70 @@ class Client:
             symbol, _, expiry = symbol.rpartition('.')
 
             # use heuristics to figure out contract "type"
-            sym, exch = symbol.upper().rsplit('.', maxsplit=1)
+            symbol, exch = symbol.upper().rsplit('.', maxsplit=1)
 
-        qualify: bool = True
+        return symbol, currency, exch, expiry
+
+    async def find_contracts(
+        self,
+        pattern: Optional[str] = None,
+        contract: Optional[Contract] = None,
+        qualify: bool = True,
+        err_on_qualify: bool = True,
+
+    ) -> Contract:
+
+        if pattern is not None:
+            symbol, currency, exch, expiry = self.parse_patt2fqsn(
+                pattern,
+            )
+            sectype = ''
+
+        else:
+            assert contract
+            symbol = contract.symbol
+            sectype = contract.secType
+            exch = contract.exchange or contract.primaryExchange
+            expiry = contract.lastTradeDateOrContractMonth
+            currency = contract.currency
+
+        # contract searching stage
+        # ------------------------
 
         # futes
         if exch in _futes_venues:
             if expiry:
                 # get the "front" contract
-                contract = await self.get_fute(
-                    symbol=sym,
+                con = await self.get_fute(
+                    symbol=symbol,
                     exchange=exch,
                     expiry=expiry,
                 )
 
             else:
                 # get the "front" contract
-                contract = await self.get_fute(
-                    symbol=sym,
+                con = await self.get_fute(
+                    symbol=symbol,
                     exchange=exch,
                     front=True,
                 )
 
-            qualify = False
-
-        elif exch in ('FOREX'):
-            currency = ''
-            symbol, currency = sym.split('/')
+        elif (
+            exch in ('IDEALPRO')
+            or sectype == 'CASH'
+        ):
+            # if '/' in symbol:
+            #     currency = ''
+            #     symbol, currency = symbol.split('/')
             con = ibis.Forex(
-                symbol=symbol,
+                pair=''.join((symbol, currency)),
                 currency=currency,
             )
             con.bars_kwargs = {'whatToShow': 'MIDPOINT'}
 
         # commodities
         elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY
-            con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym]
+            con_kwargs, bars_kwargs = _adhoc_symbol_map[symbol]
             con = ibis.Commodity(**con_kwargs)
             con.bars_kwargs = bars_kwargs
 
@@ -569,33 +690,50 @@ class Client:
                 exch = 'SMART'
 
             else:
-                exch = 'SMART'
+                # XXX: order is super important here since
+                # a primary == 'SMART' won't ever work.
                 primaryExchange = exch
+                exch = 'SMART'
 
             con = ibis.Stock(
-                symbol=sym,
+                symbol=symbol,
                 exchange=exch,
                 primaryExchange=primaryExchange,
                 currency=currency,
             )
-        try:
-            exch = 'SMART' if not exch else exch
-            if qualify:
-                contract = (await self.ib.qualifyContractsAsync(con))[0]
-            else:
-                assert contract
 
-        except IndexError:
+        exch = 'SMART' if not exch else exch
+
+        contracts = [con]
+        if qualify:
+            try:
+                contracts = await self.ib.qualifyContractsAsync(con)
+            except RequestError as err:
+                msg = err.message
+                if (
+                    'No security definition' in msg
+                    and not err_on_qualify
+                ):
+                    log.warning(
+                        f'Could not find def for {con}')
+                    return None
+
+                else:
+                    raise
+        if not contracts:
             raise ValueError(f"No contract could be found {con}")
 
-        self._contracts[pattern] = contract
-
-        # add an aditional entry with expiry suffix if available
-        conexp = contract.lastTradeDateOrContractMonth
-        if conexp:
-            self._contracts[pattern + f'.{conexp}'] = contract
+        # pack all contracts into cache
+        for tract in contracts:
+            exch: str = tract.primaryExchange or tract.exchange or exch
+            pattern = f'{symbol}.{exch}'
+            expiry = tract.lastTradeDateOrContractMonth
+            # add an entry with expiry suffix if available
+            if expiry:
+                pattern += f'.{expiry}'
+
+            self._contracts[pattern.lower()] = tract
 
-        return contract
+        return contracts
 
     async def get_head_time(
         self,
@@ -614,9 +752,10 @@ class Client:
     async def get_sym_details(
         self,
         symbol: str,
+
     ) -> tuple[Contract, Ticker, ContractDetails]:
 
-        contract = await self.find_contract(symbol)
+        contract = (await self.find_contracts(symbol))[0]
         ticker: Ticker = self.ib.reqMktData(
             contract,
             snapshot=True,
@@ -804,6 +943,73 @@ class Client:
         return self.ib.positions(account=account)
 
 
+def con2fqsn(
+    con: Contract,
+    _cache: dict[int, (str, bool)] = {},
+
+) -> tuple[str, bool]:
+    '''
+    Convert contracts to fqsn-style strings to be used both in symbol-search
+    matching and as feed tokens passed to the front end data deed layer.
+
+    Previously seen contracts are cached by id.
+
+    '''
+    # should be real volume for this contract by default
+    calc_price = False
+    if con.conId:
+        try:
+            return _cache[con.conId]
+        except KeyError:
+            pass
+
+    suffix = con.primaryExchange or con.exchange
+    symbol = con.symbol
+    expiry = con.lastTradeDateOrContractMonth or ''
+
+    match con:
+        case Option():
+            # TODO: option symbol parsing and sane display:
+            symbol = con.localSymbol.replace(' ', '')
+
+        case ibis.Commodity():
+            # commodities and forex don't have an exchange name and
+            # no real volume so we have to calculate the price
+            suffix = con.secType
+
+            # no real volume on this tract
+            calc_price = True
+
+        case ibis.Forex() | ibis.Contract(secType='CASH'):
+            dst, src = con.localSymbol.split('.')
+            symbol = ''.join([dst, src])
+            suffix = con.exchange
+
+            # no real volume on forex feeds..
+            calc_price = True
+
+    if not suffix:
+        entry = _adhoc_symbol_map.get(
+            con.symbol or con.localSymbol
+        )
+        if entry:
+            meta, kwargs = entry
+            cid = meta.get('conId')
+            if cid:
+                assert con.conId == meta['conId']
+            suffix = meta['exchange']
+
+    # append a `.<suffix>` to the returned symbol
+    # key for derivatives that normally is the expiry
+    # date key.
+    if expiry:
+        suffix += f'.{expiry}'
+
+    fqsn_key = '.'.join((symbol, suffix)).lower()
+    _cache[con.conId] = fqsn_key, calc_price
+    return fqsn_key, calc_price
+
+
 # per-actor API ep caching
 _client_cache: dict[tuple[str, int], Client] = {}
 _scan_ignore: set[tuple[str, int]] = set()
```
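The ``_cache: dict[...] = {}`` parameter of ``con2fqsn()`` above relies on Python evaluating default arguments once, at function definition time, so the dict persists across calls as a process-lifetime memo. A tiny sketch of the same idiom with hypothetical names:

```python
def fqsn_for(conid: int, _cache: dict[int, str] = {}) -> str:
    # the default dict is created once, so it acts as a shared
    # cache for every subsequent call
    try:
        return _cache[conid]
    except KeyError:
        key = f'contract-{conid}'  # stand-in for the real conversion
        _cache[conid] = key
        return key


assert fqsn_for(69067924) == fqsn_for(69067924)
```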
```diff
@@ -811,10 +1017,23 @@ _scan_ignore: set[tuple[str, int]] = set()
 
 def get_config() -> dict[str, Any]:
 
-    conf, path = config.load()
+    conf, path = config.load('brokers')
 
     section = conf.get('ib')
 
+    accounts = section.get('accounts')
+    if not accounts:
+        raise ValueError(
+            'brokers.toml -> `ib.accounts` must be defined\n'
+            f'location: {path}'
+        )
+
+    names = list(accounts.keys())
+    accts = section['accounts'] = bidict(accounts)
+    log.info(
+        f'brokers.toml defines {len(accts)} accounts: '
+        f'{pformat(names)}'
+    )
+
     if section is None:
         log.warning(f'No config section found for ib in {path}')
         return {}
```
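The accounts table gets wrapped in a ``bidict`` so lookups work in both directions, account name to account id and back. A short sketch assuming the ``bidict`` package and a made-up account entry:

```python
from bidict import bidict

accounts = bidict({'margin': 'DU1234567'})  # hypothetical entry

assert accounts['margin'] == 'DU1234567'
assert accounts.inverse['DU1234567'] == 'margin'
```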
```diff
@@ -990,7 +1209,7 @@ async def load_aio_clients(
         for acct, client in _accounts2clients.items():
             log.info(f'Disconnecting {acct}@{client}')
             client.ib.disconnect()
-            _client_cache.pop((host, port))
+            _client_cache.pop((host, port), None)
 
 
 async def load_clients_for_trio(
@@ -1019,9 +1238,6 @@ async def load_clients_for_trio(
         await asyncio.sleep(float('inf'))
 
 
-_proxies: dict[str, MethodProxy] = {}
-
-
 @acm
 async def open_client_proxies() -> tuple[
     dict[str, MethodProxy],
@@ -1044,13 +1260,14 @@ async def open_client_proxies() -> tuple[
         if cache_hit:
             log.info(f'Re-using cached clients: {clients}')
 
+        proxies = {}
         for acct_name, client in clients.items():
             proxy = await stack.enter_async_context(
                 open_client_proxy(client),
             )
-            _proxies[acct_name] = proxy
+            proxies[acct_name] = proxy
 
-        yield _proxies, clients
+        yield proxies, clients
 
 
 def get_preferred_data_client(
@@ -1199,11 +1416,13 @@ async def open_client_proxy(
     event_table = {}
 
     async with (
+
         to_asyncio.open_channel_from(
             open_aio_client_method_relay,
             client=client,
             event_consumers=event_table,
         ) as (first, chan),
+
         trio.open_nursery() as relay_n,
     ):
```

[File diff suppressed because it is too large]
Diff: the ``ib`` backend ``feed`` module

```diff
@@ -41,7 +41,8 @@ from trio_typing import TaskStatus
 from piker.data._sharedmem import ShmArray
 from .._util import SymbolNotFound, NoData
 from .api import (
-    _adhoc_futes_set,
+    # _adhoc_futes_set,
+    con2fqsn,
     log,
     load_aio_clients,
     ibis,
@@ -207,8 +208,6 @@ async def get_bars(
 
         except RequestError as err:
             msg = err.message
-            # why do we always need to rebind this?
-            # _err = err
 
             if 'No market data permissions for' in msg:
                 # TODO: signalling for no permissions searches
@@ -217,8 +216,8 @@ async def get_bars(
                 )
 
             elif (
-                err.code == 162
-                and 'HMDS query returned no data' in err.message
+                err.code == 162 and
+                'HMDS query returned no data' in err.message
             ):
                 # XXX: this is now done in the storage mgmt layer
                 # and we shouldn't implicitly decrement the frame dt
@@ -237,6 +236,14 @@ async def get_bars(
                     frame_size=2000,
                 )
 
+            # elif (
+            #     err.code == 162 and
+            #     'Trading TWS session is connected from a different IP
+            #     address' in err.message
+            # ):
+            #     log.warning("ignoring ip address warning")
+            #     continue
+
             elif _pacing in msg:
 
                 log.warning(
@@ -294,7 +301,13 @@ async def get_bars(
                 else:
 
                     log.warning('Sending CONNECTION RESET')
-                    await data_reset_hack(reset_type='connection')
+                    res = await data_reset_hack(reset_type='connection')
+                    if not res:
+                        log.warning(
+                            'NO VNC DETECTED!\n'
+                            'Manually press ctrl-alt-f on your IB java app'
+                        )
+                        # break
 
                 with trio.move_on_after(timeout) as cs:
                     for name, ev in [
@@ -553,38 +566,18 @@ async def open_aio_quote_stream(
 
 
 # TODO: cython/mypyc/numba this!
+# or we can at least cache a majority of the values
+# except for the ones we expect to change?..
 def normalize(
     ticker: Ticker,
     calc_price: bool = False
 
 ) -> dict:
 
-    # should be real volume for this contract by default
-    calc_price = False
-
     # check for special contract types
     con = ticker.contract
-    if type(con) in (
-        ibis.Commodity,
-        ibis.Forex,
-    ):
-        # commodities and forex don't have an exchange name and
-        # no real volume so we have to calculate the price
-        suffix = con.secType
-        # no real volume on this tract
-        calc_price = True
-
-    else:
-        suffix = con.primaryExchange
-        if not suffix:
-            suffix = con.exchange
-
-        # append a `.<suffix>` to the returned symbol
-        # key for derivatives that normally is the expiry
-        # date key.
-        expiry = con.lastTradeDateOrContractMonth
-        if expiry:
-            suffix += f'.{expiry}'
+    fqsn, calc_price = con2fqsn(con)
 
     # convert named tuples to dicts so we send usable keys
     new_ticks = []
@@ -616,9 +609,7 @@ def normalize(
 
     # generate fqsn with possible specialized suffix
     # for derivatives, note the lowercase.
-    data['symbol'] = data['fqsn'] = '.'.join(
-        (con.symbol, suffix)
-    ).lower()
+    data['symbol'] = data['fqsn'] = fqsn
 
     # convert named tuples to dicts for transport
     tbts = data.get('tickByTicks')
@@ -683,6 +674,13 @@ async def stream_quotes(
         # TODO: more consistent field translation
         atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]
 
+        if atype in {
+            'forex',
+            'index',
+            'commodity',
+        }:
+            syminfo['no_vlm'] = True
+
         # for stocks it seems TWS reports too small a tick size
         # such that you can't submit orders with that granularity?
         min_tick = 0.01 if atype == 'stock' else 0
@@ -709,9 +707,9 @@ async def stream_quotes(
             },
 
         }
-        return init_msgs
+        return init_msgs, syminfo
 
-    init_msgs = mk_init_msgs()
+    init_msgs, syminfo = mk_init_msgs()
 
     # TODO: we should instead spawn a task that waits on a feed to start
     # and let it wait indefinitely..instead of this hard coded stuff.
@@ -720,7 +718,13 @@ async def stream_quotes(
 
     # it might be outside regular trading hours so see if we can at
     # least grab history.
-    if isnan(first_ticker.last):
+    if (
+        isnan(first_ticker.last)
+        and type(first_ticker.contract) not in (
+            ibis.Commodity,
+            ibis.Forex
+        )
+    ):
         task_status.started((init_msgs, first_quote))
 
         # it's not really live but this will unblock
@@ -743,10 +747,16 @@ async def stream_quotes(
         task_status.started((init_msgs, first_quote))
 
         async with aclosing(stream):
-            if type(first_ticker.contract) not in (
-                ibis.Commodity,
-                ibis.Forex
-            ):
+            if syminfo.get('no_vlm', False):
+
+                # generally speaking these feeds don't
+                # include vlm data.
+                atype = syminfo['asset_type']
+                log.info(
+                    f'Non-vlm asset {sym}@{atype}, skipping quote poll...'
+                )
+
+            else:
                 # wait for real volume on feed (trading might be closed)
                 while True:
                     ticker = await stream.receive()
@@ -805,6 +815,9 @@ async def data_reset_hack(
       successful.
     - other OS support?
     - integration with ``ib-gw`` run in docker + Xorg?
+    - is it possible to offer a local server that can be accessed by
+      a client? Would be sure be handy for running native java blobs
+      that need to be wrangle.
 
     '''
 
@@ -835,7 +848,10 @@ async def data_reset_hack(
             client.mouse.click()
             client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked
 
-    await tractor.to_asyncio.run_task(vnc_click_hack)
+    try:
+        await tractor.to_asyncio.run_task(vnc_click_hack)
+    except OSError:
+        return False
 
     # we don't really need the ``xdotool`` approach any more B)
     return True
@@ -909,17 +925,18 @@ async def open_symbol_search(
             # trigger async request
             await trio.sleep(0)
 
-            # match against our ad-hoc set immediately
-            adhoc_matches = fuzzy.extractBests(
-                pattern,
-                list(_adhoc_futes_set),
-                score_cutoff=90,
-            )
-            log.info(f'fuzzy matched adhocs: {adhoc_matches}')
-            adhoc_match_results = {}
-            if adhoc_matches:
-                # TODO: do we need to pull contract details?
-                adhoc_match_results = {i[0]: {} for i in adhoc_matches}
+            # # match against our ad-hoc set immediately
+            # adhoc_matches = fuzzy.extractBests(
+            #     pattern,
+            #     list(_adhoc_futes_set),
+            #     score_cutoff=90,
+            # )
+            # log.info(f'fuzzy matched adhocs: {adhoc_matches}')
+            # adhoc_match_results = {}
+            # if adhoc_matches:
+            #     # TODO: do we need to pull contract details?
+            #     adhoc_match_results = {i[0]: {} for i in
+            #     adhoc_matches}
 
             log.debug(f'fuzzy matching stocks {stock_results}')
             stock_matches = fuzzy.extractBests(
@@ -928,7 +945,8 @@ async def open_symbol_search(
                 score_cutoff=50,
             )
 
-            matches = adhoc_match_results | {
+            # matches = adhoc_match_results | {
+            matches = {
                 item[0]: {} for item in stock_matches
             }
             # TODO: we used to deliver contract details
```

[File diff suppressed because it is too large]
New file: kraken backend README

```diff
@@ -0,0 +1,64 @@
+``kraken`` backend
+------------------
+though they don't have the most liquidity of all the cexes they sure are
+accommodating to those of us who appreciate a little ``xmr``.
+
+status
+******
+current support is *production grade* and both real-time data and order
+management should be correct and fast. this backend is used by core devs
+for live trading.
+
+
+config
+******
+In order to get order mode support your ``brokers.toml``
+needs to have something like the following:
+
+.. code:: toml
+
+    [kraken]
+    accounts.spot = 'spot'
+    key_descr = "spot"
+    api_key = "69696969696969696696969696969696969696969696969696969696"
+    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"
+
+
+If everything works correctly you should see any current positions
+loaded in the pps pane on chart load and you should also be able to
+check your trade records in the file::
+
+    <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml
+
+
+An example ledger file will have entries written verbatim from the
+trade events schema:
+
+.. code:: toml
+
+    [TFJBKK-SMBZS-VJ4UWS]
+    ordertxid = "SMBZSA-7CNQU-3HWLNJ"
+    postxid = "SMBZSE-M7IF5-CFI7LT"
+    pair = "XXMRZEUR"
+    time = 1655691993.4133966
+    type = "buy"
+    ordertype = "limit"
+    price = "103.97000000"
+    cost = "499.99999977"
+    fee = "0.80000000"
+    vol = "4.80907954"
+    margin = "0.00000000"
+    misc = ""
+
+
+your ``pps.toml`` file will have position entries like,
+
+.. code:: toml
+
+    [kraken.spot."xmreur.kraken"]
+    size = 4.80907954
+    ppu = 103.97000000
+    bsuid = "XXMRZEUR"
+    clears = [
+        { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
+    ]
```
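Both the ledger and ``pps.toml`` files above are plain TOML, so they can be inspected with the stdlib parser. A hypothetical sketch (Python >= 3.11 for ``tomllib``; the config-dir placeholder must be replaced with your actual piker config location, and piker itself uses its own loader):

```python
import tomllib
from pathlib import Path

# '<pikerk_conf_dir>' is a placeholder, as in the README above
conf_dir = Path('<pikerk_conf_dir>')
ledger_path = conf_dir / 'ledgers' / 'trades_kraken_spot.toml'

with ledger_path.open('rb') as f:
    ledger = tomllib.load(f)

for txid, record in ledger.items():
    # kraken delivers numeric fields as strings, hence the float() casts
    print(txid, record['pair'], float(record['vol']), float(record['price']))
```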
New file: kraken backend package ``__init__``

```diff
@@ -0,0 +1,61 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Kraken backend.
+
+Sub-modules within break into the core functionalities:
+
+- ``broker.py`` part for orders / trading endpoints
+- ``feed.py`` for real-time data feed endpoints
+- ``api.py`` for the core API machinery which is ``trio``-ized
+  wrapping around ``ib_insync``.
+
+'''
+
+from piker.log import get_logger
+
+log = get_logger(__name__)
+
+from .api import (
+    get_client,
+)
+from .feed import (
+    open_history_client,
+    open_symbol_search,
+    stream_quotes,
+)
+from .broker import (
+    trades_dialogue,
+    norm_trade_records,
+)
+
+__all__ = [
+    'get_client',
+    'trades_dialogue',
+    'open_history_client',
+    'open_symbol_search',
+    'stream_quotes',
+    'norm_trade_records',
+]
+
+
+# tractor RPC enable arg
+__enable_modules__: list[str] = [
+    'api',
+    'feed',
+    'broker',
+]
```
@ -0,0 +1,541 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Kraken web API wrapping.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
import itertools
from typing import (
    Any,
    Optional,
    Union,
)
import time

from bidict import bidict
import pendulum
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import urllib.parse
import hashlib
import hmac
import base64
import trio

from piker import config
from piker.brokers._util import (
    resproc,
    SymbolNotFound,
    BrokerError,
    DataThrottle,
)
from piker.pp import Transaction
from . import log

# <uri>/<version>/
_url = 'https://api.kraken.com/0'


# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('count', int),
    ('bar_wap', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
    'tick_decimals': 'pair_decimals',
}


def get_config() -> dict[str, Any]:

    conf, path = config.load()
    section = conf.get('kraken')

    if section is None:
        log.warning(f'No config section found for kraken in {path}')
        return {}

    return section


def get_kraken_signature(
    urlpath: str,
    data: dict[str, Any],
    secret: str

) -> str:
    postdata = urllib.parse.urlencode(data)
    encoded = (str(data['nonce']) + postdata).encode()
    message = urlpath.encode() + hashlib.sha256(encoded).digest()

    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
    sigdigest = base64.b64encode(mac.digest())
    return sigdigest.decode()

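This follows Kraken's documented ``API-Sign`` scheme: SHA-256 over the
nonce concatenated with the urlencoded POST body, then HMAC-SHA-512 of the
URI path plus that digest, keyed with the base64-decoded secret. A minimal
runnable sketch (the secret below is a dummy, base64-valid stand-in so the
call actually executes; swap in your real key material):

.. code:: python

    import base64
    import time

    from piker.brokers.kraken.api import get_kraken_signature

    # dummy but decodable secret; NOT a real key
    dummy_secret = base64.b64encode(b'not-a-real-secret').decode()

    data = {'nonce': str(int(1000 * time.time()))}
    sig = get_kraken_signature('/0/private/Balance', data, dummy_secret)
    print(sig)  # base64 string destined for the ``API-Sign`` header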
class InvalidKey(ValueError):
    '''
    EAPI:Invalid key
    This error is returned when the API key used for the call is
    either expired or disabled, please review the API key in your
    Settings -> API tab of account management or generate a new one
    and update your application.

    '''


class Client:

    # global symbol normalization table
    _ntable: dict[str, str] = {}
    _atable: bidict[str, str] = bidict()

    def __init__(
        self,
        config: dict[str, str],
        name: str = '',
        api_key: str = '',
        secret: str = ''
    ) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
        self.conf: dict[str, str] = config
        self._pairs: list[str] = []
        self._name = name
        self._api_key = api_key
        self._secret = secret

    @property
    def pairs(self) -> dict[str, Any]:
        if self._pairs is None:
            raise RuntimeError(
                "Make sure to run `cache_symbols()` on startup!"
            )
            # retrieve and cache all symbols

        return self._pairs

    async def _public(
        self,
        method: str,
        data: dict,
    ) -> dict[str, Any]:
        resp = await self._sesh.post(
            path=f'/public/{method}',
            json=data,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def _private(
        self,
        method: str,
        data: dict,
        uri_path: str
    ) -> dict[str, Any]:
        headers = {
            'Content-Type':
                'application/x-www-form-urlencoded',
            'API-Key':
                self._api_key,
            'API-Sign':
                get_kraken_signature(uri_path, data, self._secret)
        }
        resp = await self._sesh.post(
            path=f'/private/{method}',
            data=data,
            headers=headers,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def endpoint(
        self,
        method: str,
        data: dict[str, Any]

    ) -> dict[str, Any]:
        uri_path = f'/0/private/{method}'
        data['nonce'] = str(int(1000*time.time()))
        return await self._private(method, data, uri_path)

    async def get_balances(
        self,
    ) -> dict[str, float]:
        '''
        Return the set of asset balances for this account
        by symbol.

        '''
        resp = await self.endpoint(
            'Balance',
            {},
        )
        by_bsuid = resp['result']
        return {
            self._atable[sym].lower(): float(bal)
            for sym, bal in by_bsuid.items()
        }

    async def get_assets(self) -> dict[str, dict]:
        resp = await self._public('Assets', {})
        return resp['result']

    async def cache_assets(self) -> None:
        assets = self.assets = await self.get_assets()
        for bsuid, info in assets.items():
            self._atable[bsuid] = info['altname']

    async def get_trades(
        self,
        fetch_limit: int = 10,

    ) -> dict[str, Any]:
        '''
        Get the trades (aka cleared orders) history from the rest endpoint:
        https://docs.kraken.com/rest/#operation/getTradeHistory

        '''
        ofs = 0
        trades_by_id: dict[str, Any] = {}

        for i in itertools.count():
            if i >= fetch_limit:
                break

            # increment 'ofs' pagination offset
            ofs = i*50

            resp = await self.endpoint(
                'TradesHistory',
                {'ofs': ofs},
            )
            by_id = resp['result']['trades']
            trades_by_id.update(by_id)

            # we can get up to 50 results per query
            if (
                len(by_id) < 50
            ):
                err = resp.get('error')
                if err:
                    raise BrokerError(err)

                # we know we received the max amount of
                # trade results so there may be more history.
                # catch the end of the trades
                count = resp['result']['count']
                break

        # sanity check on update
        assert count == len(trades_by_id.values())
        return trades_by_id

    async def get_xfers(
        self,
        asset: str,
        src_asset: str = '',

    ) -> dict[str, Transaction]:
        '''
        Get asset balance transfer transactions.

        Currently only withdrawals are supported.

        '''
        xfers: list[dict] = (await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
        ))['result']

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
        #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
        #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
        #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
        #     'amount': '0.00300726', 'fee': '0.00001000', 'time':
        #     1658347714, 'status': 'Success'}]}

        trans: dict[str, Transaction] = {}
        for entry in xfers:
            # look up the normalized name
            asset = self._atable[entry['asset']].lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commissions cost necessarily..)
            cost = float(entry['fee'])

            tran = Transaction(
                fqsn=asset + '.kraken',
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
                bsuid=f'{asset}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +
                    cost
                ),
                # since this will be treated as a "sell" it
                # shouldn't be needed to compute the be price.
                price='NaN',

                # XXX: see note above
                cost=0,
            )
            trans[tran.tid] = tran

        return trans

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float,
        reqid: str = None,
        validate: bool = False  # set True test call without a real submission

    ) -> dict:
        '''
        Place an order and return integer request id provided by client.

        '''
        # Build common data dict for common keys from both endpoints
        data = {
            "pair": symbol,
            "price": str(price),
            "validate": validate
        }
        if reqid is None:
            # Build order data for kraken api
            data |= {
                "ordertype": "limit",
                "type": action,
                "volume": str(size),
            }
            return await self.endpoint('AddOrder', data)

        else:
            # Edit order data for kraken api
            data["txid"] = reqid
            return await self.endpoint('EditOrder', data)

    async def submit_cancel(
        self,
        reqid: str,
    ) -> dict:
        '''
        Send cancel request for order id ``reqid``.

        '''
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})

    async def symbol_info(
        self,
        pair: Optional[str] = None,

    ) -> dict[str, dict[str, str]]:

        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return data
        else:
            return pairs

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

            ntable = {}
            for restapikey, info in self._pairs.items():
                ntable[restapikey] = ntable[info['wsname']] = info['altname']

            self._ntable.update(ntable)

        return self._pairs

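For reference, after ``cache_symbols()`` runs, the class-level ``_ntable``
maps both API spellings of a pair onto its ``altname``; for BTC/USD that
comes out as (keys per Kraken's public ``AssetPairs`` data):

.. code:: python

    _ntable = {
        'XXBTZUSD': 'XBTUSD',  # REST 'AssetPairs' result key
        'XBT/USD': 'XBTUSD',   # ws ``wsname``
    }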
    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,
    ) -> dict[str, Any]:
        if self._pairs is not None:
            data = self._pairs
        else:
            data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0]['altname']: item[0] for item in matches}

    async def bars(
        self,
        symbol: str = 'XBTUSD',

        # UTC 2017-07-02 12:53:20
        since: Optional[Union[int, datetime]] = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if since is None:
            since = pendulum.now('UTC').start_of('minute').subtract(
                minutes=count).timestamp()

        elif isinstance(since, int):
            since = pendulum.from_timestamp(since).timestamp()

        else:  # presumably a pendulum datetime
            since = since.timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, int(since)))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            return array
        except KeyError:
            errmsg = json['error'][0]

            if 'not found' in errmsg:
                raise SymbolNotFound(errmsg + f': {symbol}')

            elif 'Too many requests' in errmsg:
                raise DataThrottle(f'{symbol}')

            else:
                raise BrokerError(errmsg)

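Pulling the maximum 720 most-recent 1m bars then looks like this, a sketch
using the ``get_client()`` cacher defined at the end of this module (hits
the live public endpoint):

.. code:: python

    import trio
    from piker.brokers.kraken import get_client

    async def main() -> None:
        async with get_client() as client:
            array = await client.bars('XBTUSD')  # defaults to the 720 bar max
            # structured-array field names follow ``_ohlc_dtype`` above
            print(array.dtype.names, len(array))

    trio.run(main)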
    @classmethod
    def normalize_symbol(
        cls,
        ticker: str
    ) -> str:
        '''
        Normalize symbol names to a 3x3 pair from the global
        definition map which we build out from the data retrieved from
        the 'AssetPairs' endpoint, see methods above.

        '''
        symlen = len(ticker)
        if symlen != 6:
            ticker = cls._ntable[ticker]
        else:
            raise ValueError(f'Unhandled symbol: {ticker}')

        return ticker.lower()


@acm
async def get_client() -> Client:

    conf = get_config()
    if conf:
        client = Client(
            conf,
            name=conf['key_descr'],
            api_key=conf['api_key'],
            secret=conf['secret']
        )
    else:
        client = Client({})

    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as nurse:
        nurse.start_soon(client.cache_assets)
        await client.cache_symbols()

    yield client
File diff suppressed because it is too large
@ -0,0 +1,507 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Real-time and historical data feed endpoints.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import (
    Any,
    Optional,
    Callable,
)
import time

from async_generator import aclosing
from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
import tractor
import trio
import wsproto

from piker._cacheables import open_cached_client
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
from piker.log import get_console_log
from piker.data import ShmArray
from piker.data.types import Struct
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (
    Client,
)


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair


class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
    https://docs.kraken.com/websockets/#message-ohlc

    '''
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1  (name-interval)
    pair: str  # fx pair
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
    high: float  # High price within interval
    low: float  # Low price within interval
    close: float  # Close price of interval
    vwap: float  # Volume weighted average price within interval
    volume: float  # Accumulated volume **within interval**
    count: int  # Number of trades within interval
    # (sampled) generated tick data
    ticks: list[Any] = []


async def stream_messages(
    ws: NoBsWs,
):
    '''
    Message stream parser and heartbeat handler.

    Deliver ws subscription messages as well as handle heartbeat logic
    through a single async generator.

    '''
    too_slow_count = last_hb = 0

    while True:

        with trio.move_on_after(5) as cs:
            msg = await ws.recv_msg()

        # trigger reconnection if heartbeat is laggy
        if cs.cancelled_caught:

            too_slow_count += 1

            if too_slow_count > 20:
                log.warning(
                    "Heartbeat is too slow, resetting ws connection")

                await ws._connect()
                too_slow_count = 0
                continue

        match msg:
            case {'event': 'heartbeat'}:
                now = time.time()
                delay = now - last_hb
                last_hb = now

                # XXX: why tf is this not printing without --tl flag?
                log.debug(f"Heartbeat after {delay}")
                # print(f"Heartbeat after {delay}")

                continue

            case _:
                # passthrough sub msgs
                yield msg


async def process_data_feed_msgs(
    ws: NoBsWs,
):
    '''
    Parse and pack data feed messages.

    '''
    async for msg in stream_messages(ws):
        match msg:
            case {
                'errorMessage': errmsg
            }:
                raise BrokerError(errmsg)

            case {
                'event': 'subscriptionStatus',
            } as sub:
                log.info(
                    'WS subscription is active:\n'
                    f'{sub}'
                )
                continue

            case [
                chan_id,
                *payload_array,
                chan_name,
                pair
            ]:
                if 'ohlc' in chan_name:
                    ohlc = OHLC(
                        chan_id,
                        chan_name,
                        pair,
                        *payload_array[0]
                    )
                    ohlc.typecast()
                    yield 'ohlc', ohlc

                elif 'spread' in chan_name:

                    bid, ask, ts, bsize, asize = map(
                        float, payload_array[0])

                    # TODO: really makes you think IB has a horrible API...
                    quote = {
                        'symbol': pair.replace('/', ''),
                        'ticks': [
                            {'type': 'bid', 'price': bid, 'size': bsize},
                            {'type': 'bsize', 'price': bid, 'size': bsize},

                            {'type': 'ask', 'price': ask, 'size': asize},
                            {'type': 'asize', 'price': ask, 'size': asize},
                        ],
                    }
                    yield 'l1', quote

                # elif 'book' in msg[-2]:
                #     chan_id, *payload_array, chan_name, pair = msg
                #     print(msg)

            case _:
                print(f'UNHANDLED MSG: {msg}')
                # yield msg


def normalize(
    ohlc: OHLC,

) -> dict:
    quote = ohlc.to_dict()
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercases symbols.
    topic = quote['pair'].replace('/', '').lower()

    # print(quote)
    return topic, quote


def make_sub(pairs: list[str], data: dict[str, Any]) -> dict[str, str]:
    '''
    Create a request subscription packet dict.

    https://docs.kraken.com/websockets/#message-subscribe

    '''
    # eg. specific logic for this in kraken's sync client:
    # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
    return {
        'pair': pairs,
        'event': 'subscribe',
        'subscription': data,
    }

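For instance, the 1m ohlc subscription request built inside ``subscribe()``
further below comes out as (pair names here are just examples, and
``make_sub`` from this module is assumed in scope):

.. code:: python

    sub = make_sub(['XBT/USD', 'ETH/USD'], {'name': 'ohlc', 'interval': 1})
    assert sub == {
        'pair': ['XBT/USD', 'ETH/USD'],
        'event': 'subscribe',
        'subscription': {'name': 'ohlc', 'interval': 1},
    }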
@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more than the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            nonlocal queries
            if queries > 0:
                raise DataUnavailable

            count = 0
            while count <= 3:
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    count += 1
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 1, 'rate': 1}


async def backfill_bars(

    sym: str,
    shm: ShmArray,  # type: ignore # noqa
    count: int = 10,  # NOTE: any more and we'll overrun the underlying buffer
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Fill historical bars into shared mem / storage afap.
    '''
    with trio.CancelScope() as cs:
        async with open_cached_client('kraken') as client:
            bars = await client.bars(symbol=sym)
            shm.push(bars)
            task_status.started(cs)


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}
    sym_infos = {}

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()

            si = Pair(**await client.symbol_info(sym))  # validation
            syminfo = si.to_dict()
            syminfo['price_tick_size'] = 1 / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals
            syminfo['asset_type'] = 'crypto'
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: wsproto.WSConnection):
            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's shitty sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = make_sub(
                list(ws_pairs.values()),
                {'name': 'ohlc', 'interval': 1}
            )

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = make_sub(
                list(ws_pairs.values()),
                {'name': 'spread'}  # 'depth': 10}
            )

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            await ws.send_msg({
                'pair': list(ws_pairs.values()),
                'event': 'unsubscribe',
                'subscription': ['ohlc', 'spread'],
            })

            # XXX: do we need to ack the unsub?
            # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with (
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
            ) as ws,
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs, quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                await send_chan.send({topic: quote})

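The incremental volume trick above, in isolation: kraken's ohlc msgs carry
the *cumulative* volume for the live interval, so per-update trade size is
the delta between consecutive msgs (the numbers below are made up):

.. code:: python

    # two consecutive cumulative volumes within the same 1m interval
    prev_cum_vol = 4.0
    cur_cum_vol = 4.25

    # units traded since the last msg
    tick_volume = cur_cum_vol - prev_cum_vol
    assert round(tick_volume, 8) == 0.25

    # on a *new* interval the cumulative counter resets, so the full
    # msg volume is itself the tick volume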
@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send(
                    {item[0]['altname']: item[0]
                     for item in matches}
                )

@ -22,54 +22,10 @@ from enum import Enum
 from typing import Optional
 
 from bidict import bidict
-from pydantic import BaseModel, validator
 
 from ..data._source import Symbol
-from ._messages import BrokerdPosition, Status
-
-
-class Position(BaseModel):
-    '''
-    Basic pp (personal position) model with attached fills history.
-
-    This type should be IPC wire ready?
-
-    '''
-    symbol: Symbol
-
-    # last size and avg entry price
-    size: float
-    avg_price: float  # TODO: contextual pricing
-
-    # ordered record of known constituent trade messages
-    fills: list[Status] = []
-
-    def update_from_msg(
-        self,
-        msg: BrokerdPosition,
-
-    ) -> None:
-
-        # XXX: better place to do this?
-        symbol = self.symbol
-
-        lot_size_digits = symbol.lot_size_digits
-        avg_price, size = (
-            round(msg['avg_price'], ndigits=symbol.tick_size_digits),
-            round(msg['size'], ndigits=lot_size_digits),
-        )
-
-        self.avg_price = avg_price
-        self.size = size
-
-    @property
-    def dsize(self) -> float:
-        '''
-        The "dollar" size of the pp, normally in trading (fiat) unit
-        terms.
-
-        '''
-        return self.avg_price * self.size
-
-
+from ..data.types import Struct
+from ..pp import Position
+
+
 _size_units = bidict({

@ -84,33 +40,30 @@ SizeUnit = Enum(
 )
 
 
-class Allocator(BaseModel):
-
-    class Config:
-        validate_assignment = True
-        copy_on_model_validation = False
-        arbitrary_types_allowed = True
-
-        # required to get the account validator lookup working?
-        extra = 'allow'
-        underscore_attrs_are_private = False
+class Allocator(Struct):
 
     symbol: Symbol
     account: Optional[str] = 'paper'
 
+    _size_units: bidict[str, Optional[str]] = _size_units
+
     # TODO: for enums this clearly doesn't fucking work, you can't set
     # a default at startup by passing in a `dict` but yet you can set
    # that value through assignment..for wtv cucked reason.. honestly, pure
     # unintuitive garbage.
-    size_unit: str = 'currency'
-    _size_units: dict[str, Optional[str]] = _size_units
+    _size_unit: str = 'currency'
 
-    @validator('size_unit', pre=True)
-    def maybe_lookup_key(cls, v):
-        # apply the corresponding enum key for the text "description" value
+    @property
+    def size_unit(self) -> str:
+        return self._size_unit
+
+    @size_unit.setter
+    def size_unit(self, v: str) -> Optional[str]:
         if v not in _size_units:
-            return _size_units.inverse[v]
+            v = _size_units.inverse[v]
 
         assert v in _size_units
+        self._size_unit = v
         return v
 
 # TODO: if we ever want ot support non-uniform entry-slot-proportion
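The pattern in that hunk, as a standalone miniature: a pydantic
``@validator`` is traded for a plain property/setter pair that accepts
either the unit key or its display label (the labels below are
illustrative, piker's real table may differ):

.. code:: python

    from typing import Optional

    from bidict import bidict

    # illustrative key -> display-label mapping
    _size_units = bidict({
        'currency': '$ size',
        'units': '# units',
    })

    class Alloc:
        _size_unit: str = 'currency'

        @property
        def size_unit(self) -> str:
            return self._size_unit

        @size_unit.setter
        def size_unit(self, v: str) -> Optional[str]:
            # accept either the key or its display label
            if v not in _size_units:
                v = _size_units.inverse[v]

            assert v in _size_units
            self._size_unit = v
            return v

    a = Alloc()
    a.size_unit = '# units'       # label form normalizes to the key
    assert a.size_unit == 'units'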
@ -173,7 +126,7 @@ class Allocator(BaseModel):
             l_sub_pp = self.units_limit - abs_live_size
 
         elif size_unit == 'currency':
-            live_cost_basis = abs_live_size * live_pp.avg_price
+            live_cost_basis = abs_live_size * live_pp.ppu
             slot_size = currency_per_slot / price
             l_sub_pp = (self.currency_limit - live_cost_basis) / price
 
@ -205,7 +158,7 @@ class Allocator(BaseModel):
         if size_unit == 'currency':
             # compute the "projected" limit's worth of units at the
             # current pp (weighted) price:
-            slot_size = currency_per_slot / live_pp.avg_price
+            slot_size = currency_per_slot / live_pp.ppu
 
         else:
             slot_size = u_per_slot
 
@ -244,7 +197,12 @@ class Allocator(BaseModel):
         if order_size < slot_size:
             # compute a fractional slots size to display
             slots_used = self.slots_used(
-                Position(symbol=sym, size=order_size, avg_price=price)
+                Position(
+                    symbol=sym,
+                    size=order_size,
+                    ppu=price,
+                    bsuid=sym,
+                )
             )
 
         return {
 
@ -271,8 +229,8 @@ class Allocator(BaseModel):
         abs_pp_size = abs(pp.size)
 
         if self.size_unit == 'currency':
-            # live_currency_size = size or (abs_pp_size * pp.avg_price)
-            live_currency_size = abs_pp_size * pp.avg_price
+            # live_currency_size = size or (abs_pp_size * pp.ppu)
+            live_currency_size = abs_pp_size * pp.ppu
             prop = live_currency_size / self.currency_limit
 
         else:
 
@ -300,7 +258,7 @@ def mk_allocator(
     # default allocation settings
     defaults: dict[str, float] = {
         'account': None,  # select paper by default
-        'size_unit': 'currency',
+        # 'size_unit': 'currency',
         'units_limit': 400,
         'currency_limit': 5e3,
         'slots': 4,
 
@ -339,10 +297,13 @@ def mk_allocator(
         # entry step 1.0
         alloc.units_limit = alloc.slots
 
+    else:
+        alloc.size_unit = 'currency'
+
     # if the current position is already greater then the limit
     # settings, increase the limit to the current position
     if alloc.size_unit == 'currency':
-        startup_size = startup_pp.size * startup_pp.avg_price
+        startup_size = startup_pp.size * startup_pp.ppu
 
         if startup_size > alloc.currency_limit:
             alloc.currency_limit = round(startup_size, ndigits=2)
@ -31,6 +31,7 @@ from ..log import get_logger
 from ._ems import _emsd_main
 from .._daemon import maybe_open_emsd
 from ._messages import Order, Cancel
+from ..brokers import get_brokermod
 
 
 log = get_logger(__name__)
 
@ -58,11 +59,11 @@ class OrderBook:
 
     def send(
         self,
-        msg: Order,
+        msg: Order | dict,
 
    ) -> dict:
         self._sent_orders[msg.oid] = msg
-        self._to_ems.send_nowait(msg.dict())
+        self._to_ems.send_nowait(msg)
         return msg
 
     def update(
 
@ -73,9 +74,8 @@ class OrderBook:
 
     ) -> dict:
         cmd = self._sent_orders[uuid]
-        msg = cmd.dict()
-        msg.update(data)
-        self._sent_orders[uuid] = Order(**msg)
+        msg = cmd.copy(update=data)
+        self._sent_orders[uuid] = msg
         self._to_ems.send_nowait(msg)
         return cmd
 
@ -88,7 +88,7 @@ class OrderBook:
             oid=uuid,
             symbol=cmd.symbol,
         )
-        self._to_ems.send_nowait(msg.dict())
+        self._to_ems.send_nowait(msg)
 
 
 _orders: OrderBook = None
 
@ -149,7 +149,7 @@ async def relay_order_cmds_from_sync_code(
     book = get_orders()
     async with book._from_order_book.subscribe() as orders_stream:
         async for cmd in orders_stream:
-            if cmd['symbol'] == symbol_key:
+            if cmd.symbol == symbol_key:
                 log.info(f'Send order cmd:\n{pformat(cmd)}')
                 # send msg over IPC / wire
                 await to_ems_stream.send(cmd)
 
@ -204,14 +204,21 @@ async def open_ems(
     from ..data._source import unpack_fqsn
     broker, symbol, suffix = unpack_fqsn(fqsn)
 
+    mode: str = 'live'
+
     async with maybe_open_emsd(broker) as portal:
 
+        mod = get_brokermod(broker)
+        if not getattr(mod, 'trades_dialogue', None):
+            mode = 'paper'
+
         async with (
             # connect to emsd
             portal.open_context(
 
                 _emsd_main,
                 fqsn=fqsn,
+                exec_mode=mode,
 
             ) as (ctx, (positions, accounts)),
@ -20,12 +20,12 @@ In da suit parlances: "Execution management systems"
 """
 from contextlib import asynccontextmanager
 from dataclasses import dataclass, field
+from math import isnan
 from pprint import pformat
 import time
 from typing import AsyncIterator, Callable
 
 from bidict import bidict
-from pydantic import BaseModel
 import trio
 from trio_typing import TaskStatus
 import tractor
 
@ -33,6 +33,7 @@ import tractor
 from ..log import get_logger
 from ..data._normalize import iterticks
 from ..data.feed import Feed, maybe_open_feed
+from ..data.types import Struct
 from .._daemon import maybe_spawn_brokerd
 from . import _paper_engine as paper
 from ._messages import (
 
@ -87,7 +88,8 @@ def mk_check(
 
 @dataclass
 class _DarkBook:
-    '''EMS-trigger execution book.
+    '''
+    EMS-trigger execution book.
 
     Contains conditions for executions (aka "orders" or "triggers")
     which are not exposed to brokers and thus the market; i.e. these are
 
@ -230,7 +232,7 @@ async def clear_dark_triggers(
                     price=submit_price,
                     size=cmd['size'],
                 )
-                await brokerd_orders_stream.send(msg.dict())
+                await brokerd_orders_stream.send(msg)
 
                 # mark this entry as having sent an order
                 # request. the entry will be replaced once the
 
@ -246,14 +248,11 @@ async def clear_dark_triggers(
 
                 msg = Status(
                     oid=oid,  # ems order id
-                    resp=resp,
                     time_ns=time.time_ns(),
-                    symbol=fqsn,
+                    resp=resp,
                     trigger_price=price,
-                    broker_details={'name': broker},
-                    cmd=cmd,  # original request message
-
-                ).dict()
+                    brokerd_msg=cmd,
+                )
 
                 # remove exec-condition from set
                 log.info(f'removing pred for {oid}')
 
@ -289,7 +288,11 @@ class TradesRelay:
     brokerd_dialogue: tractor.MsgStream
 
     # map of symbols to dicts of accounts to pp msgs
-    positions: dict[str, dict[str, BrokerdPosition]]
+    positions: dict[
+        # brokername, acctid
+        tuple[str, str],
+        list[BrokerdPosition],
+    ]
 
     # allowed account names
     accounts: tuple[str]
 
@ -298,7 +301,7 @@ class TradesRelay:
     consumers: int = 0
 
 
-class Router(BaseModel):
+class Router(Struct):
     '''
     Order router which manages and tracks per-broker dark book,
     alerts, clearing and related data feed management.
 
@ -319,10 +322,6 @@ class Router(BaseModel):
     # brokername to trades-dialogues streams with ``brokerd`` actors
     relays: dict[str, TradesRelay] = {}
 
-    class Config:
-        arbitrary_types_allowed = True
-        underscore_attrs_are_private = False
-
     def get_dark_book(
         self,
         brokername: str,
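The ``BaseModel`` -> ``Struct`` swap here (and in the ``Position`` /
``Allocator`` hunks above) is what lets messages flow to the IPC layer
without the ``.dict()`` round-trips being deleted throughout this diff. A
tiny stand-in sketch with raw ``msgspec`` (piker's ``Struct`` is its own
wrapper and this ``Pos`` type is illustrative, not piker's actual message):

.. code:: python

    import msgspec

    class Pos(msgspec.Struct):
        symbol: str
        size: float
        ppu: float

    msg = Pos('xmreur.kraken', 4.80907954, 103.97)
    wire = msgspec.msgpack.encode(msg)

    # decodes straight back into the typed struct, no dict step
    assert msgspec.msgpack.decode(wire, type=Pos) == msg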
@ -333,12 +332,11 @@ class Router(BaseModel):
|
||||||
|
|
||||||
@asynccontextmanager
|
@asynccontextmanager
|
||||||
async def maybe_open_brokerd_trades_dialogue(
|
async def maybe_open_brokerd_trades_dialogue(
|
||||||
|
|
||||||
self,
|
self,
|
||||||
feed: Feed,
|
feed: Feed,
|
||||||
symbol: str,
|
symbol: str,
|
||||||
dark_book: _DarkBook,
|
dark_book: _DarkBook,
|
||||||
_exec_mode: str,
|
exec_mode: str,
|
||||||
loglevel: str,
|
loglevel: str,
|
||||||
|
|
||||||
) -> tuple[dict, tractor.MsgStream]:
|
) -> tuple[dict, tractor.MsgStream]:
|
||||||
|
@ -348,14 +346,23 @@ class Router(BaseModel):
|
||||||
'''
|
'''
|
||||||
relay = self.relays.get(feed.mod.name)
|
relay = self.relays.get(feed.mod.name)
|
||||||
|
|
||||||
if relay is None:
|
if (
|
||||||
|
relay is None
|
||||||
|
|
||||||
|
# We always want to spawn a new relay for the paper engine
|
||||||
|
# per symbol since we need a new tractor context to be
|
||||||
|
# opened for every every symbol such that a new data feed
|
||||||
|
# and ``PaperBoi`` client will be created and then used to
|
||||||
|
# simulate clearing events.
|
||||||
|
or exec_mode == 'paper'
|
||||||
|
):
|
||||||
|
|
||||||
relay = await self.nursery.start(
|
relay = await self.nursery.start(
|
||||||
open_brokerd_trades_dialogue,
|
open_brokerd_trades_dialogue,
|
||||||
self,
|
self,
|
||||||
feed,
|
feed,
|
||||||
symbol,
|
symbol,
|
||||||
_exec_mode,
|
exec_mode,
|
||||||
loglevel,
|
loglevel,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -382,7 +389,7 @@ async def open_brokerd_trades_dialogue(
|
||||||
router: Router,
|
router: Router,
|
||||||
feed: Feed,
|
feed: Feed,
|
||||||
symbol: str,
|
symbol: str,
|
||||||
_exec_mode: str,
|
exec_mode: str,
|
||||||
loglevel: str,
|
loglevel: str,
|
||||||
|
|
||||||
task_status: TaskStatus[TradesRelay] = trio.TASK_STATUS_IGNORED,
|
task_status: TaskStatus[TradesRelay] = trio.TASK_STATUS_IGNORED,
|
||||||
|
@ -406,20 +413,20 @@ async def open_brokerd_trades_dialogue(
|
||||||
# when the data feed closes it may result in a half-closed
|
# when the data feed closes it may result in a half-closed
|
||||||
# channel that the brokerd side thinks is still open somehow!?
|
# channel that the brokerd side thinks is still open somehow!?
|
||||||
async with maybe_spawn_brokerd(
|
async with maybe_spawn_brokerd(
|
||||||
|
|
||||||
broker,
|
broker,
|
||||||
loglevel=loglevel,
|
loglevel=loglevel,
|
||||||
|
|
||||||
) as portal:
|
) as portal:
|
||||||
|
if (
|
||||||
if trades_endpoint is None or _exec_mode == 'paper':
|
trades_endpoint is None
|
||||||
|
or exec_mode == 'paper'
|
||||||
|
):
|
||||||
# for paper mode we need to mock this trades response feed
|
# for paper mode we need to mock this trades response feed
|
||||||
# so we load bidir stream to a new sub-actor running a
|
# so we load bidir stream to a new sub-actor running
|
||||||
# paper-simulator clearing engine.
|
# a paper-simulator clearing engine.
|
||||||
|
|
||||||
# load the paper trading engine
|
# load the paper trading engine
|
||||||
_exec_mode = 'paper'
|
exec_mode = 'paper'
|
||||||
log.warning(f'Entering paper trading mode for {broker}')
|
log.warning(f'Entering paper trading mode for {broker}')
|
||||||
|
|
||||||
# load the paper trading engine as a subactor of this emsd
|
# load the paper trading engine as a subactor of this emsd
|
||||||
|
@ -461,18 +468,24 @@ async def open_brokerd_trades_dialogue(
|
||||||
# normalizing them to EMS messages and relaying back to
|
# normalizing them to EMS messages and relaying back to
|
||||||
# the piker order client set.
|
# the piker order client set.
|
||||||
|
|
||||||
# locally cache and track positions per account.
|
# locally cache and track positions per account with
|
||||||
|
# a table of (brokername, acctid) -> `BrokerdPosition`
|
||||||
|
# msgs.
|
||||||
pps = {}
|
pps = {}
|
||||||
for msg in positions:
|
for msg in positions:
|
||||||
log.info(f'loading pp: {msg}')
|
log.info(f'loading pp: {msg}')
|
||||||
|
|
||||||
account = msg['account']
|
account = msg['account']
|
||||||
|
|
||||||
|
# TODO: better value error for this which
|
||||||
|
# dumps the account and message and states the
|
||||||
|
# mismatch..
|
||||||
assert account in accounts
|
assert account in accounts
|
||||||
|
|
||||||
pps.setdefault(
|
pps.setdefault(
|
||||||
f'{msg["symbol"]}.{broker}',
|
(broker, account),
|
||||||
{}
|
[],
|
||||||
)[account] = msg
|
).append(msg)
|
||||||
|
|
||||||
relay = TradesRelay(
|
relay = TradesRelay(
|
||||||
brokerd_dialogue=brokerd_trades_stream,
|
brokerd_dialogue=brokerd_trades_stream,
|
||||||
|
@ -570,19 +583,17 @@ async def translate_and_relay_brokerd_events(
|
||||||
|
|
||||||
if name == 'position':
|
if name == 'position':
|
||||||
|
|
||||||
pos_msg = BrokerdPosition(**brokerd_msg).dict()
|
pos_msg = BrokerdPosition(**brokerd_msg)
|
||||||
|
|
||||||
# XXX: this will be useful for automatic strats yah?
|
# XXX: this will be useful for automatic strats yah?
|
||||||
# keep pps per account up to date locally in ``emsd`` mem
|
# keep pps per account up to date locally in ``emsd`` mem
|
||||||
sym, broker = pos_msg['symbol'], pos_msg['broker']
|
sym, broker = pos_msg.symbol, pos_msg.broker
|
||||||
|
|
||||||
relay.positions.setdefault(
|
relay.positions.setdefault(
|
||||||
# NOTE: translate to a FQSN!
|
# NOTE: translate to a FQSN!
|
||||||
f'{sym}.{broker}',
|
(broker, sym),
|
||||||
{}
|
[]
|
||||||
).setdefault(
|
).append(pos_msg)
|
||||||
pos_msg['account'], {}
|
|
||||||
).update(pos_msg)
|
|
||||||
|
|
||||||
# fan-out-relay position msgs immediately by
|
# fan-out-relay position msgs immediately by
|
||||||
# broadcasting updates on all client streams
|
# broadcasting updates on all client streams
|
||||||
@@ -635,14 +646,21 @@ async def translate_and_relay_brokerd_events(
                 # something is out of order, we don't have an oid for
                 # this broker-side message.
                 log.error(
-                    'Unknown oid:{oid} for msg:\n'
-                    f'{pformat(brokerd_msg)}'
+                    f'Unknown oid: {oid} for msg:\n'
+                    f'{pformat(brokerd_msg)}\n'
                     'Unable to relay message to client side!?'
                 )

             else:
                 # check for existing live flow entry
                 entry = book._ems_entries.get(oid)
+                old_reqid = entry.reqid
+
+                if old_reqid and old_reqid != reqid:
+                    log.warning(
+                        f'Brokerd order id change for {oid}:\n'
+                        f'{old_reqid} -> {reqid}'
+                    )

                 # initial response to brokerd order request
                 if name == 'ack':

@@ -653,6 +671,10 @@ async def translate_and_relay_brokerd_events(
                     # a ``BrokerdOrderAck`` **must** be sent after an order
                     # request in order to establish this id mapping.
                     book._ems2brokerd_ids[oid] = reqid
+                    log.info(
+                        'Rx ACK for order\n'
+                        f'oid: {oid} -> reqid: {reqid}'
+                    )

                     # new order which has not yet be registered into the
                     # local ems book, insert it now and handle 2 cases:

@@ -667,7 +689,7 @@ async def translate_and_relay_brokerd_events(
                         entry.reqid = reqid

                         # tell broker to cancel immediately
-                        await brokerd_trades_stream.send(entry.dict())
+                        await brokerd_trades_stream.send(entry)

                     # - the order is now active and will be mirrored in
                     #   our book -> registered as live flow

@@ -680,6 +702,9 @@ async def translate_and_relay_brokerd_events(
                 # a live flow now exists
                 oid = entry.oid

+                # TODO: instead this should be our status set.
+                # ack, open, fill, closed, cancelled'
+
                 resp = None
                 broker_details = {}

@@ -707,7 +732,7 @@ async def translate_and_relay_brokerd_events(
                     # if 10147 in message: cancel

                     resp = 'broker_errored'
-                    broker_details = msg.dict()
+                    broker_details = msg

                     # don't relay message to order requester client
                     # continue

@@ -742,7 +767,7 @@ async def translate_and_relay_brokerd_events(
                     resp = 'broker_' + msg.status

                     # pass the BrokerdStatus msg inside the broker details field
-                    broker_details = msg.dict()
+                    broker_details = msg

                 elif name in (
                     'fill',

@@ -751,7 +776,7 @@ async def translate_and_relay_brokerd_events(

                     # proxy through the "fill" result(s)
                     resp = 'broker_filled'
-                    broker_details = msg.dict()
+                    broker_details = msg

                     log.info(f'\nFill for {oid} cleared with:\n{pformat(resp)}')

@@ -769,7 +794,7 @@ async def translate_and_relay_brokerd_events(
                     time_ns=time.time_ns(),
                     broker_reqid=reqid,
                     brokerd_msg=broker_details,
-                ).dict()
+                )
             )
         except KeyError:
             log.error(
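These hunks also harden the oid -> brokerd reqid bookkeeping: the ack path now logs the mapping, and a reqid that differs from a previously recorded one triggers a warning rather than being silently clobbered. The same pattern in isolation (only the ``_ems2brokerd_ids`` name mirrors the real table; the rest is illustrative):

    import logging

    log = logging.getLogger('ems-sketch')
    _ems2brokerd_ids: dict[str, int | str] = {}

    def on_ack(oid: str, reqid: int | str) -> None:
        # a ``BrokerdOrderAck`` establishes the oid -> reqid mapping
        old_reqid = _ems2brokerd_ids.get(oid)
        if old_reqid and old_reqid != reqid:
            # brokers may re-issue ids (eg. on modify); warn like the new code
            log.warning(
                f'Brokerd order id change for {oid}: {old_reqid} -> {reqid}'
            )
        _ems2brokerd_ids[oid] = reqid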
@@ -834,14 +859,14 @@ async def process_client_order_cmds(

             # NOTE: cancel response will be relayed back in messages
             # from corresponding broker
-            if reqid:
+            if reqid is not None:

                 # send cancel to brokerd immediately!
                 log.info(
                     f'Submitting cancel for live order {reqid}'
                 )

-                await brokerd_order_stream.send(msg.dict())
+                await brokerd_order_stream.send(msg)

             else:
                 # this might be a cancel for an order that hasn't been

@@ -863,7 +888,7 @@ async def process_client_order_cmds(
                         resp='dark_cancelled',
                         oid=oid,
                         time_ns=time.time_ns(),
-                    ).dict()
+                    )
                 )
                 # de-register this client dialogue
                 router.dialogues.pop(oid)

@@ -918,7 +943,7 @@ async def process_client_order_cmds(
                 # handle relaying the ems side responses back to
                 # the client/cmd sender from this request
                 log.info(f'Sending live order to {broker}:\n{pformat(msg)}')
-                await brokerd_order_stream.send(msg.dict())
+                await brokerd_order_stream.send(msg)

                 # an immediate response should be ``BrokerdOrderAck``
                 # with ems order id from the ``trades_dialogue()``

@@ -943,6 +968,12 @@ async def process_client_order_cmds(
                 # like every other shitty tina platform that makes
                 # the user choose the predicate operator.
                 last = dark_book.lasts[fqsn]
+
+                # sometimes the real-time feed hasn't come up
+                # so just pull from the latest history.
+                if isnan(last):
+                    last = feed.shm.array[-1]['close']
+
                 pred = mk_check(trigger_price, last, action)

                 spread_slap: float = 5

@@ -992,7 +1023,7 @@ async def process_client_order_cmds(
                         resp=resp,
                         oid=oid,
                         time_ns=time.time_ns(),
-                    ).dict()
+                    )
                 )
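The -943 hunk adds a fallback for dark-order triggers placed before the real-time feed has produced a price: a NaN "last" is replaced with the latest historical close. A toy illustration; the ``mk_check`` body below is guessed from context and is not the real helper:

    from math import isnan, nan

    def mk_check(trigger: float, known_last: float, action: str):
        # assumed behaviour: pick a >= or <= predicate based on which
        # side of the known last price the trigger sits on
        if trigger >= known_last:
            return lambda price: price >= trigger
        return lambda price: price <= trigger

    last = nan  # pretend the rt feed hasn't delivered a quote yet
    if isnan(last):
        last = 29_000.0  # stand-in for feed.shm.array[-1]['close']

    pred = mk_check(30_000.0, last, 'buy')
    assert pred(30_100.0) and not pred(29_900.0)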
@@ -1001,8 +1032,8 @@ async def _emsd_main(

     ctx: tractor.Context,
     fqsn: str,
+    exec_mode: str,  # ('paper', 'live')

-    _exec_mode: str = 'dark',  # ('paper', 'dark', 'live')
     loglevel: str = 'info',

 ) -> None:

@@ -1078,7 +1109,7 @@ async def _emsd_main(
                 feed,
                 symbol,
                 dark_book,
-                _exec_mode,
+                exec_mode,
                 loglevel,

             ) as relay,

@@ -1088,15 +1119,12 @@ async def _emsd_main(

         brokerd_stream = relay.brokerd_dialogue  # .clone()

-        # flatten out collected pps from brokerd for delivery
-        pp_msgs = {
-            fqsn: list(pps.values())
-            for fqsn, pps in relay.positions.items()
-        }
-
         # signal to client that we're started and deliver
         # all known pps and accounts for this ``brokerd``.
-        await ems_ctx.started((pp_msgs, list(relay.accounts)))
+        await ems_ctx.started((
+            relay.positions,
+            list(relay.accounts),
+        ))

         # establish 2-way stream with requesting order-client and
         # begin handling inbound order requests and updates

@@ -1133,8 +1161,14 @@ async def _emsd_main(
             )

         finally:
-            # remove client from "registry"
-            _router.clients.remove(ems_client_order_stream)
+            # try to remove client from "registry"
+            try:
+                _router.clients.remove(ems_client_order_stream)
+            except KeyError:
+                log.warning(
+                    f'Stream {ems_client_order_stream._ctx.chan.uid}'
+                    ' was already dropped?'
+                )

             dialogues = _router.dialogues
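The teardown hunk makes client de-registration idempotent: a stream that already vanished (eg. after a disconnect race) now just logs a warning instead of raising out of the ``finally:`` block. The same shape stand-alone:

    clients: set[str] = {'stream-a'}

    def drop_client(stream: str) -> None:
        try:
            clients.remove(stream)
        except KeyError:
            print(f'Stream {stream} was already dropped?')

    drop_client('stream-a')
    drop_client('stream-a')  # second call warns instead of crashing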
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -15,21 +15,26 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 """
-Clearing system messagingn types and protocols.
+Clearing sub-system message and protocols.

 """
 from typing import Optional, Union

-# TODO: try out just encoding/send direction for now?
-# import msgspec
-from pydantic import BaseModel
-
 from ..data._source import Symbol
+from ..data.types import Struct


+# TODO: ``msgspec`` stuff worth paying attention to:
+# - schema evolution: https://jcristharif.com/msgspec/usage.html#schema-evolution
+# - use literals for a common msg determined by diff keys?
+#   - https://jcristharif.com/msgspec/usage.html#literal
+# - for eg. ``BrokerdStatus``, instead just have separate messages?

+# --------------
 # Client -> emsd
+# --------------

-class Cancel(BaseModel):
+class Cancel(Struct):
     '''Cancel msg for removing a dark (ems triggered) or
     broker-submitted (live) trigger/order.

@@ -39,8 +44,10 @@ class Cancel(BaseModel):
     symbol: str


-class Order(BaseModel):
+class Order(Struct):

+    # TODO: use ``msgspec.Literal``
+    # https://jcristharif.com/msgspec/usage.html#literal
     action: str  # {'buy', 'sell', 'alert'}
     # internal ``emdsd`` unique "order id"
     oid: str  # uuid4

@@ -48,6 +55,9 @@ class Order(BaseModel):
     account: str  # should we set a default as '' ?

     price: float
+    # TODO: could we drop the ``.action`` field above and instead just
+    # use +/- values here? Would make the msg smaller at the sake of a
+    # teensie fp precision?
     size: float
     brokers: list[str]

@@ -59,20 +69,14 @@ class Order(BaseModel):
     # the backend broker
     exec_mode: str  # {'dark', 'live', 'paper'}

-    class Config:
-        # just for pre-loading a ``Symbol`` when used
-        # in the order mode staging process
-        arbitrary_types_allowed = True
-        # don't copy this model instance when used in
-        # a recursive model
-        copy_on_model_validation = False

+# --------------
 # Client <- emsd
+# --------------
 # update msgs from ems which relay state change info
 # from the active clearing engine.

-class Status(BaseModel):
+class Status(Struct):

     name: str = 'status'
     oid: str  # uuid4

@@ -95,8 +99,6 @@ class Status(BaseModel):
     # }
     resp: str  # "response", see above

-    # symbol: str
-
     # trigger info
     trigger_price: Optional[float] = None
     # price: float

@@ -111,10 +113,12 @@ class Status(BaseModel):
     brokerd_msg: dict = {}


+# ---------------
 # emsd -> brokerd
+# ---------------
 # requests *sent* from ems to respective backend broker daemon

-class BrokerdCancel(BaseModel):
+class BrokerdCancel(Struct):

     action: str = 'cancel'
     oid: str  # piker emsd order id

@@ -130,7 +134,7 @@ class BrokerdCancel(BaseModel):
     reqid: Optional[Union[int, str]] = None


-class BrokerdOrder(BaseModel):
+class BrokerdOrder(Struct):

     action: str  # {buy, sell}
     oid: str

@@ -150,11 +154,12 @@ class BrokerdOrder(BaseModel):
     size: float


+# ---------------
 # emsd <- brokerd
+# ---------------
 # requests *received* to ems from broker backend

-class BrokerdOrderAck(BaseModel):
+class BrokerdOrderAck(Struct):
     '''
     Immediate reponse to a brokerd order request providing the broker
     specific unique order id so that the EMS can associate this

@@ -172,7 +177,7 @@ class BrokerdOrderAck(BaseModel):
     account: str = ''


-class BrokerdStatus(BaseModel):
+class BrokerdStatus(Struct):

     name: str = 'status'
     reqid: Union[int, str]

@@ -181,6 +186,7 @@ class BrokerdStatus(BaseModel):
     # XXX: should be best effort set for every update
     account: str = ''

+    # TODO: instead (ack, pending, open, fill, clos(ed), cancelled)
     # {
     # 'submitted',
     # 'cancelled',

@@ -205,7 +211,7 @@ class BrokerdStatus(BaseModel):
     }


-class BrokerdFill(BaseModel):
+class BrokerdFill(Struct):
     '''
     A single message indicating a "fill-details" event from the broker
     if avaiable.

@@ -216,10 +222,13 @@ class BrokerdFill(BaseModel):
     time_ns: int

     # order exeuction related
-    action: str
     size: float
     price: float

+    # TODO: pretty sure we can just remove this and instaed use
+    # +/- size values right?
+    action: Optional[str] = None
+
     broker_details: dict = {}  # meta-data (eg. commisions etc.)

     # brokerd timestamp required for order mode arrow placement on x-axis

@@ -230,7 +239,7 @@ class BrokerdFill(BaseModel):
     broker_time: float


-class BrokerdError(BaseModel):
+class BrokerdError(Struct):
     '''
     Optional error type that can be relayed to emsd for error handling.

@@ -249,7 +258,7 @@ class BrokerdError(BaseModel):
     broker_details: dict = {}


-class BrokerdPosition(BaseModel):
+class BrokerdPosition(Struct):
     '''Position update event from brokerd.

     '''

@@ -258,6 +267,6 @@ class BrokerdPosition(BaseModel):
     broker: str
     account: str
     symbol: str
-    currency: str
     size: float
     avg_price: float
+    currency: str = ''
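With every message class above moved off ``pydantic.BaseModel`` onto the msgspec-backed ``Struct`` (added later in this diff), the ``.dict()`` conversions scattered through the ems and paper-engine code become unnecessary: the structs encode natively. A pared-down sketch, not the full field set of the real ``BrokerdOrderAck``:

    import msgspec

    class BrokerdOrderAck(msgspec.Struct):
        oid: str            # ems order request id
        reqid: int | str    # broker specific request id
        account: str = ''

    ack = BrokerdOrderAck(oid='uuid-1234', reqid=42)
    wire = msgspec.msgpack.encode(ack)
    assert msgspec.msgpack.decode(wire, type=BrokerdOrderAck) == ack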
@@ -22,15 +22,25 @@ from contextlib import asynccontextmanager
 from datetime import datetime
 from operator import itemgetter
 import time
-from typing import Tuple, Optional, Callable
+from typing import (
+    Any,
+    Optional,
+    Callable,
+)
 import uuid

 from bidict import bidict
+import pendulum
 import trio
 import tractor
 from dataclasses import dataclass

 from .. import data
+from ..data._source import Symbol
+from ..pp import (
+    Position,
+    Transaction,
+)
 from ..data._normalize import iterticks
 from ..data._source import unpack_fqsn
 from ..log import get_logger

@@ -61,11 +71,12 @@ class PaperBoi:
     _buys: bidict
     _sells: bidict
     _reqids: bidict
-    _positions: dict[str, BrokerdPosition]
+    _positions: dict[str, Position]
+    _trade_ledger: dict[str, Any]

     # init edge case L1 spread
-    last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size
-    last_bid: Tuple[float, float] = (0, 0)
+    last_ask: tuple[float, float] = (float('inf'), 0)  # price, size
+    last_bid: tuple[float, float] = (0, 0)

     async def submit_limit(
         self,

@@ -75,20 +86,21 @@ class PaperBoi:
         action: str,
         size: float,
         reqid: Optional[str],

     ) -> int:
-        """Place an order and return integer request id provided by client.
-
-        """
+        '''
+        Place an order and return integer request id provided by client.
+
+        '''
         is_modify: bool = False
-        if reqid is None:
-            reqid = str(uuid.uuid4())
-
-        else:
+
+        entry = self._reqids.get(reqid)
+        if entry:
             # order is already existing, this is a modify
-            (oid, symbol, action, old_price) = self._reqids[reqid]
+            (oid, symbol, action, old_price) = entry
             assert old_price != price
             is_modify = True
+        else:
             # register order internally
             self._reqids[reqid] = (oid, symbol, action, price)

@@ -109,13 +121,14 @@ class PaperBoi:
         msg = BrokerdStatus(
             status='submitted',
             reqid=reqid,
-            broker=self.broker,
             time_ns=time.time_ns(),
             filled=0.0,
             reason='paper_trigger',
             remaining=size,
+
+            broker_details={'name': 'paperboi'},
         )
-        await self.ems_trades_stream.send(msg.dict())
+        await self.ems_trades_stream.send(msg)

         # if we're already a clearing price simulate an immediate fill
         if (

@@ -166,12 +179,11 @@ class PaperBoi:

         msg = BrokerdStatus(
             status='cancelled',
-            oid=oid,
             reqid=reqid,
-            broker=self.broker,
             time_ns=time.time_ns(),
+            broker_details={'name': 'paperboi'},
         )
-        await self.ems_trades_stream.send(msg.dict())
+        await self.ems_trades_stream.send(msg)

     async def fake_fill(
         self,

@@ -195,16 +207,15 @@ class PaperBoi:
         """
         # TODO: net latency model
         await trio.sleep(0.05)
+        fill_time_ns = time.time_ns()
+        fill_time_s = time.time()

-        msg = BrokerdFill(
-
+        fill_msg = BrokerdFill(
             reqid=reqid,
-            time_ns=time.time_ns(),
+            time_ns=fill_time_ns,

             action=action,
             size=size,
             price=price,

             broker_time=datetime.now().timestamp(),
             broker_details={
                 'paper_info': {

@@ -214,7 +225,9 @@ class PaperBoi:
                 'name': self.broker + '_paper',
             },
         )
-        await self.ems_trades_stream.send(msg.dict())
+        await self.ems_trades_stream.send(fill_msg)
+
+        self._trade_ledger.update(fill_msg.to_dict())

         if order_complete:
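Note that ``submit_limit()`` no longer mints its own uuid: the reqid now arrives pre-allocated from ``handle_order_requests()`` (reworked further down), and a hit in the ``_reqids`` table marks the call as a price modify. A stand-alone rendition of that branch logic; the tuple layout mirrors the diff but the function itself is illustrative:

    import uuid

    _reqids: dict[str, tuple[str, str, str, float]] = {}

    def submit_limit(
        oid: str,
        symbol: str,
        action: str,
        price: float,
        reqid: str | None,
    ) -> tuple[str, bool]:
        entry = _reqids.get(reqid)
        if entry:
            # known reqid -> this is a modify of a live order
            (oid, symbol, action, old_price) = entry
            assert old_price != price
            return reqid, True

        # fresh request: register the id -> order mapping
        reqid = reqid or str(uuid.uuid4())
        _reqids[reqid] = (oid, symbol, action, price)
        return reqid, False

    rid, is_modify = submit_limit('oid-1', 'xbtusd', 'buy', 100.0, None)
    assert not is_modify
    assert submit_limit('oid-1', 'xbtusd', 'buy', 101.0, rid) == (rid, True)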
@@ -227,24 +240,41 @@ class PaperBoi:
                 filled=size,
                 remaining=0 if order_complete else remaining,

-                action=action,
-                size=size,
-                price=price,
-
                 broker_details={
                     'paper_info': {
                         'oid': oid,
                     },
+                    'action': action,
+                    'size': size,
+                    'price': price,
                     'name': self.broker,
                 },
             )
-            await self.ems_trades_stream.send(msg.dict())
+            await self.ems_trades_stream.send(msg)

         # lookup any existing position
         token = f'{symbol}.{self.broker}'
-        pp_msg = self._positions.setdefault(
+        pp = self._positions.setdefault(
             token,
-            BrokerdPosition(
+            Position(
+                Symbol(key=symbol),
+                size=size,
+                ppu=price,
+                bsuid=symbol,
+            )
+        )
+        t = Transaction(
+            fqsn=symbol,
+            tid=oid,
+            size=size,
+            price=price,
+            cost=0,  # TODO: cost model
+            dt=pendulum.from_timestamp(fill_time_s),
+            bsuid=symbol,
+        )
+        pp.add_clear(t)
+
+        pp_msg = BrokerdPosition(
             broker=self.broker,
             account='paper',
             symbol=symbol,

@@ -252,41 +282,17 @@ class PaperBoi:
             # broker info. i guess for crypto this can be
             # inferred from the pair?
             currency='',
-            size=0.0,
-            avg_price=0,
-            )
+            size=pp.size,
+            avg_price=pp.ppu,
         )

-        # "avg position price" calcs
-        # TODO: eventually it'd be nice to have a small set of routines
-        # to do this stuff from a sequence of cleared orders to enable
-        # so called "contextual positions".
-        new_size = size + pp_msg.size
-
-        # old size minus the new size gives us size differential with
-        # +ve -> increase in pp size
-        # -ve -> decrease in pp size
-        size_diff = abs(new_size) - abs(pp_msg.size)
-
-        if new_size == 0:
-            pp_msg.avg_price = 0
-
-        elif size_diff > 0:
-            # only update the "average position price" when the position
-            # size increases not when it decreases (i.e. the position is
-            # being made smaller)
-            pp_msg.avg_price = (
-                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
-            ) / abs(new_size)
-
-        pp_msg.size = new_size
-
-        await self.ems_trades_stream.send(pp_msg.dict())
+        await self.ems_trades_stream.send(pp_msg)


 async def simulate_fills(
     quote_stream: 'tractor.ReceiveStream',  # noqa
     client: PaperBoi,

 ) -> None:

     # TODO: more machinery to better simulate real-world market things:
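The large removal above relocates the inline "avg position price" arithmetic into the new ``Position.add_clear()`` / ``ppu`` machinery from the position-accounting module introduced at the bottom of this diff. For reference, a self-contained re-derivation of what that calc does (an illustrative mini class, not the real ``Position``):

    class MiniPosition:
        def __init__(self) -> None:
            self.size = 0.0
            self.ppu = 0.0  # per-unit ("average") position price

        def add_clear(self, size: float, price: float) -> None:
            new_size = self.size + size
            if new_size == 0:
                self.ppu = 0.0
            elif abs(new_size) > abs(self.size):
                # only re-average when the position grows; reductions
                # keep the existing per-unit price
                self.ppu = (
                    abs(size) * price + self.ppu * abs(self.size)
                ) / abs(new_size)
            self.size = new_size

    pp = MiniPosition()
    pp.add_clear(2, 100.0)
    pp.add_clear(2, 110.0)
    assert (pp.size, pp.ppu) == (4, 105.0)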
@@ -390,18 +396,37 @@ async def handle_order_requests(
             account = request_msg['account']
             if account != 'paper':
                 log.error(
-                    'This is a paper account, only a `paper` selection is valid'
+                    'This is a paper account,'
+                    ' only a `paper` selection is valid'
                 )
                 await ems_order_stream.send(BrokerdError(
                     oid=request_msg['oid'],
                     symbol=request_msg['symbol'],
                     reason=f'Paper only. No account found: `{account}` ?',
-                ).dict())
+                ))
                 continue

             # validate
             order = BrokerdOrder(**request_msg)

+            if order.reqid is None:
+                reqid = str(uuid.uuid4())
+            else:
+                reqid = order.reqid
+
+            # deliver ack that order has been submitted to broker routing
+            await ems_order_stream.send(
+                BrokerdOrderAck(
+
+                    # ems order request id
+                    oid=order.oid,
+
+                    # broker specific request id
+                    reqid=reqid,
+
+                )
+            )
+
             # call our client api to submit the order
             reqid = await client.submit_limit(

@@ -415,20 +440,7 @@ async def handle_order_requests(
                 # there is no existing order so ask the client to create
                 # a new one (which it seems to do by allocating an int
                 # counter - collision prone..)
-                reqid=order.reqid,
-            )
-
-            # deliver ack that order has been submitted to broker routing
-            await ems_order_stream.send(
-                BrokerdOrderAck(
-
-                    # ems order request id
-                    oid=order.oid,
-
-                    # broker specific request id
-                    reqid=reqid,
-
-                ).dict()
+                reqid=reqid,
             )

         elif action == 'cancel':

@@ -454,7 +466,6 @@ async def trades_dialogue(
     tractor.log.get_console_log(loglevel)

     async with (
-
         data.open_feed(
             [fqsn],
             loglevel=loglevel,

@@ -464,13 +475,12 @@ async def trades_dialogue(
         # TODO: load paper positions per broker from .toml config file
         # and pass as symbol to position data mapping: ``dict[str, dict]``
         # await ctx.started(all_positions)
-        await ctx.started(({}, {'paper',}))
+        await ctx.started(({}, ['paper']))

         async with (
             ctx.open_stream() as ems_stream,
             trio.open_nursery() as n,
         ):
-
             client = PaperBoi(
                 broker,
                 ems_stream,

@@ -481,9 +491,16 @@ async def trades_dialogue(

                 # TODO: load paper positions from ``positions.toml``
                 _positions={},
+
+                # TODO: load postions from ledger file
+                _trade_ledger={},
             )

-            n.start_soon(handle_order_requests, client, ems_stream)
+            n.start_soon(
+                handle_order_requests,
+                client,
+                ems_stream,
+            )

             # paper engine simulator clearing task
             await simulate_fills(feed.stream, client)

@@ -511,6 +528,7 @@ async def open_paperboi(
     # (we likely don't need more then one proc for basic
     # simulated order clearing)
     if portal is None:
+        log.info('Starting new paper-engine actor')
         portal = await tn.start_actor(
             service_name,
             enable_modules=[__name__]

@@ -523,5 +541,4 @@ async def open_paperboi(
             loglevel=loglevel,

         ) as (ctx, first):
-
             yield ctx, first
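The key reorder in ``handle_order_requests()`` above: the broker-side reqid is allocated (or reused) before calling ``client.submit_limit()``, so the ``BrokerdOrderAck`` can be streamed back to the ems immediately. A rough trio rendition of that sequencing; the stream and client here are stand-ins:

    import uuid
    import trio

    async def handle_order(order_reqid, oid, ems_send, submit_limit) -> None:
        reqid = order_reqid if order_reqid is not None else str(uuid.uuid4())
        # the ack goes out first ...
        await ems_send({'name': 'ack', 'oid': oid, 'reqid': reqid})
        # ... then the (possibly slow) submit, reusing that same id
        await submit_limit(reqid)

    async def main() -> None:
        events = []
        async def ems_send(msg):
            events.append(('sent', msg))
        async def submit_limit(reqid):
            events.append(('submitted', reqid))
        await handle_order(None, 'oid-1', ems_send, submit_limit)
        assert [e[0] for e in events] == ['sent', 'submitted']

    trio.run(main)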
@@ -83,9 +83,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb):

         )
         log.info(
-            f'`marketstore` up!\n'
-            f'`marketstored` pid: {pid}\n'
-            f'docker container id: {cid}\n'
+            f'`marketstored` up!\n'
+            f'pid: {pid}\n'
+            f'container id: {cid[:12]}\n'
             f'config: {pformat(config)}'
         )
@@ -21,6 +21,7 @@ Broker configuration mgmt.
 import platform
 import sys
 import os
+from os import path
 from os.path import dirname
 import shutil
 from typing import Optional

@@ -111,6 +112,7 @@ if _parent_user:

 _conf_names: set[str] = {
     'brokers',
+    'pps',
     'trades',
     'watchlists',
 }

@@ -147,19 +149,21 @@ def get_conf_path(
     conf_name: str = 'brokers',

 ) -> str:
-    """Return the default config path normally under
-    ``~/.config/piker`` on linux.
+    '''
+    Return the top-level default config path normally under
+    ``~/.config/piker`` on linux for a given ``conf_name``, the config
+    name.

     Contains files such as:
     - brokers.toml
+    - pp.toml
     - watchlists.toml
-    - trades.toml

     # maybe coming soon ;)
     - signals.toml
     - strats.toml

-    """
+    '''
     assert conf_name in _conf_names
     fn = _conf_fn_w_ext(conf_name)
     return os.path.join(

@@ -173,7 +177,7 @@ def repodir():
     Return the abspath to the repo directory.

     '''
-    dirpath = os.path.abspath(
+    dirpath = path.abspath(
         # we're 3 levels down in **this** module file
         dirname(dirname(os.path.realpath(__file__)))
     )

@@ -182,7 +186,9 @@ def repodir():

 def load(
     conf_name: str = 'brokers',
-    path: str = None
+    path: str = None,
+
+    **tomlkws,

 ) -> (dict, str):
     '''

@@ -190,6 +196,7 @@ def load(

     '''
     path = path or get_conf_path(conf_name)
+
     if not os.path.isfile(path):
         fn = _conf_fn_w_ext(conf_name)

@@ -202,8 +209,11 @@ def load(
         # if one exists.
         if os.path.isfile(template):
             shutil.copyfile(template, path)
+        else:
+            with open(path, 'w'):
+                pass  # touch

-    config = toml.load(path)
+    config = toml.load(path, **tomlkws)
     log.debug(f"Read config file {path}")
     return config, path

@@ -212,6 +222,7 @@ def write(
     config: dict,  # toml config as dict
     name: str = 'brokers',
     path: str = None,
+    **toml_kwargs,

 ) -> None:
     ''''

@@ -235,11 +246,14 @@ def write(
         f"{path}"
     )
     with open(path, 'w') as cf:
-        return toml.dump(config, cf)
+        return toml.dump(
+            config,
+            cf,
+            **toml_kwargs,
+        )


 def load_accounts(

     providers: Optional[list[str]] = None

 ) -> bidict[str, Optional[str]]:
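The new ``**tomlkws`` / ``**toml_kwargs`` passthroughs let callers forward options straight to ``toml.load()`` and ``toml.dump()``, eg. the ``toml`` package's ``_dict`` container-factory argument. A minimal demo against a throwaway file:

    from collections import OrderedDict
    import os
    import tempfile

    import toml

    path = os.path.join(tempfile.mkdtemp(), 'brokers.toml')
    with open(path, 'w') as cf:
        toml.dump({'kraken': {'api_key': 'abc'}}, cf)

    # the same call shape ``load(..., **tomlkws)`` now permits:
    config = toml.load(path, _dict=OrderedDict)
    assert isinstance(config, OrderedDict)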
@@ -37,8 +37,13 @@ from docker.models.containers import Container as DockerContainer
 from docker.errors import (
     DockerException,
     APIError,
+    # ContainerError,
+)
+import requests
+from requests.exceptions import (
+    ConnectionError,
+    ReadTimeout,
 )
-from requests.exceptions import ConnectionError, ReadTimeout

 from ..log import get_logger, get_console_log
 from .. import config

@@ -50,8 +55,8 @@ class DockerNotStarted(Exception):
     'Prolly you dint start da daemon bruh'


-class ContainerError(RuntimeError):
-    'Error reported via app-container logging level'
+class ApplicationLogError(Exception):
+    'App in container reported an error in logs'


 @acm

@@ -96,9 +101,9 @@ async def open_docker(
             # not perms?
             raise

-    finally:
-        if client:
-            client.close()
+    # finally:
+    #     if client:
+    #         client.close()


 class Container:

@@ -156,7 +161,7 @@ class Container:

         # print(f'level: {level}')
         if level in ('error', 'fatal'):
-            raise ContainerError(msg)
+            raise ApplicationLogError(msg)

         if patt in msg:
             return True

@@ -185,12 +190,29 @@ class Container:
             if 'is not running' in err.explanation:
                 return False

+    def hard_kill(self, start: float) -> None:
+        delay = time.time() - start
+        # get out the big guns, bc apparently marketstore
+        # doesn't actually know how to terminate gracefully
+        # :eyeroll:...
+        log.error(
+            f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
+        )
+        self.try_signal('SIGKILL')
+        self.cntr.wait(
+            timeout=3,
+            condition='not-running',
+        )
+
     async def cancel(
         self,
         stop_msg: str,
+        hard_kill: bool = False,

     ) -> None:

         cid = self.cntr.id

         # first try a graceful cancel
         log.cancel(
             f'SIGINT cancelling container: {cid}\n'

@@ -199,16 +221,26 @@ class Container:
         self.try_signal('SIGINT')

         start = time.time()
-        for _ in range(30):
+        for _ in range(6):

             with trio.move_on_after(0.5) as cs:
-                cs.shield = True
-                await self.process_logs_until(stop_msg)
+                log.cancel('polling for CNTR logs...')

-                # if we aren't cancelled on above checkpoint then we
-                # assume we read the expected stop msg and terminated.
-                break
+                try:
+                    await self.process_logs_until(stop_msg)
+                except ApplicationLogError:
+                    hard_kill = True
+                else:
+                    # if we aren't cancelled on above checkpoint then we
+                    # assume we read the expected stop msg and
+                    # terminated.
+                    break
+
+            if cs.cancelled_caught:
+                # on timeout just try a hard kill after
+                # a quick container sync-wait.
+                hard_kill = True

             try:
                 log.info(f'Polling for container shutdown:\n{cid}')

@@ -218,6 +250,7 @@ class Container:
                     condition='not-running',
                 )

+                # graceful exit if we didn't time out
                 break

             except (

@@ -229,24 +262,22 @@ class Container:
             except (
                 docker.errors.APIError,
                 ConnectionError,
+                requests.exceptions.ConnectionError,
+                trio.Cancelled,
             ):
                 log.exception('Docker connection failure')
-                break
-        else:
-            delay = time.time() - start
-            log.error(
-                f'Failed to kill container {cid} after {delay}s\n'
-                'sending SIGKILL..'
-            )
-            # get out the big guns, bc apparently marketstore
-            # doesn't actually know how to terminate gracefully
-            # :eyeroll:...
-            self.try_signal('SIGKILL')
-            self.cntr.wait(
-                timeout=3,
-                condition='not-running',
-            )
+                self.hard_kill(start)
+                raise

+            except trio.Cancelled:
+                log.exception('trio cancelled...')
+                self.hard_kill(start)
+        else:
+            hard_kill = True
+
+        if hard_kill:
+            self.hard_kill(start)
+        else:
             log.cancel(f'Container stopped: {cid}')

@@ -289,14 +320,12 @@ async def open_ahabd(
     ))

     try:
-
         # TODO: we might eventually want a proxy-style msg-prot here
         # to allow remote control of containers without needing
         # callers to have root perms?
         await trio.sleep_forever()

     finally:
-        with trio.CancelScope(shield=True):
-            await cntr.cancel(stop_msg)
+        await cntr.cancel(stop_msg)
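The container-cancel rework above boils down to: poll for the expected stop msg a few times under short timeouts, and escalate to the extracted ``hard_kill()`` on app-log errors, timeouts, or cancellation. A toy trio rendition of that escalation shape with the docker bits stubbed out:

    import time
    import trio

    async def cancel(poll_logs, soft_signal, hard_kill) -> None:
        soft_signal('SIGINT')
        start = time.time()
        hard = False
        for _ in range(6):
            with trio.move_on_after(0.5) as cs:
                try:
                    await poll_logs()  # look for the expected stop msg
                except RuntimeError:   # app reported an error in its logs
                    hard = True
                else:
                    break
            if cs.cancelled_caught:    # timed out waiting on the stop msg
                hard = True
        if hard:
            hard_kill(time.time() - start)

    async def main() -> None:
        async def poll_logs():  # pretend the stop msg shows up right away
            await trio.sleep(0)
        await cancel(poll_logs, lambda sig: None, lambda delay: None)

    trio.run(main)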
@@ -56,7 +56,7 @@ def iterticks(
             sig = (
                 time,
                 tick['price'],
-                tick['size']
+                tick.get('size')
             )

             if ttype == 'dark_trade':
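Making the dedup signature use ``tick.get('size')`` means ticks without a volume field no longer blow up the iterator; the missing key just becomes ``None`` in the tuple:

    tick = {'type': 'trade', 'price': 101.25}       # no 'size' key
    sig = (0, tick['price'], tick.get('size'))      # -> (0, 101.25, None)
    assert sig[-1] is None                          # instead of a KeyError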
@@ -27,13 +27,14 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
 if _USE_POSIX:
     from _posixshmem import shm_unlink

-import tractor
+# import msgspec
 import numpy as np
-from pydantic import BaseModel
 from numpy.lib import recfunctions as rfn
+import tractor

 from ..log import get_logger
 from ._source import base_iohlc_dtype
+from .types import Struct


 log = get_logger(__name__)

@@ -107,15 +108,12 @@ class SharedInt:
         log.warning(f'Shm for {name} already unlinked?')


-class _Token(BaseModel):
+class _Token(Struct, frozen=True):
     '''
     Internal represenation of a shared memory "token"
     which can be used to key a system wide post shm entry.

     '''
-    class Config:
-        frozen = True
-
     shm_name: str  # this servers as a "key" value
     shm_first_index_name: str
     shm_last_index_name: str

@@ -126,17 +124,22 @@ class _Token(BaseModel):
         return np.dtype(list(map(tuple, self.dtype_descr))).descr

     def as_msg(self):
-        return self.dict()
+        return self.to_dict()

     @classmethod
     def from_msg(cls, msg: dict) -> _Token:
         if isinstance(msg, _Token):
             return msg

+        # TODO: native struct decoding
+        # return _token_dec.decode(msg)
+
         msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
         return _Token(**msg)


+# _token_dec = msgspec.msgpack.Decoder(_Token)
+
 # TODO: this api?
 # _known_tokens = tractor.ActorVar('_shm_tokens', {})
 # _known_tokens = tractor.ContextStack('_known_tokens', )

@@ -167,7 +170,7 @@ def _make_token(
         shm_name=key,
         shm_first_index_name=key + "_first",
         shm_last_index_name=key + "_last",
-        dtype_descr=np.dtype(dtype).descr
+        dtype_descr=tuple(np.dtype(dtype).descr)
     )
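The ``_Token`` migration keeps the frozen/hashable semantics the old pydantic ``Config`` provided, but via msgspec's ``frozen=True`` class kwarg, and the ``tuple(...)`` wrap on ``dtype_descr`` keeps that field itself hashable. A pared-down mirror (not the real class):

    import msgspec

    class _Token(msgspec.Struct, frozen=True):
        shm_name: str
        dtype_descr: tuple

    tok = _Token('quotes.xbtusd', (('time', '<f8'), ('close', '<f8')))
    assert {tok: 'cached'}[tok] == 'cached'  # hashable -> usable as a dict key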
@@ -23,7 +23,7 @@ import decimal

 from bidict import bidict
 import numpy as np
-from pydantic import BaseModel
+from msgspec import Struct
 # from numba import from_dtype


@@ -126,7 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
     )


-class Symbol(BaseModel):
+class Symbol(Struct):
     '''
     I guess this is some kinda container thing for dealing with
     all the different meta-data formats from brokers?

@@ -152,9 +152,7 @@ class Symbol(BaseModel):
         info: dict[str, Any],
         suffix: str = '',

-        # XXX: like wtf..
-        # ) -> 'Symbol':
-    ) -> None:
+    ) -> Symbol:

         tick_size = info.get('price_tick_size', 0.01)
         lot_tick_size = info.get('lot_tick_size', 0.0)

@@ -175,9 +173,7 @@ class Symbol(BaseModel):
         fqsn: str,
         info: dict[str, Any],

-        # XXX: like wtf..
-        # ) -> 'Symbol':
-    ) -> None:
+    ) -> Symbol:
         broker, key, suffix = unpack_fqsn(fqsn)
         return cls.from_broker_info(
             broker,

@@ -240,7 +236,7 @@ class Symbol(BaseModel):

         '''
         tokens = self.tokens()
-        fqsn = '.'.join(tokens)
+        fqsn = '.'.join(map(str.lower, tokens))
         return fqsn

     def iterfqsns(self) -> list[str]:
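The ``front_fqsn`` hunk lower-cases every token before joining, so a mixed-case broker/symbol pair always renders one canonical fqsn:

    tokens = ('XBTUSD', 'kraken')
    assert '.'.join(map(str.lower, tokens)) == 'xbtusd.kraken'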
@@ -53,13 +53,11 @@ class NoBsWs:
     def __init__(
         self,
         url: str,
-        token: str,
         stack: AsyncExitStack,
         fixture: Callable,
         serializer: ModuleType = json,
     ):
         self.url = url
-        self.token = token
         self.fixture = fixture
         self._stack = stack
         self._ws: 'WebSocketConnection' = None  # noqa

@@ -83,14 +81,9 @@ class NoBsWs:
                 trio_websocket.open_websocket_url(self.url)
             )
             # rerun user code fixture
-            if self.token == '':
-                ret = await self._stack.enter_async_context(
-                    self.fixture(self)
-                )
-            else:
-                ret = await self._stack.enter_async_context(
-                    self.fixture(self, self.token)
-                )
+            ret = await self._stack.enter_async_context(
+                self.fixture(self)
+            )

             assert ret is None

@@ -135,14 +128,13 @@ async def open_autorecon_ws(

     # TODO: proper type annot smh
     fixture: Callable,
-    # used for authenticated websockets
-    token: str = '',
 ) -> AsyncGenerator[tuple[...], NoBsWs]:
     """Apparently we can QoS for all sorts of reasons..so catch em.

     """
     async with AsyncExitStack() as stack:
-        ws = NoBsWs(url, token, stack, fixture=fixture)
+        ws = NoBsWs(url, stack, fixture=fixture)
         await ws._connect()

         try:
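With the ``token`` parameter dropped, any auth handshake now lives entirely inside the user-supplied ``fixture``, which can simply close over its credentials. An illustrative async-context-manager fixture shape (the auth/subscribe calls here are hypothetical):

    from contextlib import asynccontextmanager

    @asynccontextmanager
    async def subscribe_with_auth(ws, token: str = 'secret-from-config'):
        # hypothetical: authenticate + subscribe over the (re)opened ws
        print(f'authenticating with {token!r} then subscribing..')
        yield
        print('unsubscribing..')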
@@ -42,7 +42,6 @@ from trio_typing import TaskStatus
 import trimeter
 import tractor
 from tractor.trionics import maybe_open_context
-from pydantic import BaseModel
 import pendulum
 import numpy as np

@@ -59,6 +58,7 @@ from ._sharedmem import (
     ShmArray,
 )
 from .ingest import get_ingestormod
+from .types import Struct
 from ._source import (
     base_iohlc_dtype,
     Symbol,

@@ -84,7 +84,7 @@ if TYPE_CHECKING:
 log = get_logger(__name__)


-class _FeedsBus(BaseModel):
+class _FeedsBus(Struct):
     '''
     Data feeds broadcaster and persistence management.

@@ -100,10 +100,6 @@ class _FeedsBus(BaseModel):
     a dedicated cancel scope.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-        underscore_attrs_are_private = False
-
     brokername: str
     nursery: trio.Nursery
     feeds: dict[str, tuple[dict, dict]] = {}

@@ -313,7 +309,7 @@ async def start_backfill(
         # when no tsdb "last datum" is provided, we just load
         # some near-term history.
         periods = {
-            1: {'days': 1},
+            1: {'seconds': 4000},
            60: {'days': 14},
         }
@@ -0,0 +1,75 @@
+# piker: trading gear for hackers
+# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Built-in (extension) types.
+
+"""
+from typing import Optional
+from pprint import pformat
+
+import msgspec
+
+
+class Struct(
+    msgspec.Struct,
+
+    # https://jcristharif.com/msgspec/structs.html#tagged-unions
+    # tag='pikerstruct',
+    # tag=True,
+):
+    '''
+    A "human friendlier" (aka repl buddy) struct subtype.
+
+    '''
+    def to_dict(self) -> dict:
+        return {
+            f: getattr(self, f)
+            for f in self.__struct_fields__
+        }
+
+    def __repr__(self):
+        return f'Struct({pformat(self.to_dict())})'
+
+    def copy(
+        self,
+        update: Optional[dict] = None,
+
+    ) -> msgspec.Struct:
+        '''
+        Validate-typecast all self defined fields, return a copy of us
+        with all such fields.
+
+        This is kinda like the default behaviour in `pydantic.BaseModel`.
+
+        '''
+        if update:
+            for k, v in update.items():
+                setattr(self, k, v)
+
+        # roundtrip serialize to validate
+        return msgspec.msgpack.Decoder(
+            type=type(self)
+        ).decode(
+            msgspec.msgpack.Encoder().encode(self)
+        )
+
+    def typecast(
+        self,
+        # fields: Optional[list[str]] = None,
+    ) -> None:
+        for fname, ftype in self.__annotations__.items():
+            setattr(self, fname, ftype(getattr(self, fname)))
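The roundtrip trick in ``Struct.copy()`` above works because msgspec encodes whatever field values are present but type-checks on decode, so bad field types surface as a validation error instead of propagating silently. A sketch (``ValidationError`` is the exception name in recent msgspec releases):

    import msgspec

    class Order(msgspec.Struct):
        oid: str
        size: float

    o = Order(oid='abc', size='1.5')  # structs don't validate on init..
    try:
        msgspec.msgpack.Decoder(type=Order).decode(
            msgspec.msgpack.Encoder().encode(o)
        )
    except msgspec.ValidationError as err:
        print(f'..but the roundtrip catches it: {err}')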
@@ -78,7 +78,8 @@ class Fsp:
     # + the consuming fsp *to* the consumers output
     #   shm flow.
     _flow_registry: dict[
-        tuple[_Token, str], _Token,
+        tuple[_Token, str],
+        tuple[_Token, Optional[ShmArray]],
     ] = {}

     def __init__(

@@ -120,7 +121,6 @@ class Fsp:
     ):
         return self.func(*args, **kwargs)

-    # TODO: lru_cache this? prettty sure it'll work?
     def get_shm(
         self,
         src_shm: ShmArray,

@@ -131,12 +131,27 @@ class Fsp:
         for this "instance" of a signal processor for
         the given ``key``.

+        The destination shm "token" and array are cached if possible to
+        minimize multiple stdlib/system calls.
+
         '''
-        dst_token = self._flow_registry[
+        dst_token, maybe_array = self._flow_registry[
             (src_shm._token, self.name)
         ]
-        shm = attach_shm_array(dst_token)
-        return shm
+        if maybe_array is None:
+            self._flow_registry[
+                (src_shm._token, self.name)
+            ] = (
+                dst_token,
+                # "cache" the ``ShmArray`` such that
+                # we call the underlying "attach" code as few
+                # times as possible as per:
+                # - https://github.com/pikers/piker/issues/359
+                # - https://github.com/pikers/piker/issues/332
+                maybe_array := attach_shm_array(dst_token)
+            )
+
+        return maybe_array


 def fsp(
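The registry now stores ``(token, maybe_array)`` pairs and lazily fills the array slot on first attach, so repeat ``get_shm()`` calls skip the underlying shm syscalls. A generic rendition of that cache-on-first-read walrus pattern:

    _registry: dict[str, tuple[str, object | None]] = {'flow': ('tok-1', None)}

    def attach(token: str) -> object:
        print(f'expensive attach for {token}')  # syscall-ish work, runs once
        return object()

    def get_shm(key: str) -> object:
        token, maybe_array = _registry[key]
        if maybe_array is None:
            _registry[key] = (token, maybe_array := attach(token))
        return maybe_array

    assert get_shm('flow') is get_shm('flow')  # second call hits the cache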
@@ -114,7 +114,7 @@ async def fsp_compute(
         dict[str, np.ndarray],  # multi-output case
         np.ndarray,  # single output case
     ]
-    history_output = await out_stream.__anext__()
+    history_output = await anext(out_stream)

     func_name = func.__name__
     profiler(f'{func_name} generated history')

@@ -284,9 +284,10 @@ async def cascade(
     # TODO: ugh i hate this wind/unwind to list over the wire
     # but not sure how else to do it.
     for (token, fsp_name, dst_token) in shm_registry:
-        Fsp._flow_registry[
-            (_Token.from_msg(token), fsp_name)
-        ] = _Token.from_msg(dst_token)
+        Fsp._flow_registry[(
+            _Token.from_msg(token),
+            fsp_name,
+        )] = _Token.from_msg(dst_token), None

     fsp: Fsp = reg.get(
         NamespacePath(ns_path)

@@ -374,7 +375,8 @@ async def cascade(
             'key': dst_shm_token,
             'first': dst._first.value,
             'last': dst._last.value,
-        }})
+        }
+    })
     return tracker, index

 def is_synced(
@@ -0,0 +1,894 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
'''
Personal/Private position parsing, calculating, summarizing in a way
that doesn't try to cuk most humans who prefer to not lose their moneys..
(looking at you `ib` and dirt-bird friends)

'''
from contextlib import contextmanager as cm
from pprint import pformat
import os
from os import path
from math import copysign
import re
import time
from typing import (
    Any,
    Optional,
    Union,
)

import pendulum
from pendulum import datetime, now
import tomli
import toml

from . import config
from .brokers import get_brokermod
from .clearing._messages import BrokerdPosition, Status
from .data._source import Symbol
from .log import get_logger
from .data.types import Struct

log = get_logger(__name__)


@cm
def open_trade_ledger(
    broker: str,
    account: str,

) -> str:
    '''
    Idempotently create and read in a trade log file from the
    ``<configuration_dir>/ledgers/`` directory.

    Files are named per broker account of the form
    ``<brokername>_<accountname>.toml``. The ``accountname`` here is the
    name as defined in the user's ``brokers.toml`` config.

    '''
    ldir = path.join(config._config_dir, 'ledgers')
    if not path.isdir(ldir):
        os.makedirs(ldir)

    fname = f'trades_{broker}_{account}.toml'
    tradesfile = path.join(ldir, fname)

    if not path.isfile(tradesfile):
        log.info(
            f'Creating new local trades ledger: {tradesfile}'
        )
        with open(tradesfile, 'w') as cf:
            pass  # touch
    with open(tradesfile, 'rb') as cf:
        start = time.time()
        ledger = tomli.load(cf)
        print(f'Ledger load took {time.time() - start}s')
        cpy = ledger.copy()

    try:
        yield cpy
    finally:
        if cpy != ledger:
            # TODO: show diff output?
            # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
            print(f'Updating ledger for {tradesfile}:\n')
            ledger.update(cpy)

            # we write on close the mutated ledger data
            with open(tradesfile, 'w') as cf:
                toml.dump(ledger, cf)
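
A hypothetical usage of the ledger context manager (broker and account names made up): mutate the yielded copy and any diff against the on-disk state is flushed back to the ``.toml`` file on exit.

    with open_trade_ledger('kraken', 'algotrading') as ledger:
        ledger['some-trade-id'] = {
            'price': 4.2,
            'size': 1000.0,
            'cost': 0.1,
            'dt': '2022-06-01T12:00:00+00:00',
        }
    # on exit the new entry is written to
    # <configuration_dir>/ledgers/trades_kraken_algotrading.toml
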
class Transaction(Struct, frozen=True):
    # TODO: should this be ``.to`` (see below)?
    fqsn: str

    tid: Union[str, int]  # unique transaction id
    size: float
    price: float
    cost: float  # commissions or other additional costs
    dt: datetime
    expiry: Optional[datetime] = None

    # optional key normally derived from the broker
    # backend which ensures the instrument-symbol this record
    # is for is truly unique.
    bsuid: Optional[Union[str, int]] = None

    # optional fqsn for the source "asset"/money symbol?
    # from: Optional[str] = None


class Position(Struct):
    '''
    Basic pp (personal/piker position) model with attached clearing
    transaction history.

    '''
    symbol: Symbol

    # can be +ve or -ve for long/short
    size: float

    # "breakeven price" above or below which pnl moves above and below
    # zero for the entirety of the current "trade state".
    ppu: float

    # unique backend symbol id
    bsuid: str

    # ordered record of known constituent trade messages
    clears: dict[
        Union[str, int, Status],  # trade id
        dict[str, Any],  # transaction history summaries
    ] = {}

    expiry: Optional[datetime] = None

    def to_dict(self) -> dict:
        return {
            f: getattr(self, f)
            for f in self.__struct_fields__
        }

    def to_pretoml(self) -> tuple[str, dict]:
        '''
        Prep this position's data contents for export to toml including
        re-structuring of the ``.clears`` table to an array of
        inline-subtables for better ``pps.toml`` compactness.

        '''
        d = self.to_dict()
        clears = d.pop('clears')
        expiry = d.pop('expiry')

        # TODO: we need to figure out how to have one top level
        # listing venue here even when the backend isn't providing
        # it via the trades ledger..
        # drop symbol obj in serialized form
        s = d.pop('symbol')
        fqsn = s.front_fqsn()

        size = d.pop('size')
        ppu = d.pop('ppu')
        d['size'], d['ppu'] = self.audit_sizing(size, ppu)

        if self.expiry is None:
            d.pop('expiry', None)
        elif expiry:
            d['expiry'] = str(expiry)

        toml_clears_list = []
        for tid, data in sorted(
            list(clears.items()),

            # sort by datetime
            key=lambda item: item[1]['dt'],
        ):
            inline_table = toml.TomlDecoder().get_empty_inline_table()

            inline_table['dt'] = data['dt']

            # insert optional clear fields in column order
            for k in ['ppu', 'accum_size']:
                val = data.get(k)
                if val:
                    inline_table[k] = val

            # insert required fields
            for k in ['price', 'size', 'cost']:
                inline_table[k] = data[k]

            inline_table['tid'] = tid
            toml_clears_list.append(inline_table)

        d['clears'] = toml_clears_list

        return fqsn, d

    def audit_sizing(
        self,
        size: Optional[float] = None,
        ppu: Optional[float] = None,

    ) -> tuple[float, float]:
        '''
        Audit either the `.size` and `.ppu` values or equivalent
        passed in values against the clears table calculations and
        return the calc-ed values if they differ and log warnings to
        console.

        '''
        size = size or self.size
        ppu = ppu or self.ppu
        csize = self.calc_size()
        cppu = self.calc_ppu()

        if size != csize:
            log.warning(f'size != calculated size: {size} != {csize}')
            size = csize

        if ppu != cppu:
            log.warning(
                f'ppu != calculated ppu: {ppu} != {cppu}'
            )
            ppu = cppu

        return size, ppu

    def update_from_msg(
        self,
        msg: BrokerdPosition,

    ) -> None:

        # XXX: better place to do this?
        symbol = self.symbol

        lot_size_digits = symbol.lot_size_digits
        ppu, size = (
            round(
                msg['avg_price'],
                ndigits=symbol.tick_size_digits
            ),
            round(
                msg['size'],
                ndigits=lot_size_digits
            ),
        )

        self.ppu = ppu
        self.size = size

    @property
    def dsize(self) -> float:
        '''
        The "dollar" size of the pp, normally in trading (fiat) unit
        terms.

        '''
        return self.ppu * self.size

    # TODO: idea: "real LIFO" dynamic positioning.
    # - when a trade takes place where the pnl for
    # the (set of) trade(s) is below the breakeven price
    # it may be that the trader took a +ve pnl on a short(er)
    # term trade in the same account.
    # - in this case we could recalc the be price to
    # be reverted back to it's prior value before the nearest term
    # trade was opened.?
    # def lifo_price() -> float:
    #     ...

    def calc_ppu(
        self,
        # include transaction cost in breakeven price
        # and presume the worst case of the same cost
        # to exit this transaction (even though in reality
        # it will be dynamic based on exit strategy).
        cost_scalar: float = 2,

    ) -> float:
        '''
        Compute the "price-per-unit" price for the given non-zero sized
        rolling position.

        The recurrence relation which computes this (exponential) mean
        per new clear which **increases** the accumulative position size
        is:

        ppu[-1] = (
            ppu[-2] * accum_size[-2]
            +
            ppu[-1] * size
        ) / accum_size[-1]

        where `cost_basis` for the current step is simply the price
        * size of the most recent clearing transaction.

        '''
        asize_h: list[float] = []  # historical accumulative size
        ppu_h: list[float] = []  # historical price-per-unit

        clears = list(self.clears.items())

        for i, (tid, entry) in enumerate(clears):

            clear_size = entry['size']
            clear_price = entry['price']

            last_accum_size = asize_h[-1] if asize_h else 0
            accum_size = last_accum_size + clear_size
            accum_sign = copysign(1, accum_size)

            sign_change: bool = False

            if accum_size == 0:
                ppu_h.append(0)
                asize_h.append(0)
                continue

            # test if the pp somehow went "past" a net zero size state
            # resulting in a change of the "sign" of the size (+ve for
            # long, -ve for short).
            sign_change = (
                copysign(1, last_accum_size) + accum_sign == 0
                and last_accum_size != 0
            )

            # since we passed the net-zero-size state the new size
            # after sum should be the remaining size the new
            # "direction" (aka, long vs. short) for this clear.
            if sign_change:
                clear_size = accum_size
                abs_diff = abs(accum_size)
                asize_h.append(0)
                ppu_h.append(0)

            else:
                # old size minus the new size gives us size diff with
                # +ve -> increase in pp size
                # -ve -> decrease in pp size
                abs_diff = abs(accum_size) - abs(last_accum_size)

            # XXX: LIFO breakeven price update. only an increase in size
            # of the position contributes the breakeven price,
            # a decrease does not (i.e. the position is being made
            # smaller).
            # abs_clear_size = abs(clear_size)
            abs_new_size = abs(accum_size)

            if abs_diff > 0:

                cost_basis = (
                    # cost basis for this clear
                    clear_price * abs(clear_size)
                    +
                    # transaction cost
                    accum_sign * cost_scalar * entry['cost']
                )

                if asize_h:
                    size_last = abs(asize_h[-1])
                    cb_last = ppu_h[-1] * size_last
                    ppu = (cost_basis + cb_last) / abs_new_size

                else:
                    ppu = cost_basis / abs_new_size

                ppu_h.append(ppu)
                asize_h.append(accum_size)

            else:
                # on "exit" clears from a given direction,
                # only the size changes not the price-per-unit
                # need to be updated since the ppu remains constant
                # and gets weighted by the new size.
                asize_h.append(accum_size)
                ppu_h.append(ppu_h[-1])

        return ppu_h[-1] if ppu_h else 0
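
A worked, long-only instance of that recurrence (values hypothetical): two clears of 10 units at $100 then $110, each with a $1 fee and the worst-case ``cost_scalar = 2`` exit-cost assumption, land the breakeven at 105.2.

    cost_scalar = 2
    clears = [
        {'price': 100.0, 'size': 10.0, 'cost': 1.0},
        {'price': 110.0, 'size': 10.0, 'cost': 1.0},
    ]
    ppu, accum = 0.0, 0.0
    for c in clears:
        new_accum = accum + c['size']
        # cost basis for this clear incl. the doubled fee
        cost_basis = c['price'] * abs(c['size']) + cost_scalar * c['cost']
        # the recurrence: size-weighted mean of prior basis + this clear
        ppu = (ppu * abs(accum) + cost_basis) / abs(new_accum)
        accum = new_accum

    print(ppu)  # 105.2 == (1002 + 1102) / 20
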
    def calc_size(self) -> float:
        size: float = 0
        for tid, entry in self.clears.items():
            size += entry['size']
        return size

    def minimize_clears(
        self,

    ) -> dict[str, dict]:
        '''
        Minimize the position's clears entries by removing
        all transactions before the last net zero size to avoid
        unnecessary history irrelevant to the current pp state.

        '''
        size: float = 0
        clears_since_zero: list[tuple(str, dict)] = []

        # TODO: we might just want to always do this when iterating
        # a ledger? keep a state of the last net-zero and only do the
        # full iterate when no state was stashed?

        # scan for the last "net zero" position by iterating
        # transactions until the next net-zero size, rinse, repeat.
        for tid, clear in self.clears.items():
            size += clear['size']
            clears_since_zero.append((tid, clear))

            if size == 0:
                clears_since_zero.clear()

        self.clears = dict(clears_since_zero)
        return self.clears

    def add_clear(
        self,
        t: Transaction,
    ) -> dict:
        '''
        Update clearing table and populate rolling ppu and accumulative
        size in both the clears entry and local attrs state.

        '''
        clear = self.clears[t.tid] = {
            'cost': t.cost,
            'price': t.price,
            'size': t.size,
            'dt': str(t.dt),
        }

        # TODO: compute these incrementally instead
        # of re-looping through each time resulting in O(n**2)
        # behaviour..
        # compute these **after** adding the entry
        # in order to make the recurrence relation math work
        # inside ``.calc_size()``.
        self.size = clear['accum_size'] = self.calc_size()
        self.ppu = clear['ppu'] = self.calc_ppu()

        return clear
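
The "minimize clears" scan is easiest to see on toy data: once the running size hits net-zero, everything seen so far is irrelevant to the current position state and gets dropped.

    clears = {
        't1': {'size': 10.0},
        't2': {'size': -10.0},  # net-zero: history before here is dropped
        't3': {'size': 5.0},
    }
    size = 0.0
    since_zero: list[tuple[str, dict]] = []
    for tid, clear in clears.items():
        size += clear['size']
        since_zero.append((tid, clear))
        if size == 0:
            since_zero.clear()

    print(dict(since_zero))  # {'t3': {'size': 5.0}}
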
class PpTable(Struct):

    brokername: str
    acctid: str
    pps: dict[str, Position]
    conf: Optional[dict] = {}

    def update_from_trans(
        self,
        trans: dict[str, Transaction],
        cost_scalar: float = 2,

    ) -> dict[str, Position]:

        pps = self.pps
        updated: dict[str, Position] = {}

        # lifo update all pps from records
        for tid, t in trans.items():

            pp = pps.setdefault(
                t.bsuid,

                # if no existing pp, allocate fresh one.
                Position(
                    Symbol.from_fqsn(
                        t.fqsn,
                        info={},
                    ),
                    size=0.0,
                    ppu=0.0,
                    bsuid=t.bsuid,
                    expiry=t.expiry,
                )
            )

            # don't do updates for ledger records we already have
            # included in the current pps state.
            if t.tid in pp.clears:
                # NOTE: likely you'll see repeats of the same
                # ``Transaction`` passed in here if/when you are restarting
                # a ``brokerd.ib`` where the API will re-report trades from
                # the current session, so we need to make sure we don't
                # "double count" these in pp calculations.
                continue

            # update clearing table
            pp.add_clear(t)
            updated[t.bsuid] = pp

        # minimize clears tables and update sizing.
        for bsuid, pp in updated.items():
            pp.size, pp.ppu = pp.audit_sizing()

        return updated

    def dump_active(
        self,
    ) -> tuple[
        dict[str, Position],
        dict[str, Position]
    ]:
        '''
        Iterate all tabulated positions, render active positions to
        a ``dict`` format amenable to serialization (via TOML) and drop
        from state (``.pps``) as well as return in a ``dict`` all
        ``Position``s which have recently closed.

        '''
        # NOTE: newly closed position are also important to report/return
        # since a consumer, like an order mode UI ;), might want to react
        # based on the closure (for example removing the breakeven line
        # and clearing the entry from any lists/monitors).
        closed_pp_objs: dict[str, Position] = {}
        open_pp_objs: dict[str, Position] = {}

        pp_objs = self.pps
        for bsuid in list(pp_objs):
            pp = pp_objs[bsuid]

            # XXX: debug hook for size mismatches
            # qqqbsuid = 320227571
            # if bsuid == qqqbsuid:
            #     breakpoint()

            pp.size, pp.ppu = pp.audit_sizing()

            if (
                # "net-zero" is a "closed" position
                pp.size == 0

                # time-expired pps (normally derivatives) are "closed"
                or (pp.expiry and pp.expiry < now())
            ):
                # for expired cases
                pp.size = 0

                # NOTE: we DO NOT pop the pp here since it can still be
                # used to check for duplicate clears that may come in as
                # new transaction from some backend API and need to be
                # ignored; the closed positions won't be written to the
                # ``pps.toml`` since ``pp_active_entries`` above is what's
                # written.
                closed_pp_objs[bsuid] = pp

            else:
                open_pp_objs[bsuid] = pp

        return open_pp_objs, closed_pp_objs

    def to_toml(
        self,
    ) -> dict[str, Any]:

        active, closed = self.dump_active()

        # ONLY dict-serialize all active positions; those that are closed
        # we don't store in the ``pps.toml``.
        to_toml_dict = {}

        for bsuid, pos in active.items():

            # keep the minimal amount of clears that make up this
            # position since the last net-zero state.
            pos.minimize_clears()

            # serialize to pre-toml form
            fqsn, asdict = pos.to_pretoml()
            log.info(f'Updating active pp: {fqsn}')

            # XXX: ugh, it's cuz we push the section under
            # the broker name.. maybe we need to rethink this?
            brokerless_key = fqsn.removeprefix(f'{self.brokername}.')
            to_toml_dict[brokerless_key] = asdict

        return to_toml_dict

    def write_config(self) -> None:
        '''
        Write the current position table to the user's ``pps.toml``.

        '''
        # TODO: show diff output?
        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
        print(f'Updating ``pps.toml`` for {path}:\n')

        # active, closed_pp_objs = table.dump_active()
        pp_entries = self.to_toml()
        self.conf[self.brokername][self.acctid] = pp_entries

        # TODO: why tf haven't they already done this for inline
        # tables smh..
        enc = PpsEncoder(preserve=True)
        # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
        enc.dump_funcs[
            toml.decoder.InlineTableDict
        ] = enc.dump_inline_table

        config.write(
            self.conf,
            'pps',
            encoder=enc,
        )


def load_pps_from_ledger(

    brokername: str,
    acctname: str,

    # post normalization filter on ledger entries to be processed
    filter_by: Optional[list[dict]] = None,

) -> tuple[
    dict[str, Transaction],
    dict[str, Position],
]:
    '''
    Open a ledger file by broker name and account and read in and
    process any trade records into our normalized ``Transaction`` form
    and then update the equivalent ``PpTable`` and deliver the two
    bsuid-mapped dict-sets of the transactions and pps.

    '''
    with (
        open_trade_ledger(brokername, acctname) as ledger,
        open_pps(brokername, acctname) as table,
    ):
        if not ledger:
            # null case, no ledger file with content
            return {}

        mod = get_brokermod(brokername)
        src_records: dict[str, Transaction] = mod.norm_trade_records(ledger)

        if filter_by:
            records = {}
            bsuids = set(filter_by)
            for tid, r in src_records.items():
                if r.bsuid in bsuids:
                    records[tid] = r
        else:
            records = src_records

        updated = table.update_from_trans(records)

    return records, updated
# TODO: instead see if we can hack tomli and tomli-w to do the same:
# - https://github.com/hukkin/tomli
# - https://github.com/hukkin/tomli-w
class PpsEncoder(toml.TomlEncoder):
    '''
    Special "styled" encoder that makes a ``pps.toml`` readable and
    compact by putting `.clears` tables inline and everything else
    flat-ish.

    '''
    separator = ','

    def dump_list(self, v):
        '''
        Dump an inline list with a newline after every element and
        with consideration for denoted inline table types.

        '''
        retval = "[\n"
        for u in v:
            if isinstance(u, toml.decoder.InlineTableDict):
                out = self.dump_inline_table(u)
            else:
                out = str(self.dump_value(u))

            retval += " " + out + "," + "\n"
        retval += "]"
        return retval

    def dump_inline_table(self, section):
        """Preserve inline table in its compact syntax instead of expanding
        into subsection.
        https://github.com/toml-lang/toml#user-content-inline-table
        """
        val_list = []
        for k, v in section.items():
            # if isinstance(v, toml.decoder.InlineTableDict):
            if isinstance(v, dict):
                val = self.dump_inline_table(v)
            else:
                val = str(self.dump_value(v))

            val_list.append(k + " = " + val)

        retval = "{ " + ", ".join(val_list) + " }"
        return retval

    def dump_sections(self, o, sup):
        retstr = ""
        if sup != "" and sup[-1] != ".":
            sup += '.'
        retdict = self._dict()
        arraystr = ""
        for section in o:
            qsection = str(section)
            value = o[section]

            if not re.match(r'^[A-Za-z0-9_-]+$', section):
                qsection = toml.encoder._dump_str(section)

            # arrayoftables = False
            if (
                self.preserve
                and isinstance(value, toml.decoder.InlineTableDict)
            ):
                retstr += (
                    qsection
                    +
                    " = "
                    +
                    self.dump_inline_table(o[section])
                    +
                    '\n'  # only on the final terminating left brace
                )

            # XXX: this code i'm pretty sure is just blatantly bad
            # and/or wrong..
            # if isinstance(o[section], list):
            #     for a in o[section]:
            #         if isinstance(a, dict):
            #             arrayoftables = True
            # if arrayoftables:
            #     for a in o[section]:
            #         arraytabstr = "\n"
            #         arraystr += "[[" + sup + qsection + "]]\n"
            #         s, d = self.dump_sections(a, sup + qsection)
            #         if s:
            #             if s[0] == "[":
            #                 arraytabstr += s
            #             else:
            #                 arraystr += s
            #         while d:
            #             newd = self._dict()
            #             for dsec in d:
            #                 s1, d1 = self.dump_sections(d[dsec], sup +
            #                                             qsection + "." +
            #                                             dsec)
            #                 if s1:
            #                     arraytabstr += ("[" + sup + qsection +
            #                                     "." + dsec + "]\n")
            #                     arraytabstr += s1
            #                 for s1 in d1:
            #                     newd[dsec + "." + s1] = d1[s1]
            #             d = newd
            #         arraystr += arraytabstr

            elif isinstance(value, dict):
                retdict[qsection] = o[section]

            elif o[section] is not None:
                retstr += (
                    qsection
                    +
                    " = "
                    +
                    str(self.dump_value(o[section]))
                )

                # if not isinstance(value, dict):
                if not isinstance(value, toml.decoder.InlineTableDict):
                    # inline tables should not contain newlines:
                    # https://toml.io/en/v1.0.0#inline-table
                    retstr += '\n'

            else:
                raise ValueError(value)

        retstr += arraystr
        return (retstr, retdict)
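
A quick, hedged demo of the inline-table styling this encoder produces (data made up; relies on the ``toml`` package internals the class already uses):

    import toml

    dec = toml.TomlDecoder()
    tbl = dec.get_empty_inline_table()
    tbl.update(
        {'dt': '2022-06-01', 'price': 4.2, 'size': 10, 'cost': 0.1, 'tid': 'abc'}
    )

    enc = PpsEncoder(preserve=True)
    print(enc.dump_inline_table(tbl))
    # { dt = "2022-06-01", price = 4.2, size = 10, cost = 0.1, tid = "abc" }
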
@cm
def open_pps(
    brokername: str,
    acctid: str,
    write_on_exit: bool = True,

) -> PpTable:
    '''
    Read out broker-specific position entries from
    incremental update file: ``pps.toml``.

    '''
    conf, path = config.load('pps')
    brokersection = conf.setdefault(brokername, {})
    pps = brokersection.setdefault(acctid, {})

    # TODO: ideally we can pass in an existing
    # pps state to this right? such that we
    # don't have to do a ledger reload all the
    # time.. a couple ideas I can think of,
    # - mirror this in some client side actor which
    #   does the actual ledger updates (say the paper
    #   engine proc if we decide to always spawn it?),
    # - do diffs against updates from the ledger writer
    #   actor and the in-mem state here?

    pp_objs = {}
    table = PpTable(
        brokername,
        acctid,
        pp_objs,
        conf=conf,
    )

    # unmarshal/load ``pps.toml`` config entries into object form
    # and update `PpTable` obj entries.
    for fqsn, entry in pps.items():
        bsuid = entry['bsuid']

        # convert clears sub-tables (only in this form
        # for toml re-presentation) back into a master table.
        clears_list = entry['clears']

        # index clears entries in "object" form by tid in a top
        # level dict instead of a list (as is presented in our
        # ``pps.toml``).
        pp = pp_objs.get(bsuid)
        if pp:
            clears = pp.clears
        else:
            clears = {}

        for clears_table in clears_list:
            tid = clears_table.pop('tid')
            clears[tid] = clears_table

        size = entry['size']
        # TODO: remove but, handle old field name for now
        ppu = entry.get('ppu', entry.get('be_price', 0))

        expiry = entry.get('expiry')
        if expiry:
            expiry = pendulum.parse(expiry)

        pp = pp_objs[bsuid] = Position(
            Symbol.from_fqsn(fqsn, info={}),
            size=size,
            ppu=ppu,
            expiry=expiry,
            bsuid=entry['bsuid'],

            # XXX: super critical, we need to be sure to include
            # all pps.toml clears to avoid reusing clears that were
            # already included in the current incremental update
            # state, since today's records may have already been
            # processed!
            clears=clears,
        )

        # audit entries loaded from toml
        pp.size, pp.ppu = pp.audit_sizing()

    try:
        yield table
    finally:
        if write_on_exit:
            table.write_config()


if __name__ == '__main__':
    import sys

    args = sys.argv
    assert len(args) > 1, 'Specify account(s) from `brokers.toml`'
    args = args[1:]
    for acctid in args:
        broker, name = acctid.split('.')
        trans, updated_pps = load_pps_from_ledger(broker, name)
        print(
            f'Processing transactions into pps for {broker}:{acctid}\n'
            f'{pformat(trans)}\n\n'
            f'{pformat(updated_pps)}'
        )
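
That trailing ``__main__`` hook gives the module a quick manual audit mode; assuming it lands at ``piker/pps.py``, an invocation like ``python -m piker.pps kraken.spot`` (the ``<broker>.<account>`` name is hypothetical) re-processes a ledger and pretty-prints the resulting transactions and positions.
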
@@ -230,18 +230,19 @@ class GodWidget(QWidget):
         # - we'll probably want per-instrument/provider state here?
         #   change the order config form over to the new chart

-        # XXX: since the pp config is a singleton widget we have to
-        # also switch it over to the new chart's internal-layout
-        # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
-        chart = linkedsplits.chart
-
         # chart is already in memory so just focus it
         linkedsplits.show()
         linkedsplits.focus()
         linkedsplits.graphics_cycle()
         await trio.sleep(0)

+        # XXX: since the pp config is a singleton widget we have to
+        # also switch it over to the new chart's internal-layout
+        # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
+        chart = linkedsplits.chart
+
         # resume feeds *after* rendering chart view asap
-        chart.resume_all_feeds()
+        if chart:
+            chart.resume_all_feeds()

         # TODO: we need a check to see if the chart
@@ -452,13 +453,6 @@ class LinkedSplits(QWidget):
         # add crosshair graphic
         self.chart.addItem(self.cursor)

-        # axis placement
-        if (
-            _xaxis_at == 'bottom' and
-            'bottom' in self.chart.plotItem.axes
-        ):
-            self.chart.hideAxis('bottom')
-
         # style?
         self.chart.setFrameStyle(
             QFrame.StyledPanel |
@@ -523,6 +517,15 @@ class LinkedSplits(QWidget):
             cpw.hideAxis('left')
             cpw.hideAxis('bottom')

+            if (
+                _xaxis_at == 'bottom' and (
+                    self.xaxis_chart
+                    or (
+                        not self.subplots
+                        and self.xaxis_chart is None
+                    )
+                )
+            ):
                 if self.xaxis_chart:
                     self.xaxis_chart.hideAxis('bottom')
@@ -531,13 +534,9 @@ class LinkedSplits(QWidget):
                 # https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master
                 # _ = self.xaxis_chart.removeAxis('bottom', unlink=False)
                 # assert 'bottom' not in self.xaxis_chart.plotItem.axes

                 self.xaxis_chart = cpw
                 cpw.showAxis('bottom')

-        if self.xaxis_chart is None:
-            self.xaxis_chart = cpw
-
         qframe.chart = cpw
         qframe.hbox.addWidget(cpw)
@@ -760,9 +759,18 @@ class ChartPlotWidget(pg.PlotWidget):

         self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)

+        # idempotent startup flag for auto-yrange subsys
+        # to detect the "first time" y-domain graphics begin
+        # to be shown in the (main) graphics view.
+        self._on_screen: bool = False
+
     def resume_all_feeds(self):
-        for feed in self._feeds.values():
-            self.linked.godwidget._root_n.start_soon(feed.resume)
+        try:
+            for feed in self._feeds.values():
+                self.linked.godwidget._root_n.start_soon(feed.resume)
+        except RuntimeError:
+            # TODO: cancel the qtractor runtime here?
+            raise

     def pause_all_feeds(self):
         for feed in self._feeds.values():
@@ -859,7 +867,8 @@ class ChartPlotWidget(pg.PlotWidget):

     def default_view(
         self,
-        bars_from_y: int = 3000,
+        bars_from_y: int = 616,
+        do_ds: bool = True,

     ) -> None:
         '''
@@ -920,8 +929,11 @@ class ChartPlotWidget(pg.PlotWidget):
             max=end,
             padding=0,
         )
-        self.view.maybe_downsample_graphics()
-        view._set_yrange()
+
+        if do_ds:
+            self.view.maybe_downsample_graphics()
+            view._set_yrange()
+
         try:
             self.linked.graphics_cycle()
         except IndexError:
@@ -1255,7 +1267,6 @@ class ChartPlotWidget(pg.PlotWidget):
         If ``bars_range`` is provided use that range.

         '''
-        # print(f'Chart[{self.name}].maxmin()')
         profiler = pg.debug.Profiler(
             msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
             disabled=not pg_profile_enabled(),
@@ -1287,11 +1298,18 @@ class ChartPlotWidget(pg.PlotWidget):

         key = round(lbar), round(rbar)
         res = flow.maxmin(*key)
-        if res == (None, None):
-            log.error(
+
+        if (
+            res is None
+        ):
+            log.warning(
                 f"{flow_key} no mxmn for bars_range => {key} !?"
             )
             res = 0, 0
+            if not self._on_screen:
+                self.default_view(do_ds=False)
+                self._on_screen = True

         profiler(f'yrange mxmn: {key} -> {res}')
+        # print(f'{flow_key} yrange mxmn: {key} -> {res}')
         return res
@@ -223,14 +223,20 @@ def ds_m4(
     assert frames >= (xrange / uppx)

     # call into ``numba``
-    nb, i_win, y_out = _m4(
+    (
+        nb,
+        x_out,
+        y_out,
+        ymn,
+        ymx,
+    ) = _m4(
         x,
         y,

         frames,

         # TODO: see func below..
-        # i_win,
+        # x_out,
         # y_out,

         # first index in x data to start at
@@ -243,10 +249,11 @@ def ds_m4(
     # filter out any overshoot in the input allocation arrays by
     # removing zero-ed tail entries which should start at a certain
     # index.
-    i_win = i_win[i_win != 0]
-    y_out = y_out[:i_win.size]
+    x_out = x_out[x_out != 0]
+    y_out = y_out[:x_out.size]

-    return nb, i_win, y_out
+    # print(f'M4 output ymn, ymx: {ymn},{ymx}')
+    return nb, x_out, y_out, ymn, ymx


 @jit(
@@ -260,8 +267,8 @@ def _m4(

     frames: int,

-    # TODO: using this approach by having the ``.zeros()`` alloc lines
-    # below, in put python was causing segs faults and alloc crashes..
+    # TODO: using this approach, having the ``.zeros()`` alloc lines
+    # below in pure python, there were segs faults and alloc crashes..
     # we might need to see how it behaves with shm arrays and consider
     # allocating them once at startup?
@@ -274,14 +281,22 @@ def _m4(
     x_start: int,
     step: float,

-) -> int:
-    # nbins = len(i_win)
-    # count = len(xs)
+) -> tuple[
+    int,
+    np.ndarray,
+    np.ndarray,
+    float,
+    float,
+]:
+    '''
+    Implementation of the m4 algorithm in ``numba``:
+    http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
+
+    '''
     # these are pre-allocated and mutated by ``numba``
     # code in-place.
     y_out = np.zeros((frames, 4), ys.dtype)
-    i_win = np.zeros(frames, xs.dtype)
+    x_out = np.zeros(frames, xs.dtype)

     bincount = 0
     x_left = x_start
@@ -295,24 +310,34 @@ def _m4(

     # set all bins in the left-most entry to the starting left-most x value
     # (aka a row broadcast).
-    i_win[bincount] = x_left
+    x_out[bincount] = x_left
     # set all y-values to the first value passed in.
     y_out[bincount] = ys[0]

+    # full input y-data mx and mn
+    mx: float = -np.inf
+    mn: float = np.inf
+
+    # compute OHLC style max / min values per window sized x-frame.
     for i in range(len(xs)):
+
         x = xs[i]
         y = ys[i]

         if x < x_left + step:   # the current window "step" is [bin, bin+1)
-            y_out[bincount, 1] = min(y, y_out[bincount, 1])
-            y_out[bincount, 2] = max(y, y_out[bincount, 2])
+            ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
+            ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
             y_out[bincount, 3] = y
+            mx = max(mx, ymx)
+            mn = min(mn, ymn)
+
         else:
             # Find the next bin
             while x >= x_left + step:
                 x_left += step

             bincount += 1
-            i_win[bincount] = x_left
+            x_out[bincount] = x_left
             y_out[bincount] = y

-    return bincount, i_win, y_out
+    return bincount, x_out, y_out, mn, mx
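
For context, a pure-python sketch (not the numba kernel above, and with made-up names) of the M4 idea it implements: per fixed-width x-bin keep only (first, min, max, last) of the y-values, which preserves the visual extremes of a series while shrinking it to roughly four samples per screen pixel.

    import numpy as np

    def m4_sketch(x: np.ndarray, y: np.ndarray, step: float):
        # returns (bin_left_x, first, min, max, last) per bin
        bins: list[tuple[float, float, float, float, float]] = []
        x_left = float(x[0])
        first = mn = mx = last = float(y[0])
        for xi, yi in zip(x[1:], y[1:]):
            if xi < x_left + step:
                mn, mx, last = min(mn, yi), max(mx, yi), yi
            else:
                bins.append((x_left, first, mn, mx, last))
                while xi >= x_left + step:
                    x_left += step
                first = mn = mx = last = yi
        bins.append((x_left, first, mn, mx, last))
        return bins

    x = np.arange(8, dtype=float)
    y = np.array([1, 3, 2, 5, 4, 4, 6, 0], dtype=float)
    print(m4_sketch(x, y, step=4.0))
    # [(0.0, 1.0, 1.0, 5.0, 5.0), (4.0, 4.0, 0.0, 6.0, 0.0)]
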
@@ -105,6 +105,10 @@ def chart_maxmin(
     mn, mx = out

     mx_vlm_in_view = 0

+    # TODO: we need to NOT call this to avoid a manual
+    # np.max/min trigger and especially on the vlm_chart
+    # flows which aren't shown.. like vlm?
     if vlm_chart:
         out = vlm_chart.maxmin()
         if out:
@@ -132,16 +136,16 @@ class DisplayState:
     # high level chart handles
     linked: LinkedSplits
     chart: ChartPlotWidget
-    vlm_chart: ChartPlotWidget

     # axis labels
     l1: L1Labels
     last_price_sticky: YAxisLabel
-    vlm_sticky: YAxisLabel

     # misc state tracking
     vars: dict[str, Any]

+    vlm_chart: Optional[ChartPlotWidget] = None
+    vlm_sticky: Optional[YAxisLabel] = None
     wap_in_history: bool = False
@@ -181,9 +185,6 @@ async def graphics_update_loop(
         *ohlcv.array[-1][['index', 'close']]
     )

-    if vlm_chart:
-        vlm_sticky = vlm_chart._ysticks['volume']
-
     maxmin = partial(
         chart_maxmin,
         chart,
@@ -222,33 +223,9 @@ async def graphics_update_loop(
     tick_margin = 3 * tick_size

     chart.show()
-    # view = chart.view
     last_quote = time.time()
     i_last = ohlcv.index

-    # async def iter_drain_quotes():
-    #     # NOTE: all code below this loop is expected to be synchronous
-    #     # and thus draw instructions are not picked up jntil the next
-    #     # wait / iteration.
-    #     async for quotes in stream:
-    #         while True:
-    #             try:
-    #                 moar = stream.receive_nowait()
-    #             except trio.WouldBlock:
-    #                 yield quotes
-    #                 break
-    #             else:
-    #                 for sym, quote in moar.items():
-    #                     ticks_frame = quote.get('ticks')
-    #                     if ticks_frame:
-    #                         quotes[sym].setdefault(
-    #                             'ticks', []).extend(ticks_frame)
-    #                     print('pulled extra')
-
-    #         yield quotes
-
-    # async for quotes in iter_drain_quotes():
-
     ds = linked.display_state = DisplayState(**{
         'quotes': {},
         'linked': linked,
@@ -256,8 +233,6 @@ async def graphics_update_loop(
         'ohlcv': ohlcv,
         'chart': chart,
         'last_price_sticky': last_price_sticky,
-        'vlm_chart': vlm_chart,
-        'vlm_sticky': vlm_sticky,
         'l1': l1,

         'vars': {
@@ -270,6 +245,11 @@ async def graphics_update_loop(
         }
     })

+    if vlm_chart:
+        vlm_sticky = vlm_chart._ysticks['volume']
+        ds.vlm_chart = vlm_chart
+        ds.vlm_sticky = vlm_sticky
+
     chart.default_view()

     # main real-time quotes update loop
@@ -293,6 +273,7 @@ async def graphics_update_loop(

         # chart isn't active/shown so skip render cycle and pause feed(s)
         if chart.linked.isHidden():
+            print('skipping update')
             chart.pause_all_feeds()
             continue
@@ -341,7 +322,7 @@ def graphics_update_cycle(
     for sym, quote in ds.quotes.items():

         # compute the first available graphic's x-units-per-pixel
-        uppx = vlm_chart.view.x_uppx()
+        uppx = chart.view.x_uppx()

         # NOTE: vlm may be written by the ``brokerd`` backend
         # even though a tick sample is not emitted.
@@ -416,10 +397,8 @@ def graphics_update_cycle(
             )
             or trigger_all
         ):
-            # TODO: we should track and compute whether the last
-            # pixel in a curve should show new data based on uppx
-            # and then iff update curves and shift?
             chart.increment_view(steps=i_diff)
+            # chart.increment_view(steps=i_diff + round(append_diff - uppx))

             if vlm_chart:
                 vlm_chart.increment_view(steps=i_diff)
@@ -477,7 +456,6 @@ def graphics_update_cycle(
         ):
             chart.update_graphics_from_flow(
                 chart.name,
-                # do_append=uppx < update_uppx,
                 do_append=do_append,
             )
@@ -808,7 +786,10 @@ async def display_symbol_data(
     async with trio.open_nursery() as ln:

         # if available load volume related built-in display(s)
-        if has_vlm(ohlcv):
+        if (
+            not symbol.broker_info[provider].get('no_vlm', False)
+            and has_vlm(ohlcv)
+        ):
             vlm_chart = await ln.start(
                 open_vlm_displays,
                 linked,
@@ -843,6 +824,9 @@ async def display_symbol_data(
             order_mode_started
         )
     ):
+        if not vlm_chart:
+            chart.default_view()
+
         # let Qt run to render all widgets and make sure the
         # sidepanes line up vertically.
         await trio.sleep(0)
@@ -21,7 +21,6 @@ Qt event proxying and processing using ``trio`` mem chans.
 from contextlib import asynccontextmanager, AsyncExitStack
 from typing import Callable

-from pydantic import BaseModel
 import trio
 from PyQt5 import QtCore
 from PyQt5.QtCore import QEvent, pyqtBoundSignal
@@ -30,6 +29,8 @@ from PyQt5.QtWidgets import (
     QGraphicsSceneMouseEvent as gs_mouse,
 )

+from ..data.types import Struct
+

 MOUSE_EVENTS = {
     gs_mouse.GraphicsSceneMousePress,
@@ -43,13 +44,10 @@ MOUSE_EVENTS = {
 # TODO: maybe consider some constrained ints down the road?
 # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types

-class KeyboardMsg(BaseModel):
+class KeyboardMsg(Struct):
     '''Unpacked Qt keyboard event data.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-
     event: QEvent
     etype: int
     key: int
@@ -57,16 +55,13 @@ class KeyboardMsg(BaseModel):
     txt: str

     def to_tuple(self) -> tuple:
-        return tuple(self.dict().values())
+        return tuple(self.to_dict().values())


-class MouseMsg(BaseModel):
+class MouseMsg(Struct):
     '''Unpacked Qt keyboard event data.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-
     event: QEvent
     etype: int
     button: int
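
A hedged sketch of the pydantic-to-msgspec migration pattern in these hunks: a ``msgspec.Struct`` holds arbitrary Python objects without pydantic's ``arbitrary_types_allowed`` opt-in, and field access stays attribute-style. Here ``Point`` and ``to_dict()`` stand in for piker's own ``Struct`` base, which is what makes the ``self.dict()`` to ``self.to_dict()`` rename in ``to_tuple()`` work.

    import msgspec

    class Point(msgspec.Struct):
        x: int
        y: int

        def to_dict(self) -> dict:
            # helper analogous to the one piker's ``Struct`` base adds
            return {f: getattr(self, f) for f in self.__struct_fields__}

    p = Point(1, 2)
    print(tuple(p.to_dict().values()))  # (1, 2)
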
@@ -337,6 +337,7 @@ class Flow(msgspec.Struct):  # , frozen=True):
     name: str
     plot: pg.PlotItem
     graphics: Union[Curve, BarItems]
+    yrange: tuple[float, float] = None

     # in some cases a flow may want to change its
     # graphical "type" or, "form" when downsampling,
@@ -386,10 +387,11 @@ class Flow(msgspec.Struct):  # , frozen=True):
         lbar: int,
         rbar: int,

-    ) -> tuple[float, float]:
+    ) -> Optional[tuple[float, float]]:
         '''
         Compute the cached max and min y-range values for a given
-        x-range determined by ``lbar`` and ``rbar``.
+        x-range determined by ``lbar`` and ``rbar`` or ``None``
+        if no range can be determined (yet).

         '''
         rkey = (lbar, rbar)
@@ -399,9 +401,8 @@ class Flow(msgspec.Struct):  # , frozen=True):

         shm = self.shm
         if shm is None:
-            mxmn = None
+            return None

-        else:  # new block for profiling?..
-            arr = shm.array
+        arr = shm.array

         # build relative indexes into shm array
@@ -414,7 +415,11 @@ class Flow(msgspec.Struct):  # , frozen=True):
         ]

         if not slice_view.size:
-            mxmn = None
+            return None
+
+        elif self.yrange:
+            mxmn = self.yrange
+            # print(f'{self.name} M4 maxmin: {mxmn}')

         else:
             if self.is_ohlc:
@@ -427,9 +432,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
             yhigh = np.max(view)

             mxmn = ylow, yhigh
+            # print(f'{self.name} MANUAL maxmin: {mxmin}')

-        if mxmn is not None:
-            # cache new mxmn result
-            self._mxmns[rkey] = mxmn
+        # cache result for input range
+        assert mxmn
+        self._mxmns[rkey] = mxmn

         return mxmn
@@ -628,10 +634,13 @@ class Flow(msgspec.Struct):  # , frozen=True):
             # source data so we clear our path data in prep
             # to generate a new one from original source data.
             new_sample_rate = True
-            showing_src_data = True
             should_ds = False
             should_redraw = True

+            showing_src_data = True
+            # reset yrange to be computed from source data
+            self.yrange = None
+
         # MAIN RENDER LOGIC:
         # - determine in view data and redraw on range change
         # - determine downsampling ops if needed
@@ -657,6 +666,10 @@ class Flow(msgspec.Struct):  # , frozen=True):

             **rkwargs,
         )
+        if showing_src_data:
+            # print(f"{self.name} SHOWING SOURCE")
+            # reset yrange to be computed from source data
+            self.yrange = None

         if not out:
             log.warning(f'{self.name} failed to render!?')
@@ -664,6 +677,9 @@ class Flow(msgspec.Struct):  # , frozen=True):

         path, data, reset = out

+        # if self.yrange:
+        #     print(f'flow {self.name} yrange from m4: {self.yrange}')
+
         # XXX: SUPER UGGGHHH... without this we get stale cache
         # graphics that don't update until you downsampler again..
         if reset:
@@ -1058,6 +1074,7 @@ class Renderer(msgspec.Struct):
         # xy-path data transform: convert source data to a format
         # able to be passed to a `QPainterPath` rendering routine.
         if not len(hist):
+            # XXX: this might be why the profiler only has exits?
             return

         x_out, y_out, connect = self.format_xy(
@@ -1144,11 +1161,14 @@ class Renderer(msgspec.Struct):

         elif should_ds and uppx > 1:

-            x_out, y_out = xy_downsample(
+            x_out, y_out, ymn, ymx = xy_downsample(
                 x_out,
                 y_out,
                 uppx,
             )
+            self.flow.yrange = ymn, ymx
+            # print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}')
+
             reset = True
             profiler(f'FULL PATH downsample redraw={should_ds}')
             self._in_ds = True
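
The design idea threaded through these yrange hunks: the M4 downsample pass already touches every in-view y-value, so the (ymn, ymx) it returns is stashed on the flow and later ``maxmin()`` calls can hand back the cache instead of re-scanning the array. A toy sketch of the same trade-off (names made up):

    import numpy as np

    y = np.array([3.0, 1.0, 4.0, 1.5])
    # min/max fall out of the downsample scan for free..
    cached_yrange = (float(y.min()), float(y.max()))
    # ..so later range queries return the cache instead of re-scanning:
    print(cached_yrange)  # (1.0, 4.0)
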
@@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
             # color: #19232D;
             # width: 10px;

-        self.setRange(0, slots)
+        self.setRange(0, int(slots))
         self.setValue(value)
@@ -27,12 +27,13 @@ from itertools import cycle
 from typing import Optional, AsyncGenerator, Any

 import numpy as np
-from pydantic import create_model
+import msgspec
 import tractor
 import pyqtgraph as pg
 import trio
 from trio_typing import TaskStatus

+from piker.data.types import Struct
 from ._axes import PriceAxis
 from .._cacheables import maybe_open_context
 from ..calc import humanize
@@ -53,7 +54,7 @@ from ._forms import (
 from ..fsp._api import maybe_mk_fsp_shm, Fsp
 from ..fsp import cascade
 from ..fsp._volume import (
-    tina_vwap,
+    # tina_vwap,
     dolla_vlm,
     flow_rates,
 )
@@ -153,12 +154,13 @@ async def open_fsp_sidepane(
 )

 # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
-FspConfig = create_model(
-    'FspConfig',
-    name=name,
-    **params,
+FspConfig = msgspec.defstruct(
+    "Point",
+    [('name', name)] + list(params.items()),
+    bases=(Struct,),
 )
-sidepane.model = FspConfig()
+model = FspConfig(name=name, **params)
+sidepane.model = model

 # just a logger for now until we get fsp configs up and running.
 async def settings_change(
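Note: `msgspec.defstruct()` builds a `Struct` subclass at runtime, playing the same role pydantic's `create_model()` did above. A minimal sketch of the API (field specs here are illustrative, not the repo's):

import msgspec

# fields may be plain names, (name, type) pairs,
# or (name, type, default) triples
FspConfig = msgspec.defstruct(
    'FspConfig',
    [('name', str), ('period', int, 14)],
)

cfg = FspConfig(name='rsi')
print(cfg.period)  # -> 14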
@@ -440,7 +442,9 @@ class FspAdmin:
 # if the chart isn't hidden try to update
 # the data on screen.
 if not self.linked.isHidden():
-    log.debug(f'Re-syncing graphics for fsp: {ns_path}')
+    log.debug(
+        f'Re-syncing graphics for fsp: {ns_path}'
+    )
     self.linked.graphics_cycle(
         trigger_all=True,
         prepend_update_index=info['first'],
@@ -469,9 +473,10 @@ class FspAdmin:
     target=target,
     readonly=True,
 )
-self._flow_registry[
-    (self.src_shm._token, target.name)
-] = dst_shm._token
+self._flow_registry[(
+    self.src_shm._token,
+    target.name
+)] = dst_shm._token

 # if not opened:
 # raise RuntimeError(
@@ -639,20 +644,25 @@ async def open_vlm_displays(
     names: list[str],

 ) -> tuple[float, float]:
+    '''
+    Flows "group" maxmin loop; assumes all named flows
+    are in the same co-domain and thus can be sorted
+    as one set.
+
+    Iterates all the named flows and calls the chart
+    api to find their range values and return.
+
+    TODO: really we should probably have a more built-in API
+    for this?
+
+    '''
     mx = 0
     for name in names:
-        mxmn = chart.maxmin(name=name)
-        if mxmn:
-            ymax = mxmn[1]
-            if ymax > mx:
-                mx = ymax
+        ymn, ymx = chart.maxmin(name=name)
+        mx = max(mx, ymx)

     return 0, mx

-chart.view.maxmin = partial(multi_maxmin, names=['volume'])

 # TODO: fix the x-axis label issue where if you put
 # the axis on the left it's totally not lined up...
 # show volume units value on LHS (for dinkus)
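Note: a grouped-range closure like `multi_maxmin` typically gets bound as a view's range getter via `partial`, as the deleted line above did for the single `'volume'` flow. A hypothetical wiring across several co-domain flows (the flow names and exact attachment point are illustrative):

from functools import partial

# hypothetical: have the volume view compute its y-range
# across both live and dark volume flows as one set
chart.view.maxmin = partial(
    multi_maxmin,
    names=['volume', 'dark_vlm'],
)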
@@ -776,6 +786,7 @@ async def open_vlm_displays(

 ) -> None:
     for name in names:
+
         if 'dark' in name:
             color = dark_vlm_color
         elif 'rate' in name:
@@ -923,6 +923,7 @@ class ChartView(ViewBox):
 # XXX: super important to be aware of this.
 # or not flow.graphics.isVisible()
 ):
+    # print(f'skipping {flow.name}')
     continue

 # pass in no array which will read and render from the last
@@ -22,12 +22,9 @@ from __future__ import annotations
 from typing import (
     Optional, Generic,
     TypeVar, Callable,
-    Literal,
 )
-import enum
-import sys

-from pydantic import BaseModel, validator
+# from pydantic import BaseModel, validator
 from pydantic.generics import GenericModel
 from PyQt5.QtWidgets import (
     QWidget,

@@ -38,6 +35,7 @@ from ._forms import (
 # FontScaledDelegate,
 Edit,
 )
+from ..data.types import Struct


 DataType = TypeVar('DataType')

@@ -62,7 +60,7 @@ class Selection(Field[DataType], Generic[DataType]):
 options: dict[str, DataType]
 # value: DataType = None

-@validator('value')  # , always=True)
+# @validator('value')  # , always=True)
 def set_value_first(
     cls,

@@ -100,7 +98,7 @@ class Edit(Field[DataType], Generic[DataType]):
 widget_factory = Edit


-class AllocatorPane(BaseModel):
+class AllocatorPane(Struct):

     account = Selection[str](
         options=dict.fromkeys(
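Note: the `BaseModel` to `Struct` swap here follows the same migration pattern as the rest of this set. A minimal sketch of what an msgspec-based struct base buys, assuming the repo's `piker.data.types.Struct` is a thin wrapper over `msgspec.Struct` (field names below are illustrative):

import msgspec

class AllocatorPane(msgspec.Struct):
    # typed fields replace pydantic's validated attributes;
    # no `class Config` block is needed for private attrs
    account: str = 'paper'
    size_unit: str = 'currency'

pane = AllocatorPane()
# fast, validated (de)serialization comes for free:
wire = msgspec.json.encode(pane)
assert msgspec.json.decode(wire, type=AllocatorPane) == pane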
@@ -49,12 +49,17 @@ def xy_downsample(

     x_spacer: float = 0.5,

-) -> tuple[np.ndarray, np.ndarray]:
+) -> tuple[
+    np.ndarray,
+    np.ndarray,
+    float,
+    float,
+]:

     # downsample whenever more then 1 pixels per datum can be shown.
     # always refresh data bounds until we get diffing
     # working properly, see above..
-    bins, x, y = ds_m4(
+    bins, x, y, ymn, ymx = ds_m4(
         x,
         y,
         uppx,

@@ -67,7 +72,7 @@ def xy_downsample(
     )).flatten()
     y = y.flatten()

-    return x, y
+    return x, y, ymn, ymx


 @njit(
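Note: the extra `ymn`/`ymx` returns come essentially for free since M4 downsampling already tracks the min and max of every pixel-column bin, so the global y-range falls out of the same pass. A simplified pure-numpy sketch of the idea (not the repo's numba-compiled `ds_m4`):

import numpy as np

def m4_sketch(
    x: np.ndarray,
    y: np.ndarray,
    nbins: int,
) -> tuple[np.ndarray, float, float]:
    # split the x-domain into pixel-column bins
    edges = np.linspace(x[0], x[-1], nbins + 1)
    splits = np.searchsorted(x, edges[1:-1])

    ymn, ymx = np.inf, -np.inf
    frames = []
    for seg in np.split(np.arange(len(x)), splits):
        if not seg.size:
            continue
        ys = y[seg]
        lo, hi = ys.min(), ys.max()
        # the global y-range is just the running bin extrema
        ymn, ymx = min(ymn, lo), max(ymx, hi)
        # M4 keeps (first, min, max, last) per bin
        frames.append((ys[0], lo, hi, ys[-1]))

    return np.array(frames), ymn, ymx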
@@ -19,6 +19,7 @@ Position info and display

 """
 from __future__ import annotations
+from copy import copy
 from dataclasses import dataclass
 from functools import partial
 from math import floor, copysign

@@ -105,8 +106,8 @@ async def update_pnl_from_feed(
 # compute and display pnl status
 order_mode.pane.pnl_label.format(
     pnl=copysign(1, size) * pnl(
-        # live.avg_price,
-        order_mode.current_pp.live_pp.avg_price,
+        # live.ppu,
+        order_mode.current_pp.live_pp.ppu,
         tick['price'],
     ),
 )

@@ -356,7 +357,7 @@ class SettingsPane:
 # last historical close price
 last = feed.shm.array[-1][['close']][0]
 pnl_value = copysign(1, size) * pnl(
-    tracker.live_pp.avg_price,
+    tracker.live_pp.ppu,
     last,
 )

@@ -476,7 +477,7 @@ class PositionTracker:

 self.alloc = alloc
 self.startup_pp = startup_pp
-self.live_pp = startup_pp.copy()
+self.live_pp = copy(startup_pp)

 view = chart.getViewBox()

@@ -556,7 +557,7 @@ class PositionTracker:
 pp = position or self.live_pp

 self.update_line(
-    pp.avg_price,
+    pp.ppu,
     pp.size,
     self.chart.linked.symbol.lot_size_digits,
 )

@@ -570,7 +571,7 @@ class PositionTracker:
 self.hide()

 else:
-    self._level_marker.level = pp.avg_price
+    self._level_marker.level = pp.ppu

 # these updates are critical to avoid lag on view/scene changes
 self._level_marker.update()  # trigger paint
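Note: these hunks rename the position's cost-basis field from `avg_price` to `ppu` (price-per-unit). For reference, the `pnl()` helper at these call sites is untouched by the diff; a minimal sketch assuming a fractional-return definition (the repo's helper may handle edge cases differently):

from math import copysign

def pnl(cost_basis: float, price: float) -> float:
    # fractional gain between cost basis and a live price
    if cost_basis == 0:
        return 0.0
    return (price - cost_basis) / cost_basis

# the copysign(1, size) factor flips the sign for shorts:
size = -2  # short two units
assert copysign(1, size) * pnl(100.0, 90.0) > 0  # shorts profit on drops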
@@ -27,20 +27,20 @@ import time
 from typing import Optional, Dict, Callable, Any
 import uuid

-from pydantic import BaseModel
 import tractor
 import trio
 from PyQt5.QtCore import Qt

 from .. import config
+from ..pp import Position
 from ..clearing._client import open_ems, OrderBook
 from ..clearing._allocate import (
     mk_allocator,
-    Position,
 )
 from ._style import _font
 from ..data._source import Symbol
 from ..data.feed import Feed
+from ..data.types import Struct
 from ..log import get_logger
 from ._editors import LineEditor, ArrowEditor
 from ._lines import order_line, LevelLine
|
@ -58,8 +58,9 @@ from ._forms import open_form_input_handling
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class OrderDialog(BaseModel):
|
class OrderDialog(Struct):
|
||||||
'''Trade dialogue meta-data describing the lifetime
|
'''
|
||||||
|
Trade dialogue meta-data describing the lifetime
|
||||||
of an order submission to ``emsd`` from a chart.
|
of an order submission to ``emsd`` from a chart.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
@ -72,10 +73,6 @@ class OrderDialog(BaseModel):
|
||||||
msgs: dict[str, dict] = {}
|
msgs: dict[str, dict] = {}
|
||||||
fills: Dict[str, Any] = {}
|
fills: Dict[str, Any] = {}
|
||||||
|
|
||||||
class Config:
|
|
||||||
arbitrary_types_allowed = True
|
|
||||||
underscore_attrs_are_private = False
|
|
||||||
|
|
||||||
|
|
||||||
def on_level_change_update_next_order_info(
|
def on_level_change_update_next_order_info(
|
||||||
|
|
||||||
|
@@ -87,7 +84,8 @@ def on_level_change_update_next_order_info(
     tracker: PositionTracker,

 ) -> None:
-    '''A callback applied for each level change to the line
+    '''
+    A callback applied for each level change to the line
     which will recompute the order size based on allocator
     settings. this is assigned inside
     ``OrderMode.line_from_order()``
@@ -266,7 +264,8 @@ class OrderMode:
     self,

 ) -> OrderDialog:
-    '''Send execution order to EMS return a level line to
+    '''
+    Send execution order to EMS return a level line to
     represent the order on a chart.

     '''

@@ -275,13 +274,9 @@ class OrderMode:
     oid = str(uuid.uuid4())

     # format order data for ems
-    fqsn = symbol.front_fqsn()
-    order = staged.copy(
-        update={
-            'symbol': fqsn,
-            'oid': oid,
-        }
-    )
+    order = staged.copy()
+    order.oid = oid
+    order.symbol = symbol.front_fqsn()

     line = self.line_from_order(
         order,
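Note: pydantic's `.copy(update={...})` has no one-call msgspec analogue, hence the copy-then-assign rewrite: msgspec structs are mutable by default, so a shallow copy followed by attribute writes does the same job. A sketch with an illustrative `Order` struct (assuming the repo's `Struct.copy()` is a shallow-copy helper):

from copy import copy

import msgspec

class Order(msgspec.Struct):
    oid: str = ''
    symbol: str = ''
    price: float = 0.0

staged = Order(price=101.25)

order = copy(staged)              # shallow copy, like staged.copy()
order.oid = 'deadbeef'            # then mutate the fields pydantic
order.symbol = 'xbtusdt.kraken'   # would have taken via update={}

assert staged.oid == ''  # the staged template is untouched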
@@ -577,9 +572,9 @@ async def open_order_mode(
     providers=symbol.brokers
 )

-# XXX: ``brokerd`` delivers a set of account names that it allows
-# use of but the user also can define the accounts they'd like
-# to use, in order, in their `brokers.toml` file.
+# XXX: ``brokerd`` delivers a set of account names that it
+# allows use of but the user also can define the accounts they'd
+# like to use, in order, in their `brokers.toml` file.
 accounts = {}
 for name in brokerd_accounts:
     # ensure name is in ``brokers.toml``
@@ -592,10 +587,21 @@ async def open_order_mode(
     iter(accounts.keys())
 ) if accounts else 'paper'

+# Pack position messages by account, should only be one-to-one.
 # NOTE: requires the backend exactly specifies
 # the expected symbol key in its positions msg.
-pp_msgs = position_msgs.get(symkey, ())
-pps_by_account = {msg['account']: msg for msg in pp_msgs}
+pps_by_account = {}
+for (broker, acctid), msgs in position_msgs.items():
+    for msg in msgs:
+
+        sym = msg['symbol']
+        if (
+            sym == symkey or
+            # mega-UGH, i think we need to fix the FQSN stuff sooner
+            # then later..
+            sym == symkey.removesuffix(f'.{broker}')
+        ):
+            pps_by_account[acctid] = msg

 # update pp trackers with data relayed from ``brokerd``.
 for account_name in accounts:
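Note: the rewrite reflects a change in the `position_msgs` table shape: it is now keyed by `(broker, acctid)` pairs mapping to lists of per-symbol messages, rather than keyed directly by symbol. A sketch of the assumed new shape (all field values below are hypothetical):

# hypothetical shape of the relayed startup positions table
position_msgs = {
    ('kraken', 'spot'): [
        {'symbol': 'xbtusdt', 'account': 'kraken.spot', 'size': 0.1},
    ],
    ('ib', 'margin'): [
        {'symbol': 'mnq.globex', 'account': 'ib.margin', 'size': -2},
    ],
}

symkey = 'xbtusdt.kraken'
for (broker, acctid), msgs in position_msgs.items():
    for msg in msgs:
        sym = msg['symbol']
        # match either the full fqsn or the broker-suffix-stripped key
        if sym == symkey or sym == symkey.removesuffix(f'.{broker}'):
            print(f'{acctid} holds {sym}')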
@@ -604,7 +610,10 @@ async def open_order_mode(
 startup_pp = Position(
     symbol=symbol,
     size=0,
-    avg_price=0,
+    ppu=0,
+
+    # XXX: BLEH, do we care about this on the client side?
+    bsuid=symbol,
 )
 msg = pps_by_account.get(account_name)
 if msg:
@@ -785,15 +794,11 @@ async def process_trades_and_update_ui(
 pp_msg_symbol = msg['symbol'].lower()
 fqsn = sym.front_fqsn()
 broker, key = sym.front_feed()
-# print(
-#     f'pp msg symbol: {pp_msg_symbol}\n',
-#     f'fqsn: {fqsn}\n',
-#     f'front key: {key}\n',
-# )

 if (
-    pp_msg_symbol == fqsn.replace(f'.{broker}', '')
+    pp_msg_symbol == fqsn
+    or pp_msg_symbol == fqsn.removesuffix(f'.{broker}')
 ):
+    log.info(f'{fqsn} matched pp msg: {fmsg}')
     tracker = mode.trackers[msg['account']]
     tracker.live_pp.update_from_msg(msg)
     # update order pane widgets
@@ -834,15 +839,26 @@ async def process_trades_and_update_ui(
 # resp to 'cancel' request or error condition
 # for action request
 elif resp in (
-    'broker_cancelled',
     'broker_inactive',
     'broker_errored',
+):
+    # delete level line from view
+    mode.on_cancel(oid)
+    broker_msg = msg['brokerd_msg']
+    log.error(
+        f'Order {oid}->{resp} with:\n{pformat(broker_msg)}'
+    )
+
+elif resp in (
+    'broker_cancelled',
     'dark_cancelled'
 ):
     # delete level line from view
     mode.on_cancel(oid)
     broker_msg = msg['brokerd_msg']
-    log.warning(f'Order {oid} failed with:\n{pformat(broker_msg)}')
+    log.cancel(
+        f'Order {oid}->{resp} with:\n{pformat(broker_msg)}'
+    )

 elif resp in (
     'dark_triggered'
4 setup.py
@@ -41,17 +41,17 @@ setup(
     },
     install_requires=[
         'toml',
+        'tomli',  # fastest pure py reader
         'click',
         'colorlog',
         'attrs',
         'pygments',
         'colorama',  # numba traceback coloring
-        'pydantic',  # structured data
+        'msgspec',  # performant IPC messaging and structs

         # async
         'trio',
         'trio-websocket',
-        'msgspec',  # performant IPC messaging
         'async_generator',

         # from github currently (see requirements.txt)