Compare commits
	
		
			No commits in common. "no_signal_pi_overlays" and "310_plus" have entirely different histories. 
		
	
	
		
			no_signal_
			...
			310_plus
		
	
		| 
						 | 
					@ -50,8 +50,3 @@ prefer_data_account = [
 | 
				
			||||||
paper = "XX0000000"
 | 
					paper = "XX0000000"
 | 
				
			||||||
margin = "X0000000"
 | 
					margin = "X0000000"
 | 
				
			||||||
ira = "X0000000"
 | 
					ira = "X0000000"
 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
[deribit]
 | 
					 | 
				
			||||||
key_id = 'XXXXXXXX'
 | 
					 | 
				
			||||||
key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -3,12 +3,11 @@
 | 
				
			||||||
version: "3.5"
 | 
					version: "3.5"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
services:
 | 
					services:
 | 
				
			||||||
  ib_gw_paper:
 | 
					  ib-gateway:
 | 
				
			||||||
    # other image tags available:
 | 
					    # other image tags available:
 | 
				
			||||||
    # https://github.com/waytrade/ib-gateway-docker#supported-tags
 | 
					    # https://github.com/waytrade/ib-gateway-docker#supported-tags
 | 
				
			||||||
    # image: waytrade/ib-gateway:981.3j
 | 
					    image: waytrade/ib-gateway:981.3j
 | 
				
			||||||
    image: waytrade/ib-gateway:1012.2i
 | 
					    restart: always
 | 
				
			||||||
    restart: always  # restart whenev there's a crash or user clicsk
 | 
					 | 
				
			||||||
    network_mode: 'host'
 | 
					    network_mode: 'host'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    volumes:
 | 
					    volumes:
 | 
				
			||||||
| 
						 | 
					@ -40,12 +39,14 @@ services:
 | 
				
			||||||
    # this compose file which looks something like:
 | 
					    # this compose file which looks something like:
 | 
				
			||||||
    # TWS_USERID='myuser'
 | 
					    # TWS_USERID='myuser'
 | 
				
			||||||
    # TWS_PASSWORD='guest'
 | 
					    # TWS_PASSWORD='guest'
 | 
				
			||||||
 | 
					    # TRADING_MODE=paper (or live)
 | 
				
			||||||
 | 
					    # VNC_SERVER_PASSWORD='diggity'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    environment:
 | 
					    environment:
 | 
				
			||||||
      TWS_USERID: ${TWS_USERID}
 | 
					      TWS_USERID: ${TWS_USERID}
 | 
				
			||||||
      TWS_PASSWORD: ${TWS_PASSWORD}
 | 
					      TWS_PASSWORD: ${TWS_PASSWORD}
 | 
				
			||||||
      TRADING_MODE: 'paper'
 | 
					      TRADING_MODE: ${TRADING_MODE:-paper}
 | 
				
			||||||
      VNC_SERVER_PASSWORD: 'doggy'
 | 
					      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}
 | 
				
			||||||
      VNC_SERVER_PORT: '3003'
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # ports:
 | 
					    # ports:
 | 
				
			||||||
    #   - target: 4002
 | 
					    #   - target: 4002
 | 
				
			||||||
| 
						 | 
					@ -61,40 +62,3 @@ services:
 | 
				
			||||||
      # - "127.0.0.1:4001:4001"
 | 
					      # - "127.0.0.1:4001:4001"
 | 
				
			||||||
      # - "127.0.0.1:4002:4002"
 | 
					      # - "127.0.0.1:4002:4002"
 | 
				
			||||||
      # - "127.0.0.1:5900:5900"
 | 
					      # - "127.0.0.1:5900:5900"
 | 
				
			||||||
 | 
					 | 
				
			||||||
  ib_gw_live:
 | 
					 | 
				
			||||||
    image: waytrade/ib-gateway:1012.2i
 | 
					 | 
				
			||||||
    restart: always
 | 
					 | 
				
			||||||
    network_mode: 'host'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    volumes:
 | 
					 | 
				
			||||||
      - type: bind
 | 
					 | 
				
			||||||
        source: ./jts_live.ini
 | 
					 | 
				
			||||||
        target: /root/jts/jts.ini
 | 
					 | 
				
			||||||
        # don't let ibc clobber this file for
 | 
					 | 
				
			||||||
        # the main reason of not having a stupid
 | 
					 | 
				
			||||||
        # timezone set..
 | 
					 | 
				
			||||||
        read_only: true
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
      # force our own ibc config
 | 
					 | 
				
			||||||
      - type: bind
 | 
					 | 
				
			||||||
        source: ./ibc.ini
 | 
					 | 
				
			||||||
        target: /root/ibc/config.ini
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
      # force our noop script - socat isn't needed in host mode.
 | 
					 | 
				
			||||||
      - type: bind
 | 
					 | 
				
			||||||
        source: ./fork_ports_delayed.sh
 | 
					 | 
				
			||||||
        target: /root/scripts/fork_ports_delayed.sh
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
      # force our noop script - socat isn't needed in host mode.
 | 
					 | 
				
			||||||
      - type: bind
 | 
					 | 
				
			||||||
        source: ./run_x11_vnc.sh
 | 
					 | 
				
			||||||
        target: /root/scripts/run_x11_vnc.sh
 | 
					 | 
				
			||||||
        read_only: true
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # NOTE: to fill these out, define an `.env` file in the same dir as
 | 
					 | 
				
			||||||
    # this compose file which looks something like:
 | 
					 | 
				
			||||||
    environment:
 | 
					 | 
				
			||||||
      TRADING_MODE: 'live'
 | 
					 | 
				
			||||||
      VNC_SERVER_PASSWORD: 'doggy'
 | 
					 | 
				
			||||||
      VNC_SERVER_PORT: '3004'
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -188,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes
 | 
				
			||||||
#
 | 
					#
 | 
				
			||||||
# The default value is 60.
 | 
					# The default value is 60.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
LoginDialogDisplayTimeout=20
 | 
					LoginDialogDisplayTimeout = 60
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -292,7 +292,7 @@ ExistingSessionDetectedAction=primary
 | 
				
			||||||
# be set dynamically at run-time: most users will never need it,
 | 
					# be set dynamically at run-time: most users will never need it,
 | 
				
			||||||
# so don't use it unless you know you need it.
 | 
					# so don't use it unless you know you need it.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
; OverrideTwsApiPort=4002
 | 
					OverrideTwsApiPort=4002
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Read-only Login
 | 
					# Read-only Login
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -1,33 +0,0 @@
 | 
				
			||||||
[IBGateway]
 | 
					 | 
				
			||||||
ApiOnly=true
 | 
					 | 
				
			||||||
LocalServerPort=4001
 | 
					 | 
				
			||||||
# NOTE: must be set if using IBC's "reject" mode
 | 
					 | 
				
			||||||
TrustedIPs=127.0.0.1
 | 
					 | 
				
			||||||
; RemoteHostOrderRouting=ndc1.ibllc.com
 | 
					 | 
				
			||||||
; WriteDebug=true
 | 
					 | 
				
			||||||
; RemotePortOrderRouting=4001
 | 
					 | 
				
			||||||
; useRemoteSettings=false
 | 
					 | 
				
			||||||
; tradingMode=p
 | 
					 | 
				
			||||||
; Steps=8
 | 
					 | 
				
			||||||
; colorPalletName=dark
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# window geo, this may be useful for sending `xdotool` commands?
 | 
					 | 
				
			||||||
; MainWindow.Width=1986
 | 
					 | 
				
			||||||
; screenHeight=3960
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
[Logon]
 | 
					 | 
				
			||||||
Locale=en
 | 
					 | 
				
			||||||
# most markets are oriented around this zone
 | 
					 | 
				
			||||||
# so might as well hard code it.
 | 
					 | 
				
			||||||
TimeZone=America/New_York
 | 
					 | 
				
			||||||
UseSSL=true
 | 
					 | 
				
			||||||
displayedproxymsg=1
 | 
					 | 
				
			||||||
os_titlebar=true
 | 
					 | 
				
			||||||
s3store=true
 | 
					 | 
				
			||||||
useRemoteSettings=false
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
[Communication]
 | 
					 | 
				
			||||||
ctciAutoEncrypt=true
 | 
					 | 
				
			||||||
Region=usr
 | 
					 | 
				
			||||||
; Peer=cdc1.ibllc.com:4001
 | 
					 | 
				
			||||||
| 
						 | 
					@ -1,35 +1,16 @@
 | 
				
			||||||
#!/bin/sh
 | 
					#!/bin/sh
 | 
				
			||||||
# start vnc server and listen for connections
 | 
					 | 
				
			||||||
# on port specced in `$VNC_SERVER_PORT`
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# start VNC server
 | 
				
			||||||
x11vnc \
 | 
					x11vnc \
 | 
				
			||||||
    -listen 127.0.0.1 \
 | 
					    -ncache_cr \
 | 
				
			||||||
    -allow 127.0.0.1 \
 | 
					    -listen localhost \
 | 
				
			||||||
    -rfbport "${VNC_SERVER_PORT}" \
 | 
					 | 
				
			||||||
    -display :1 \
 | 
					    -display :1 \
 | 
				
			||||||
    -forever \
 | 
					    -forever \
 | 
				
			||||||
    -shared \
 | 
					    -shared \
 | 
				
			||||||
 | 
					    -logappend /var/log/x11vnc.log \
 | 
				
			||||||
    -bg \
 | 
					    -bg \
 | 
				
			||||||
    -nowf \
 | 
					 | 
				
			||||||
    -noxdamage \
 | 
					 | 
				
			||||||
    -noxfixes \
 | 
					 | 
				
			||||||
    -no6 \
 | 
					 | 
				
			||||||
    -noipv6 \
 | 
					    -noipv6 \
 | 
				
			||||||
 | 
					    -autoport 3003 \
 | 
				
			||||||
 | 
					    # can't use this because of ``asyncvnc`` issue:
 | 
				
			||||||
    # -nowcr \
 | 
					 | 
				
			||||||
    # TODO: can't use this because of ``asyncvnc`` issue:
 | 
					 | 
				
			||||||
    # https://github.com/barneygale/asyncvnc/issues/1
 | 
					    # https://github.com/barneygale/asyncvnc/issues/1
 | 
				
			||||||
    # -passwd 'ibcansmbz'
 | 
					    # -passwd 'ibcansmbz'
 | 
				
			||||||
 | 
					 | 
				
			||||||
    # XXX: optional graphics caching flags that seem to rekt the overlay
 | 
					 | 
				
			||||||
    # of the 2 gw windows? When running a single gateway
 | 
					 | 
				
			||||||
    # this seems to maybe optimize some memory usage?
 | 
					 | 
				
			||||||
    # -ncache_cr \
 | 
					 | 
				
			||||||
    # -ncache \
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # NOTE: this will prevent logs from going to the console.
 | 
					 | 
				
			||||||
    # -logappend /var/log/x11vnc.log \
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # where to start allocating ports
 | 
					 | 
				
			||||||
    # -autoport "${VNC_SERVER_PORT}" \
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -22,10 +22,10 @@ from typing import Optional, Union, Callable, Any
 | 
				
			||||||
from contextlib import asynccontextmanager as acm
 | 
					from contextlib import asynccontextmanager as acm
 | 
				
			||||||
from collections import defaultdict
 | 
					from collections import defaultdict
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from msgspec import Struct
 | 
					from pydantic import BaseModel
 | 
				
			||||||
import tractor
 | 
					 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
from trio_typing import TaskStatus
 | 
					from trio_typing import TaskStatus
 | 
				
			||||||
 | 
					import tractor
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from .log import get_logger, get_console_log
 | 
					from .log import get_logger, get_console_log
 | 
				
			||||||
from .brokers import get_brokermod
 | 
					from .brokers import get_brokermod
 | 
				
			||||||
| 
						 | 
					@ -47,13 +47,16 @@ _root_modules = [
 | 
				
			||||||
]
 | 
					]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class Services(Struct):
 | 
					class Services(BaseModel):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    actor_n: tractor._supervise.ActorNursery
 | 
					    actor_n: tractor._supervise.ActorNursery
 | 
				
			||||||
    service_n: trio.Nursery
 | 
					    service_n: trio.Nursery
 | 
				
			||||||
    debug_mode: bool  # tractor sub-actor debug mode flag
 | 
					    debug_mode: bool  # tractor sub-actor debug mode flag
 | 
				
			||||||
    service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}
 | 
					    service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    class Config:
 | 
				
			||||||
 | 
					        arbitrary_types_allowed = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def start_service_task(
 | 
					    async def start_service_task(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        name: str,
 | 
					        name: str,
 | 
				
			||||||
| 
						 | 
					@ -195,8 +198,9 @@ async def open_piker_runtime(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> Optional[tractor._portal.Portal]:
 | 
					) -> Optional[tractor._portal.Portal]:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    Start a piker actor who's runtime will automatically sync with
 | 
					    Start a piker actor who's runtime will automatically
 | 
				
			||||||
    existing piker actors on the local link based on configuration.
 | 
					    sync with existing piker actors in local network
 | 
				
			||||||
 | 
					    based on configuration.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    global _services
 | 
					    global _services
 | 
				
			||||||
| 
						 | 
					@ -216,7 +220,7 @@ async def open_piker_runtime(
 | 
				
			||||||
            # TODO: eventually we should be able to avoid
 | 
					            # TODO: eventually we should be able to avoid
 | 
				
			||||||
            # having the root have more then permissions to
 | 
					            # having the root have more then permissions to
 | 
				
			||||||
            # spawn other specialized daemons I think?
 | 
					            # spawn other specialized daemons I think?
 | 
				
			||||||
            enable_modules=_root_modules + enable_modules,
 | 
					            enable_modules=_root_modules,
 | 
				
			||||||
        ) as _,
 | 
					        ) as _,
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        yield tractor.current_actor()
 | 
					        yield tractor.current_actor()
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -18,10 +18,7 @@
 | 
				
			||||||
Profiling wrappers for internal libs.
 | 
					Profiling wrappers for internal libs.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
import os
 | 
					 | 
				
			||||||
import sys
 | 
					 | 
				
			||||||
import time
 | 
					import time
 | 
				
			||||||
from time import perf_counter
 | 
					 | 
				
			||||||
from functools import wraps
 | 
					from functools import wraps
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# NOTE: you can pass a flag to enable this:
 | 
					# NOTE: you can pass a flag to enable this:
 | 
				
			||||||
| 
						 | 
					@ -47,184 +44,3 @@ def timeit(fn):
 | 
				
			||||||
        return res
 | 
					        return res
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    return wrapper
 | 
					    return wrapper
 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# Modified version of ``pyqtgraph.debug.Profiler`` that
 | 
					 | 
				
			||||||
# core seems hesitant to land in:
 | 
					 | 
				
			||||||
# https://github.com/pyqtgraph/pyqtgraph/pull/2281
 | 
					 | 
				
			||||||
class Profiler(object):
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Simple profiler allowing measurement of multiple time intervals.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    By default, profilers are disabled.  To enable profiling, set the
 | 
					 | 
				
			||||||
    environment variable `PYQTGRAPHPROFILE` to a comma-separated list of
 | 
					 | 
				
			||||||
    fully-qualified names of profiled functions.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    Calling a profiler registers a message (defaulting to an increasing
 | 
					 | 
				
			||||||
    counter) that contains the time elapsed since the last call.  When the
 | 
					 | 
				
			||||||
    profiler is about to be garbage-collected, the messages are passed to the
 | 
					 | 
				
			||||||
    outer profiler if one is running, or printed to stdout otherwise.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    If `delayed` is set to False, messages are immediately printed instead.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    Example:
 | 
					 | 
				
			||||||
        def function(...):
 | 
					 | 
				
			||||||
            profiler = Profiler()
 | 
					 | 
				
			||||||
            ... do stuff ...
 | 
					 | 
				
			||||||
            profiler('did stuff')
 | 
					 | 
				
			||||||
            ... do other stuff ...
 | 
					 | 
				
			||||||
            profiler('did other stuff')
 | 
					 | 
				
			||||||
            # profiler is garbage-collected and flushed at function end
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    If this function is a method of class C, setting `PYQTGRAPHPROFILE` to
 | 
					 | 
				
			||||||
    "C.function" (without the module name) will enable this profiler.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    For regular functions, use the qualified name of the function, stripping
 | 
					 | 
				
			||||||
    only the initial "pyqtgraph." prefix from the module.
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    _profilers = os.environ.get("PYQTGRAPHPROFILE", None)
 | 
					 | 
				
			||||||
    _profilers = _profilers.split(",") if _profilers is not None else []
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    _depth = 0
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # NOTE: without this defined at the class level
 | 
					 | 
				
			||||||
    # you won't see apprpriately "nested" sub-profiler
 | 
					 | 
				
			||||||
    # instance calls.
 | 
					 | 
				
			||||||
    _msgs = []
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # set this flag to disable all or individual profilers at runtime
 | 
					 | 
				
			||||||
    disable = False
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    class DisabledProfiler(object):
 | 
					 | 
				
			||||||
        def __init__(self, *args, **kwds):
 | 
					 | 
				
			||||||
            pass
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        def __call__(self, *args):
 | 
					 | 
				
			||||||
            pass
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        def finish(self):
 | 
					 | 
				
			||||||
            pass
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        def mark(self, msg=None):
 | 
					 | 
				
			||||||
            pass
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    _disabledProfiler = DisabledProfiler()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def __new__(
 | 
					 | 
				
			||||||
        cls,
 | 
					 | 
				
			||||||
        msg=None,
 | 
					 | 
				
			||||||
        disabled='env',
 | 
					 | 
				
			||||||
        delayed=True,
 | 
					 | 
				
			||||||
        ms_threshold: float = 0.0,
 | 
					 | 
				
			||||||
    ):
 | 
					 | 
				
			||||||
        """Optionally create a new profiler based on caller's qualname.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        ``ms_threshold`` can be set to value in ms for which, if the
 | 
					 | 
				
			||||||
        total measured time  of the lifetime of this profiler is **less
 | 
					 | 
				
			||||||
        than** this value, then no profiling messages will be printed.
 | 
					 | 
				
			||||||
        Setting ``delayed=False`` disables this feature since messages
 | 
					 | 
				
			||||||
        are emitted immediately.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        """
 | 
					 | 
				
			||||||
        if (
 | 
					 | 
				
			||||||
            disabled is True
 | 
					 | 
				
			||||||
            or (
 | 
					 | 
				
			||||||
                disabled == 'env'
 | 
					 | 
				
			||||||
                and len(cls._profilers) == 0
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            return cls._disabledProfiler
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # determine the qualified name of the caller function
 | 
					 | 
				
			||||||
        caller_frame = sys._getframe(1)
 | 
					 | 
				
			||||||
        try:
 | 
					 | 
				
			||||||
            caller_object_type = type(caller_frame.f_locals["self"])
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        except KeyError:  # we are in a regular function
 | 
					 | 
				
			||||||
            qualifier = caller_frame.f_globals["__name__"].split(".", 1)[-1]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        else:  # we are in a method
 | 
					 | 
				
			||||||
            qualifier = caller_object_type.__name__
 | 
					 | 
				
			||||||
        func_qualname = qualifier + "." + caller_frame.f_code.co_name
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if disabled == 'env' and func_qualname not in cls._profilers:
 | 
					 | 
				
			||||||
            # don't do anything
 | 
					 | 
				
			||||||
            return cls._disabledProfiler
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # create an actual profiling object
 | 
					 | 
				
			||||||
        cls._depth += 1
 | 
					 | 
				
			||||||
        obj = super(Profiler, cls).__new__(cls)
 | 
					 | 
				
			||||||
        obj._name = msg or func_qualname
 | 
					 | 
				
			||||||
        obj._delayed = delayed
 | 
					 | 
				
			||||||
        obj._markCount = 0
 | 
					 | 
				
			||||||
        obj._finished = False
 | 
					 | 
				
			||||||
        obj._firstTime = obj._lastTime = perf_counter()
 | 
					 | 
				
			||||||
        obj._mt = ms_threshold
 | 
					 | 
				
			||||||
        obj._newMsg("> Entering " + obj._name)
 | 
					 | 
				
			||||||
        return obj
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def __call__(self, msg=None):
 | 
					 | 
				
			||||||
        """Register or print a new message with timing information.
 | 
					 | 
				
			||||||
        """
 | 
					 | 
				
			||||||
        if self.disable:
 | 
					 | 
				
			||||||
            return
 | 
					 | 
				
			||||||
        if msg is None:
 | 
					 | 
				
			||||||
            msg = str(self._markCount)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        self._markCount += 1
 | 
					 | 
				
			||||||
        newTime = perf_counter()
 | 
					 | 
				
			||||||
        ms = (newTime - self._lastTime) * 1000
 | 
					 | 
				
			||||||
        self._newMsg("  %s: %0.4f ms", msg, ms)
 | 
					 | 
				
			||||||
        self._lastTime = newTime
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def mark(self, msg=None):
 | 
					 | 
				
			||||||
        self(msg)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def _newMsg(self, msg, *args):
 | 
					 | 
				
			||||||
        msg = "  " * (self._depth - 1) + msg
 | 
					 | 
				
			||||||
        if self._delayed:
 | 
					 | 
				
			||||||
            self._msgs.append((msg, args))
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            print(msg % args)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def __del__(self):
 | 
					 | 
				
			||||||
        self.finish()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def finish(self, msg=None):
 | 
					 | 
				
			||||||
        """Add a final message; flush the message list if no parent profiler.
 | 
					 | 
				
			||||||
        """
 | 
					 | 
				
			||||||
        if self._finished or self.disable:
 | 
					 | 
				
			||||||
            return
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        self._finished = True
 | 
					 | 
				
			||||||
        if msg is not None:
 | 
					 | 
				
			||||||
            self(msg)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        tot_ms = (perf_counter() - self._firstTime) * 1000
 | 
					 | 
				
			||||||
        self._newMsg(
 | 
					 | 
				
			||||||
            "< Exiting %s, total time: %0.4f ms",
 | 
					 | 
				
			||||||
            self._name,
 | 
					 | 
				
			||||||
            tot_ms,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if tot_ms < self._mt:
 | 
					 | 
				
			||||||
            # print(f'{tot_ms} < {self._mt}, clearing')
 | 
					 | 
				
			||||||
            # NOTE: this list **must** be an instance var to avoid
 | 
					 | 
				
			||||||
            # deleting common messages during GC I think?
 | 
					 | 
				
			||||||
            self._msgs.clear()
 | 
					 | 
				
			||||||
        # else:
 | 
					 | 
				
			||||||
        #     print(f'{tot_ms} > {self._mt}, not clearing')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # XXX: why is this needed?
 | 
					 | 
				
			||||||
        # don't we **want to show** nested profiler messages?
 | 
					 | 
				
			||||||
        if self._msgs:  # and self._depth < 1:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # if self._msgs:
 | 
					 | 
				
			||||||
            print("\n".join([m[0] % m[1] for m in self._msgs]))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # clear all entries
 | 
					 | 
				
			||||||
            self._msgs.clear()
 | 
					 | 
				
			||||||
            # type(self)._msgs = []
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        type(self)._depth -= 1
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -33,17 +33,14 @@ import asks
 | 
				
			||||||
from fuzzywuzzy import process as fuzzy
 | 
					from fuzzywuzzy import process as fuzzy
 | 
				
			||||||
import numpy as np
 | 
					import numpy as np
 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
 | 
					from pydantic.dataclasses import dataclass
 | 
				
			||||||
 | 
					from pydantic import BaseModel
 | 
				
			||||||
import wsproto
 | 
					import wsproto
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from .._cacheables import open_cached_client
 | 
					from .._cacheables import open_cached_client
 | 
				
			||||||
from ._util import (
 | 
					from ._util import resproc, SymbolNotFound
 | 
				
			||||||
    resproc,
 | 
					 | 
				
			||||||
    SymbolNotFound,
 | 
					 | 
				
			||||||
    DataUnavailable,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from ..log import get_logger, get_console_log
 | 
					from ..log import get_logger, get_console_log
 | 
				
			||||||
from ..data import ShmArray
 | 
					from ..data import ShmArray
 | 
				
			||||||
from ..data.types import Struct
 | 
					 | 
				
			||||||
from ..data._web_bs import open_autorecon_ws, NoBsWs
 | 
					from ..data._web_bs import open_autorecon_ws, NoBsWs
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
| 
						 | 
					@ -82,14 +79,12 @@ _show_wap_in_history = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# https://binance-docs.github.io/apidocs/spot/en/#exchange-information
 | 
					# https://binance-docs.github.io/apidocs/spot/en/#exchange-information
 | 
				
			||||||
class Pair(Struct, frozen=True):
 | 
					class Pair(BaseModel):
 | 
				
			||||||
    symbol: str
 | 
					    symbol: str
 | 
				
			||||||
    status: str
 | 
					    status: str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    baseAsset: str
 | 
					    baseAsset: str
 | 
				
			||||||
    baseAssetPrecision: int
 | 
					    baseAssetPrecision: int
 | 
				
			||||||
    cancelReplaceAllowed: bool
 | 
					 | 
				
			||||||
    allowTrailingStop: bool
 | 
					 | 
				
			||||||
    quoteAsset: str
 | 
					    quoteAsset: str
 | 
				
			||||||
    quotePrecision: int
 | 
					    quotePrecision: int
 | 
				
			||||||
    quoteAssetPrecision: int
 | 
					    quoteAssetPrecision: int
 | 
				
			||||||
| 
						 | 
					@ -109,14 +104,14 @@ class Pair(Struct, frozen=True):
 | 
				
			||||||
    permissions: list[str]
 | 
					    permissions: list[str]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class OHLC(Struct):
 | 
					@dataclass
 | 
				
			||||||
    '''
 | 
					class OHLC:
 | 
				
			||||||
    Description of the flattened OHLC quote format.
 | 
					    """Description of the flattened OHLC quote format.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    For schema details see:
 | 
					    For schema details see:
 | 
				
			||||||
    https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams
 | 
					    https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    """
 | 
				
			||||||
    time: int
 | 
					    time: int
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    open: float
 | 
					    open: float
 | 
				
			||||||
| 
						 | 
					@ -265,7 +260,6 @@ class Client:
 | 
				
			||||||
        for i, bar in enumerate(bars):
 | 
					        for i, bar in enumerate(bars):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            bar = OHLC(*bar)
 | 
					            bar = OHLC(*bar)
 | 
				
			||||||
            bar.typecast()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            row = []
 | 
					            row = []
 | 
				
			||||||
            for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
 | 
					            for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
 | 
				
			||||||
| 
						 | 
					@ -293,7 +287,7 @@ async def get_client() -> Client:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# validation type
 | 
					# validation type
 | 
				
			||||||
class AggTrade(Struct):
 | 
					class AggTrade(BaseModel):
 | 
				
			||||||
    e: str  # Event type
 | 
					    e: str  # Event type
 | 
				
			||||||
    E: int  # Event time
 | 
					    E: int  # Event time
 | 
				
			||||||
    s: str  # Symbol
 | 
					    s: str  # Symbol
 | 
				
			||||||
| 
						 | 
					@ -347,9 +341,7 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        elif msg.get('e') == 'aggTrade':
 | 
					        elif msg.get('e') == 'aggTrade':
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # NOTE: this is purely for a definition, ``msgspec.Struct``
 | 
					            # validate
 | 
				
			||||||
            # does not runtime-validate until you decode/encode.
 | 
					 | 
				
			||||||
            # see: https://jcristharif.com/msgspec/structs.html#type-validation
 | 
					 | 
				
			||||||
            msg = AggTrade(**msg)
 | 
					            msg = AggTrade(**msg)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # TODO: type out and require this quote format
 | 
					            # TODO: type out and require this quote format
 | 
				
			||||||
| 
						 | 
					@ -360,8 +352,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
 | 
				
			||||||
                'brokerd_ts': time.time(),
 | 
					                'brokerd_ts': time.time(),
 | 
				
			||||||
                'ticks': [{
 | 
					                'ticks': [{
 | 
				
			||||||
                    'type': 'trade',
 | 
					                    'type': 'trade',
 | 
				
			||||||
                    'price': float(msg.p),
 | 
					                    'price': msg.p,
 | 
				
			||||||
                    'size': float(msg.q),
 | 
					                    'size': msg.q,
 | 
				
			||||||
                    'broker_ts': msg.T,
 | 
					                    'broker_ts': msg.T,
 | 
				
			||||||
                }],
 | 
					                }],
 | 
				
			||||||
            }
 | 
					            }
 | 
				
			||||||
| 
						 | 
					@ -392,7 +384,6 @@ async def open_history_client(
 | 
				
			||||||
    async with open_cached_client('binance') as client:
 | 
					    async with open_cached_client('binance') as client:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        async def get_ohlc(
 | 
					        async def get_ohlc(
 | 
				
			||||||
            timeframe: float,
 | 
					 | 
				
			||||||
            end_dt: Optional[datetime] = None,
 | 
					            end_dt: Optional[datetime] = None,
 | 
				
			||||||
            start_dt: Optional[datetime] = None,
 | 
					            start_dt: Optional[datetime] = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -401,8 +392,6 @@ async def open_history_client(
 | 
				
			||||||
            datetime,  # start
 | 
					            datetime,  # start
 | 
				
			||||||
            datetime,  # end
 | 
					            datetime,  # end
 | 
				
			||||||
        ]:
 | 
					        ]:
 | 
				
			||||||
            if timeframe != 60:
 | 
					 | 
				
			||||||
                raise DataUnavailable('Only 1m bars are supported')
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            array = await client.bars(
 | 
					            array = await client.bars(
 | 
				
			||||||
                symbol,
 | 
					                symbol,
 | 
				
			||||||
| 
						 | 
					@ -459,7 +448,7 @@ async def stream_quotes(
 | 
				
			||||||
            d = cache[sym.upper()]
 | 
					            d = cache[sym.upper()]
 | 
				
			||||||
            syminfo = Pair(**d)  # validation
 | 
					            syminfo = Pair(**d)  # validation
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            si = sym_infos[sym] = syminfo.to_dict()
 | 
					            si = sym_infos[sym] = syminfo.dict()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # XXX: after manually inspecting the response format we
 | 
					            # XXX: after manually inspecting the response format we
 | 
				
			||||||
            # just directly pick out the info we need
 | 
					            # just directly pick out the info we need
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -39,148 +39,6 @@ _config_dir = click.get_app_dir('piker')
 | 
				
			||||||
_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
 | 
					_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
OK = '\033[92m'
 | 
					 | 
				
			||||||
WARNING = '\033[93m'
 | 
					 | 
				
			||||||
FAIL = '\033[91m'
 | 
					 | 
				
			||||||
ENDC = '\033[0m'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def print_ok(s: str, **kwargs):
 | 
					 | 
				
			||||||
    print(OK + s + ENDC, **kwargs)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def print_error(s: str, **kwargs):
 | 
					 | 
				
			||||||
    print(FAIL + s + ENDC, **kwargs)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def get_method(client, meth_name: str):
 | 
					 | 
				
			||||||
    print(f'checking client for method \'{meth_name}\'...', end='', flush=True)
 | 
					 | 
				
			||||||
    method = getattr(client, meth_name, None)
 | 
					 | 
				
			||||||
    assert method
 | 
					 | 
				
			||||||
    print_ok('found!.')
 | 
					 | 
				
			||||||
    return method
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def run_method(client, meth_name: str, **kwargs):
 | 
					 | 
				
			||||||
    method = get_method(client, meth_name)
 | 
					 | 
				
			||||||
    print('running...', end='', flush=True)
 | 
					 | 
				
			||||||
    result = await method(**kwargs)
 | 
					 | 
				
			||||||
    print_ok(f'done! result: {type(result)}')
 | 
					 | 
				
			||||||
    return result
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def run_test(broker_name: str):
 | 
					 | 
				
			||||||
    brokermod = get_brokermod(broker_name)
 | 
					 | 
				
			||||||
    total = 0
 | 
					 | 
				
			||||||
    passed = 0
 | 
					 | 
				
			||||||
    failed = 0
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    print(f'getting client...', end='', flush=True)
 | 
					 | 
				
			||||||
    if not hasattr(brokermod, 'get_client'):
 | 
					 | 
				
			||||||
        print_error('fail! no \'get_client\' context manager found.')
 | 
					 | 
				
			||||||
        return
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async with brokermod.get_client(is_brokercheck=True) as client:
 | 
					 | 
				
			||||||
        print_ok(f'done! inside client context.')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # check for methods present on brokermod
 | 
					 | 
				
			||||||
        method_list = [
 | 
					 | 
				
			||||||
            'backfill_bars',
 | 
					 | 
				
			||||||
            'get_client',
 | 
					 | 
				
			||||||
            'trades_dialogue',
 | 
					 | 
				
			||||||
            'open_history_client',
 | 
					 | 
				
			||||||
            'open_symbol_search',
 | 
					 | 
				
			||||||
            'stream_quotes',
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        ]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        for method in method_list:
 | 
					 | 
				
			||||||
            print(
 | 
					 | 
				
			||||||
                f'checking brokermod for method \'{method}\'...',
 | 
					 | 
				
			||||||
                end='', flush=True)
 | 
					 | 
				
			||||||
            if not hasattr(brokermod, method):
 | 
					 | 
				
			||||||
                print_error(f'fail! method \'{method}\' not found.')
 | 
					 | 
				
			||||||
                failed += 1
 | 
					 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                print_ok('done!')
 | 
					 | 
				
			||||||
                passed += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            total += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # check for methods present con brokermod.Client and their
 | 
					 | 
				
			||||||
        # results
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # for private methods only check is present
 | 
					 | 
				
			||||||
        method_list = [
 | 
					 | 
				
			||||||
            'get_balances',
 | 
					 | 
				
			||||||
            'get_assets',
 | 
					 | 
				
			||||||
            'get_trades',
 | 
					 | 
				
			||||||
            'get_xfers',
 | 
					 | 
				
			||||||
            'submit_limit',
 | 
					 | 
				
			||||||
            'submit_cancel',
 | 
					 | 
				
			||||||
            'search_symbols',
 | 
					 | 
				
			||||||
        ]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        for method_name in method_list:
 | 
					 | 
				
			||||||
            try:
 | 
					 | 
				
			||||||
                get_method(client, method_name)
 | 
					 | 
				
			||||||
                passed += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            except AssertionError:
 | 
					 | 
				
			||||||
                print_error(f'fail! method \'{method_name}\' not found.')
 | 
					 | 
				
			||||||
                failed += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            total += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # check for methods present con brokermod.Client and their
 | 
					 | 
				
			||||||
        # results
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        syms = await run_method(client, 'symbol_info')
 | 
					 | 
				
			||||||
        total += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if len(syms) == 0:
 | 
					 | 
				
			||||||
            raise BaseException('Empty Symbol list?')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        passed += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        first_sym = tuple(syms.keys())[0]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        method_list = [
 | 
					 | 
				
			||||||
            ('cache_symbols', {}),
 | 
					 | 
				
			||||||
            ('search_symbols', {'pattern': first_sym[:-1]}),
 | 
					 | 
				
			||||||
            ('bars', {'symbol': first_sym})
 | 
					 | 
				
			||||||
        ]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        for method_name, method_kwargs in method_list:
 | 
					 | 
				
			||||||
            try:
 | 
					 | 
				
			||||||
                await run_method(client, method_name, **method_kwargs)
 | 
					 | 
				
			||||||
                passed += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            except AssertionError:
 | 
					 | 
				
			||||||
                print_error(f'fail! method \'{method_name}\' not found.')
 | 
					 | 
				
			||||||
                failed += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            total += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        print(f'total: {total}, passed: {passed}, failed: {failed}')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@cli.command()
 | 
					 | 
				
			||||||
@click.argument('broker', nargs=1, required=True)
 | 
					 | 
				
			||||||
@click.pass_obj
 | 
					 | 
				
			||||||
def brokercheck(config, broker):
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Test broker apis for completeness.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    async def bcheck_main():
 | 
					 | 
				
			||||||
        async with maybe_spawn_brokerd(broker) as portal:
 | 
					 | 
				
			||||||
            await portal.run(run_test, broker)
 | 
					 | 
				
			||||||
            await portal.cancel_actor()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    trio.run(run_test, broker)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@cli.command()
 | 
					@cli.command()
 | 
				
			||||||
@click.option('--keys', '-k', multiple=True,
 | 
					@click.option('--keys', '-k', multiple=True,
 | 
				
			||||||
              help='Return results only for these keys')
 | 
					              help='Return results only for these keys')
 | 
				
			||||||
| 
						 | 
					@ -335,8 +193,6 @@ def contracts(ctx, loglevel, broker, symbol, ids):
 | 
				
			||||||
    brokermod = get_brokermod(broker)
 | 
					    brokermod = get_brokermod(broker)
 | 
				
			||||||
    get_console_log(loglevel)
 | 
					    get_console_log(loglevel)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    contracts = trio.run(partial(core.contracts, brokermod, symbol))
 | 
					    contracts = trio.run(partial(core.contracts, brokermod, symbol))
 | 
				
			||||||
    if not ids:
 | 
					    if not ids:
 | 
				
			||||||
        # just print out expiry dates which can be used with
 | 
					        # just print out expiry dates which can be used with
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -1,70 +0,0 @@
 | 
				
			||||||
``deribit`` backend
 | 
					 | 
				
			||||||
------------------
 | 
					 | 
				
			||||||
pretty good liquidity crypto derivatives, uses custom json rpc over ws for
 | 
					 | 
				
			||||||
client methods, then `cryptofeed` for data streams.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
status
 | 
					 | 
				
			||||||
******
 | 
					 | 
				
			||||||
- supports option charts
 | 
					 | 
				
			||||||
- no order support yet 
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
config
 | 
					 | 
				
			||||||
******
 | 
					 | 
				
			||||||
In order to get order mode support your ``brokers.toml``
 | 
					 | 
				
			||||||
needs to have something like the following:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. code:: toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    [deribit]
 | 
					 | 
				
			||||||
    key_id = 'XXXXXXXX'
 | 
					 | 
				
			||||||
    key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
To obtain an api id and secret you need to create an account, which can be a
 | 
					 | 
				
			||||||
real market account over at:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    - deribit.com  (requires KYC for deposit address)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
Or a testnet account over at:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    - test.deribit.com
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
For testnet once the account is created here is how you deposit fake crypto to
 | 
					 | 
				
			||||||
try it out:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
1) Go to Wallet:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. figure:: assets/0_wallet.png
 | 
					 | 
				
			||||||
    :align: center
 | 
					 | 
				
			||||||
    :target: assets/0_wallet.png
 | 
					 | 
				
			||||||
    :alt: wallet page
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
2) Then click on the elipsis menu and select deposit
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. figure:: assets/1_wallet_select_deposit.png
 | 
					 | 
				
			||||||
    :align: center
 | 
					 | 
				
			||||||
    :target: assets/1_wallet_select_deposit.png
 | 
					 | 
				
			||||||
    :alt: wallet deposit page
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
3) This will take you to the deposit address page
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. figure:: assets/2_gen_deposit_addr.png
 | 
					 | 
				
			||||||
    :align: center
 | 
					 | 
				
			||||||
    :target: assets/2_gen_deposit_addr.png
 | 
					 | 
				
			||||||
    :alt: generate deposit address page
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
4) After clicking generate you should see the address, copy it and go to the 
 | 
					 | 
				
			||||||
`coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send fake
 | 
					 | 
				
			||||||
coins to that address.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. figure:: assets/3_deposit_address.png
 | 
					 | 
				
			||||||
    :align: center
 | 
					 | 
				
			||||||
    :target: assets/3_deposit_address.png
 | 
					 | 
				
			||||||
    :alt: generated address
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
5) Back in the deposit address page you should see the deposit in your history
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. figure:: assets/4_wallet_deposit_history.png
 | 
					 | 
				
			||||||
    :align: center
 | 
					 | 
				
			||||||
    :target: assets/4_wallet_deposit_history.png
 | 
					 | 
				
			||||||
    :alt: wallet deposit history
 | 
					 | 
				
			||||||
| 
						 | 
					@ -1,65 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
Deribit backend.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from piker.log import get_logger
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
log = get_logger(__name__)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from .api import (
 | 
					 | 
				
			||||||
    get_client,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from .feed import (
 | 
					 | 
				
			||||||
    open_history_client,
 | 
					 | 
				
			||||||
    open_symbol_search,
 | 
					 | 
				
			||||||
    stream_quotes,
 | 
					 | 
				
			||||||
    backfill_bars
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
# from .broker import (
 | 
					 | 
				
			||||||
    # trades_dialogue,
 | 
					 | 
				
			||||||
    # norm_trade_records,
 | 
					 | 
				
			||||||
# )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
__all__ = [
 | 
					 | 
				
			||||||
    'get_client',
 | 
					 | 
				
			||||||
#    'trades_dialogue',
 | 
					 | 
				
			||||||
    'open_history_client',
 | 
					 | 
				
			||||||
    'open_symbol_search',
 | 
					 | 
				
			||||||
    'stream_quotes',
 | 
					 | 
				
			||||||
#    'norm_trade_records',
 | 
					 | 
				
			||||||
]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# tractor RPC enable arg
 | 
					 | 
				
			||||||
__enable_modules__: list[str] = [
 | 
					 | 
				
			||||||
    'api',
 | 
					 | 
				
			||||||
    'feed',
 | 
					 | 
				
			||||||
#   'broker',
 | 
					 | 
				
			||||||
]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# passed to ``tractor.ActorNursery.start_actor()``
 | 
					 | 
				
			||||||
_spawn_kwargs = {
 | 
					 | 
				
			||||||
    'infect_asyncio': True,
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# annotation to let backend agnostic code
 | 
					 | 
				
			||||||
# know if ``brokerd`` should be spawned with
 | 
					 | 
				
			||||||
# ``tractor``'s aio mode.
 | 
					 | 
				
			||||||
_infect_asyncio: bool = True
 | 
					 | 
				
			||||||
| 
						 | 
					@ -1,667 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
Deribit backend.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
import json
 | 
					 | 
				
			||||||
import time
 | 
					 | 
				
			||||||
import asyncio
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from contextlib import asynccontextmanager as acm, AsyncExitStack
 | 
					 | 
				
			||||||
from functools import partial
 | 
					 | 
				
			||||||
from datetime import datetime
 | 
					 | 
				
			||||||
from typing import Any, Optional, Iterable, Callable
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import pendulum
 | 
					 | 
				
			||||||
import asks
 | 
					 | 
				
			||||||
import trio
 | 
					 | 
				
			||||||
from trio_typing import Nursery, TaskStatus
 | 
					 | 
				
			||||||
from fuzzywuzzy import process as fuzzy
 | 
					 | 
				
			||||||
import numpy as np
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from piker.data.types import Struct
 | 
					 | 
				
			||||||
from piker.data._web_bs import (
 | 
					 | 
				
			||||||
    NoBsWs,
 | 
					 | 
				
			||||||
    open_autorecon_ws,
 | 
					 | 
				
			||||||
    open_jsonrpc_session
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from .._util import resproc
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from piker import config
 | 
					 | 
				
			||||||
from piker.log import get_logger
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from tractor.trionics import (
 | 
					 | 
				
			||||||
    broadcast_receiver,
 | 
					 | 
				
			||||||
    BroadcastReceiver,
 | 
					 | 
				
			||||||
    maybe_open_context
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from tractor import to_asyncio
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from cryptofeed import FeedHandler
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from cryptofeed.defines import (
 | 
					 | 
				
			||||||
    DERIBIT,
 | 
					 | 
				
			||||||
    L1_BOOK, TRADES,
 | 
					 | 
				
			||||||
    OPTION, CALL, PUT
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from cryptofeed.symbols import Symbol
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
log = get_logger(__name__)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
_spawn_kwargs = {
 | 
					 | 
				
			||||||
    'infect_asyncio': True,
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
_url = 'https://www.deribit.com'
 | 
					 | 
				
			||||||
_ws_url = 'wss://www.deribit.com/ws/api/v2'
 | 
					 | 
				
			||||||
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# Broker specific ohlc schema (rest)
 | 
					 | 
				
			||||||
_ohlc_dtype = [
 | 
					 | 
				
			||||||
    ('index', int),
 | 
					 | 
				
			||||||
    ('time', int),
 | 
					 | 
				
			||||||
    ('open', float),
 | 
					 | 
				
			||||||
    ('high', float),
 | 
					 | 
				
			||||||
    ('low', float),
 | 
					 | 
				
			||||||
    ('close', float),
 | 
					 | 
				
			||||||
    ('volume', float),
 | 
					 | 
				
			||||||
    ('bar_wap', float),  # will be zeroed by sampler if not filled
 | 
					 | 
				
			||||||
]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class JSONRPCResult(Struct):
 | 
					 | 
				
			||||||
    jsonrpc: str = '2.0'
 | 
					 | 
				
			||||||
    id: int
 | 
					 | 
				
			||||||
    result: Optional[dict] = None
 | 
					 | 
				
			||||||
    error: Optional[dict] = None
 | 
					 | 
				
			||||||
    usIn: int 
 | 
					 | 
				
			||||||
    usOut: int 
 | 
					 | 
				
			||||||
    usDiff: int 
 | 
					 | 
				
			||||||
    testnet: bool
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class KLinesResult(Struct):
 | 
					 | 
				
			||||||
    close: list[float]
 | 
					 | 
				
			||||||
    cost: list[float]
 | 
					 | 
				
			||||||
    high: list[float]
 | 
					 | 
				
			||||||
    low: list[float]
 | 
					 | 
				
			||||||
    open: list[float]
 | 
					 | 
				
			||||||
    status: str
 | 
					 | 
				
			||||||
    ticks: list[int]
 | 
					 | 
				
			||||||
    volume: list[float]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class Trade(Struct):
 | 
					 | 
				
			||||||
    trade_seq: int
 | 
					 | 
				
			||||||
    trade_id: str
 | 
					 | 
				
			||||||
    timestamp: int
 | 
					 | 
				
			||||||
    tick_direction: int
 | 
					 | 
				
			||||||
    price: float
 | 
					 | 
				
			||||||
    mark_price: float
 | 
					 | 
				
			||||||
    iv: float
 | 
					 | 
				
			||||||
    instrument_name: str
 | 
					 | 
				
			||||||
    index_price: float
 | 
					 | 
				
			||||||
    direction: str
 | 
					 | 
				
			||||||
    combo_trade_id: Optional[int] = 0,
 | 
					 | 
				
			||||||
    combo_id: Optional[str] = '',
 | 
					 | 
				
			||||||
    amount: float
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class LastTradesResult(Struct):
 | 
					 | 
				
			||||||
    trades: list[Trade]
 | 
					 | 
				
			||||||
    has_more: bool
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# convert datetime obj timestamp to unixtime in milliseconds
 | 
					 | 
				
			||||||
def deribit_timestamp(when):
 | 
					 | 
				
			||||||
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def str_to_cb_sym(name: str) -> Symbol:
 | 
					 | 
				
			||||||
    base, strike_price, expiry_date, option_type = name.split('-')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    quote = base
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if option_type == 'put':
 | 
					 | 
				
			||||||
        option_type = PUT 
 | 
					 | 
				
			||||||
    elif option_type  == 'call':
 | 
					 | 
				
			||||||
        option_type = CALL
 | 
					 | 
				
			||||||
    else:
 | 
					 | 
				
			||||||
        raise Exception("Couldn\'t parse option type")
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    return Symbol(
 | 
					 | 
				
			||||||
        base, quote,
 | 
					 | 
				
			||||||
        type=OPTION,
 | 
					 | 
				
			||||||
        strike_price=strike_price,
 | 
					 | 
				
			||||||
        option_type=option_type,
 | 
					 | 
				
			||||||
        expiry_date=expiry_date,
 | 
					 | 
				
			||||||
        expiry_normalize=False)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def piker_sym_to_cb_sym(name: str) -> Symbol:
 | 
					 | 
				
			||||||
    base, expiry_date, strike_price, option_type = tuple(
 | 
					 | 
				
			||||||
        name.upper().split('-'))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    quote = base
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if option_type == 'P':
 | 
					 | 
				
			||||||
        option_type = PUT 
 | 
					 | 
				
			||||||
    elif option_type  == 'C':
 | 
					 | 
				
			||||||
        option_type = CALL
 | 
					 | 
				
			||||||
    else:
 | 
					 | 
				
			||||||
        raise Exception("Couldn\'t parse option type")
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    return Symbol(
 | 
					 | 
				
			||||||
        base, quote,
 | 
					 | 
				
			||||||
        type=OPTION,
 | 
					 | 
				
			||||||
        strike_price=strike_price,
 | 
					 | 
				
			||||||
        option_type=option_type,
 | 
					 | 
				
			||||||
        expiry_date=expiry_date.upper())
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def cb_sym_to_deribit_inst(sym: Symbol):
 | 
					 | 
				
			||||||
    # cryptofeed normalized
 | 
					 | 
				
			||||||
    cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # deribit specific 
 | 
					 | 
				
			||||||
    months = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN', 'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    exp = sym.expiry_date
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # YYMDD
 | 
					 | 
				
			||||||
    # 01234
 | 
					 | 
				
			||||||
    year, month, day = (
 | 
					 | 
				
			||||||
        exp[:2], months[cb_norm.index(exp[2:3])], exp[3:])
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    otype = 'C' if sym.option_type == CALL else 'P'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def get_config() -> dict[str, Any]:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    conf, path = config.load()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    section = conf.get('deribit')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO: document why we send this, basically because logging params for cryptofeed
 | 
					 | 
				
			||||||
    conf['log'] = {}
 | 
					 | 
				
			||||||
    conf['log']['disabled'] = True
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if section is None:
 | 
					 | 
				
			||||||
        log.warning(f'No config section found for deribit in {path}')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    return conf 
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class Client:
    '''
    Thin client over deribit's JSON-RPC api.

    All requests go through the ``json_rpc`` callable handed in at
    construction (a jsonrpc websocket session opened by
    ``get_client()``).

    '''

    def __init__(self, json_rpc: Callable) -> None:
        # lazily-filled instrument info cache, see ``symbol_info()``
        self._pairs: dict[str, Any] = None

        config = get_config().get('deribit', {})

        if ('key_id' in config) and ('key_secret' in config):
            self._key_id = config['key_id']
            self._key_secret = config['key_secret']

        else:
            # no creds configured; only public endpoints will work
            self._key_id = None
            self._key_secret = None

        self.json_rpc = json_rpc

    @property
    def currencies(self):
        # currencies iterated for balance/position queries
        return ['btc', 'eth', 'sol', 'usd']

    async def get_balances(self, kind: str = 'option') -> dict[str, float]:
        """Return the set of positions for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_positions', params={
                    'currency': currency.upper(),
                    'kind': kind})

            balances[currency] = resp.result

        return balances

    async def get_assets(self) -> dict[str, float]:
        """Return the set of asset balances for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_account_summary', params={
                    'currency': currency.upper()})

            balances[currency] = resp.result['balance']

        return balances

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float
    ) -> dict:
        """Place a limit order.

        ``action`` is interpolated straight into the rpc method name so
        it must be one of deribit's order methods, eg. ``'buy'`` or
        ``'sell'``.
        """
        params = {
            'instrument_name': symbol.upper(),
            'amount': size,
            'type': 'limit',
            'price': price,
        }
        resp = await self.json_rpc(
            f'private/{action}', params)

        return resp.result

    async def submit_cancel(self, oid: str):
        """Send cancel request for order id
        """
        resp = await self.json_rpc(
            'private/cancel', {'order_id': oid})
        return resp.result

    async def symbol_info(
        self,
        instrument: Optional[str] = None,
        currency: str = 'btc',  # BTC, ETH, SOL, USDC
        kind: str = 'option',
        expired: bool = False
    ) -> dict[str, Any]:
        """Get symbol info for the exchange.

        Returns the full ``{name: info}`` map, or just the entry for
        ``instrument`` when given.  NOTE: once the cache is warm the
        filter params are ignored and the cached map is returned as-is.
        """
        if self._pairs:
            return self._pairs

        # will retrieve all symbols by default
        params = {
            'currency': currency.upper(),
            'kind': kind,
            'expired': str(expired).lower()
        }

        resp = await self.json_rpc('public/get_instruments', params)
        results = resp.result

        instruments = {
            item['instrument_name'].lower(): item
            for item in results
        }

        if instrument is not None:
            return instruments[instrument]
        else:
            return instruments

    async def cache_symbols(
        self,
    ) -> dict:
        # warm (and return) the instrument info cache
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = 30,
    ) -> dict[str, Any]:
        """Fuzzy-search cached instrument names for ``pattern``.
        """
        data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=35,
            limit=limit
        )
        # repack in dict form
        return {item[0]['instrument_name'].lower(): item[0]
                for item in matches}

    async def bars(
        self,
        symbol: str,
        start_dt: Optional[datetime] = None,
        end_dt: Optional[datetime] = None,
        limit: int = 1000,
        as_np: bool = True,
    ) -> dict:
        """Retrieve 1m OHLCV history for ``symbol``.

        Defaults to the last ``limit`` minutes ending now.  Returns a
        numpy structured array, or the raw list of bar tuples when
        ``as_np=False``.
        """
        instrument = symbol

        if end_dt is None:
            end_dt = pendulum.now('UTC')

        if start_dt is None:
            start_dt = end_dt.start_of(
                'minute').subtract(minutes=limit)

        start_time = deribit_timestamp(start_dt)
        end_time = deribit_timestamp(end_dt)

        # https://docs.deribit.com/#public-get_tradingview_chart_data
        resp = await self.json_rpc(
            'public/get_tradingview_chart_data',
            params={
                'instrument_name': instrument.upper(),
                'start_timestamp': start_time,
                'end_timestamp': end_time,
                'resolution': '1'
            })

        result = KLinesResult(**resp.result)
        new_bars = []
        for i in range(len(result.close)):

            row = [
                # bars are minute-spaced from ``start_time`` (ms -> s)
                (start_time + (i * (60 * 1000))) / 1000.0,  # time
                result.open[i],
                result.high[i],
                result.low[i],
                result.close[i],
                result.volume[i],
                0
            ]

            new_bars.append((i,) + tuple(row))

        # FIX: the ``as_np=False`` branch previously evaluated the
        # undefined name ``klines`` (a ``NameError``); return the
        # assembled rows instead.
        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else new_bars
        return array

    async def last_trades(
        self,
        instrument: str,
        count: int = 10
    ):
        """Fetch the most recent ``count`` trades for ``instrument``.
        """
        resp = await self.json_rpc(
            'public/get_last_trades_by_instrument',
            params={
                'instrument_name': instrument,
                'count': count
            })

        return LastTradesResult(**resp.result)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
async def get_client(
    is_brokercheck: bool = False
) -> Client:
    '''
    Open a jsonrpc websocket session, spawn the background token-auth
    task (when creds are configured) and yield a ready ``Client``.

    '''
    async with (
        trio.open_nursery() as n,
        open_jsonrpc_session(
            _testnet_ws_url, dtype=JSONRPCResult) as json_rpc
    ):
        client = Client(json_rpc)

        _refresh_token: Optional[str] = None
        _access_token: Optional[str] = None

        async def _auth_loop(
            task_status: TaskStatus = trio.TASK_STATUS_IGNORED
        ):
            """Background task that acquires a first access token and then
            refreshes the access token while the nursery isn't cancelled.

            https://docs.deribit.com/?python#authentication-2
            """
            renew_time = 10
            access_scope = 'trade:read_write'
            _expiry_time = time.time()
            got_access = False
            nonlocal _refresh_token
            nonlocal _access_token

            while True:
                # FIX: re-auth when the token's *remaining lifetime*
                # drops below ``renew_time``.  The previous check
                # (``time.time() - _expiry_time < renew_time``) was
                # inverted: after the first auth the lhs is hugely
                # negative, so the branch was always taken and the loop
                # re-authenticated continuously without ever sleeping.
                if _expiry_time - time.time() < renew_time:
                    # we are close to (or past) token expiry time

                    if _refresh_token is not None:
                        # if we have a refresh token already dont need to
                        # send the secret
                        params = {
                            'grant_type': 'refresh_token',
                            'refresh_token': _refresh_token,
                            'scope': access_scope
                        }

                    else:
                        # we don't have refresh token, send secret to
                        # initialize
                        params = {
                            'grant_type': 'client_credentials',
                            'client_id': client._key_id,
                            'client_secret': client._key_secret,
                            'scope': access_scope
                        }

                    resp = await json_rpc('public/auth', params)
                    result = resp.result

                    _expiry_time = time.time() + result['expires_in']
                    _refresh_token = result['refresh_token']

                    if 'access_token' in result:
                        _access_token = result['access_token']

                    if not got_access:
                        # first time this loop runs we must indicate task
                        # is started, we have auth
                        got_access = True
                        task_status.started()

                else:
                    await trio.sleep(renew_time / 2)

        # if we have client creds launch auth loop
        if client._key_id is not None:
            await n.start(_auth_loop)

        await client.cache_symbols()
        yield client
        n.cancel_scope.cancel()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
async def open_feed_handler():
    '''
    Instantiate a cryptofeed ``FeedHandler`` configured from the piker
    broker config, yield it, then stop it on exit.

    '''
    handler = FeedHandler(config=get_config())
    yield handler
    await to_asyncio.run_task(handler.stop_async)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
async def maybe_open_feed_handler() -> trio.abc.ReceiveStream:
    '''
    Open the process-wide ``FeedHandler`` singleton, or attach to the
    already-cached one.

    '''
    async with maybe_open_context(
        acm_func=open_feed_handler,
        key='feedhandler',
    ) as (_cache_hit, handler):
        yield handler
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def aio_price_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    '''
    asyncio-side task that subscribes ``instrument`` to deribit trade
    and L1-book channels via cryptofeed and relays each event into trio
    land over ``to_trio``.

    Runs forever (``sleep(inf)``) once the feed is wired up; expected to
    be spawned via ``to_asyncio.open_channel_from()``.
    '''
    # relay each trade event as a ('trade', msg) pair
    async def _trade(data: dict, receipt_timestamp):
        to_trio.send_nowait(('trade', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'last': data,
            'broker_ts': time.time(),
            'data': data.to_dict(),
            'receipt': receipt_timestamp
        }))

    # relay each book-top update as a ('l1', msg) pair; the bid/bsize
    # (and ask/asize) tick entries deliberately carry the same
    # price/size payload under two tick type labels
    async def _l1(data: dict, receipt_timestamp):
        to_trio.send_nowait(('l1', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'ticks': [
                {'type': 'bid',
                    'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'bsize',
                    'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'ask',
                    'price': float(data.ask_price), 'size': float(data.ask_size)},
                {'type': 'asize',
                    'price': float(data.ask_price), 'size': float(data.ask_size)}
            ]
        }))

    fh.add_feed(
        DERIBIT,
        channels=[TRADES, L1_BOOK],
        symbols=[piker_sym_to_cb_sym(instrument)],
        callbacks={
            TRADES: _trade,
            L1_BOOK: _l1
        })

    # start the (possibly shared) handler exactly once
    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    # park forever; cancellation comes from the trio side
    await asyncio.sleep(float('inf'))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
async def open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    '''
    Start the asyncio-side price relay for ``instrument`` on the shared
    ``FeedHandler`` and yield the trio receive channel it feeds.

    '''
    async with (
        maybe_open_feed_handler() as fh,
        to_asyncio.open_channel_from(
            partial(
                aio_price_feed_relay,
                fh,
                instrument,
            )
        ) as (_first, chan),
    ):
        yield chan
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
async def maybe_open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    '''
    Yield the cached price feed for ``instrument``, creating it if
    needed; attachers to an existing feed get a broadcast receiver.

    '''
    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_price_feed,
        kwargs={'instrument': instrument},
        key=f'{instrument}-price',
    ) as (cache_hit, feed):
        yield broadcast_receiver(feed, 10) if cache_hit else feed
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def aio_order_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    '''
    asyncio-side task subscribing ``instrument`` to deribit private
    fill and order-info channels via cryptofeed.

    NOTE(review): the fill/order callbacks are unfinished dev stubs --
    they drop into ``breakpoint()`` instead of relaying data, so this
    relay is not production-ready yet.
    '''
    # TODO: relay fill events to trio instead of breaking
    async def _fill(data: dict, receipt_timestamp):
        breakpoint()

    # TODO: relay order-status events to trio instead of breaking
    async def _order_info(data: dict, receipt_timestamp):
        breakpoint()

    fh.add_feed(
        DERIBIT,
        channels=[FILLS, ORDER_INFO],
        symbols=[instrument.upper()],
        callbacks={
            FILLS: _fill,
            ORDER_INFO: _order_info,
        })

    # start the (possibly shared) handler exactly once
    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    # park forever; cancellation comes from the trio side
    await asyncio.sleep(float('inf'))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
 | 
					 | 
				
			||||||
async def open_order_feed(
 | 
					 | 
				
			||||||
    instrument: list[str]
 | 
					 | 
				
			||||||
) -> trio.abc.ReceiveStream:
 | 
					 | 
				
			||||||
    async with maybe_open_feed_handler() as fh:
 | 
					 | 
				
			||||||
        async with to_asyncio.open_channel_from(
 | 
					 | 
				
			||||||
            partial(
 | 
					 | 
				
			||||||
                aio_order_feed_relay,
 | 
					 | 
				
			||||||
                fh,
 | 
					 | 
				
			||||||
                instrument
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
        ) as (first, chan):
 | 
					 | 
				
			||||||
            yield chan
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
async def maybe_open_order_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    '''
    Yield the cached order feed for ``instrument``, creating it if
    needed; attachers to an existing feed get a broadcast receiver.

    '''
    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_order_feed,
        kwargs={
            # FIX: a ``'fh': fh`` entry was removed here: ``fh`` was
            # unbound in this scope (guaranteed ``NameError``) and
            # ``open_order_feed`` accepts no such kwarg anyway.
            'instrument': instrument,
        },
        key=f'{instrument}-order',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed
 | 
					 | 
				
			||||||
										
											Binary file not shown.
										
									
								
							| 
		 Before Width: | Height: | Size: 169 KiB  | 
										
											Binary file not shown.
										
									
								
							| 
		 Before Width: | Height: | Size: 106 KiB  | 
										
											Binary file not shown.
										
									
								
							| 
		 Before Width: | Height: | Size: 59 KiB  | 
										
											Binary file not shown.
										
									
								
							| 
		 Before Width: | Height: | Size: 70 KiB  | 
										
											Binary file not shown.
										
									
								
							| 
		 Before Width: | Height: | Size: 132 KiB  | 
| 
						 | 
					@ -1,200 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
Deribit backend.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
from contextlib import asynccontextmanager as acm
 | 
					 | 
				
			||||||
from datetime import datetime
 | 
					 | 
				
			||||||
from typing import Any, Optional, Callable
 | 
					 | 
				
			||||||
import time
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import trio
 | 
					 | 
				
			||||||
from trio_typing import TaskStatus
 | 
					 | 
				
			||||||
import pendulum
 | 
					 | 
				
			||||||
from fuzzywuzzy import process as fuzzy
 | 
					 | 
				
			||||||
import numpy as np
 | 
					 | 
				
			||||||
import tractor
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from piker._cacheables import open_cached_client
 | 
					 | 
				
			||||||
from piker.log import get_logger, get_console_log
 | 
					 | 
				
			||||||
from piker.data import ShmArray
 | 
					 | 
				
			||||||
from piker.brokers._util import (
 | 
					 | 
				
			||||||
    BrokerError,
 | 
					 | 
				
			||||||
    DataUnavailable,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from cryptofeed import FeedHandler
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from cryptofeed.defines import (
 | 
					 | 
				
			||||||
    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from cryptofeed.symbols import Symbol
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from .api import (
 | 
					 | 
				
			||||||
    Client, Trade,
 | 
					 | 
				
			||||||
    get_config,
 | 
					 | 
				
			||||||
    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
 | 
					 | 
				
			||||||
    maybe_open_price_feed
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# tractor spawn kwargs for this backend's data actor: run it with an
# embedded asyncio loop so cryptofeed (an asyncio lib) can be driven
# from trio.
_spawn_kwargs = {
    'infect_asyncio': True,
}


# module-level logger for this backend
log = get_logger(__name__)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
async def open_history_client(
    instrument: str,
) -> tuple[Callable, int]:
    '''
    Yield an OHLC history fetcher for ``instrument`` along with the
    request-throttle settings the feed layer should apply to it.

    '''
    # TODO implement history getter for the new storage layer.
    async with open_cached_client('deribit') as client:

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            bars = await client.bars(
                instrument,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            if not len(bars):
                raise DataUnavailable

            # report the actual range received, read back from the
            # returned array itself
            first_dt = pendulum.from_timestamp(bars[0]['time'])
            last_dt = pendulum.from_timestamp(bars[-1]['time'])

            return bars, first_dt, last_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def backfill_bars(
    symbol: str,
    shm: ShmArray,  # type: ignore # noqa
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
) -> None:
    '''
    Fill historical bars into shared mem / storage afap.

    Reports its cancel scope back through ``task_status`` once the
    initial history push has completed.
    '''
    with trio.CancelScope() as cs:
        async with open_cached_client('deribit') as client:
            history = await client.bars(symbol)
            shm.push(history)
            task_status.started(cs)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def stream_quotes(
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    send_chan: trio.abc.SendChannel,
 | 
					 | 
				
			||||||
    symbols: list[str],
 | 
					 | 
				
			||||||
    feed_is_live: trio.Event,
 | 
					 | 
				
			||||||
    loglevel: str = None,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # startup sync
 | 
					 | 
				
			||||||
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> None:
 | 
					 | 
				
			||||||
    # XXX: required to propagate ``tractor`` loglevel to piker logging
 | 
					 | 
				
			||||||
    get_console_log(loglevel or tractor.current_actor().loglevel)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    sym = symbols[0]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async with (
 | 
					 | 
				
			||||||
        open_cached_client('deribit') as client,
 | 
					 | 
				
			||||||
        send_chan as send_chan
 | 
					 | 
				
			||||||
    ):
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        init_msgs = {
 | 
					 | 
				
			||||||
            # pass back token, and bool, signalling if we're the writer
 | 
					 | 
				
			||||||
            # and that history has been written
 | 
					 | 
				
			||||||
            sym: {
 | 
					 | 
				
			||||||
                'symbol_info': {
 | 
					 | 
				
			||||||
                    'asset_type': 'option',
 | 
					 | 
				
			||||||
                    'price_tick_size': 0.0005
 | 
					 | 
				
			||||||
                },
 | 
					 | 
				
			||||||
                'shm_write_opts': {'sum_tick_vml': False},
 | 
					 | 
				
			||||||
                'fqsn': sym,
 | 
					 | 
				
			||||||
            },
 | 
					 | 
				
			||||||
        }
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        nsym = piker_sym_to_cb_sym(sym)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        async with maybe_open_price_feed(sym) as stream:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            cache = await client.cache_symbols()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            last_trades = (await client.last_trades(
 | 
					 | 
				
			||||||
                cb_sym_to_deribit_inst(nsym), count=1)).trades
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            if len(last_trades) == 0:
 | 
					 | 
				
			||||||
                last_trade = None
 | 
					 | 
				
			||||||
                async for typ, quote in stream:
 | 
					 | 
				
			||||||
                    if typ == 'trade':
 | 
					 | 
				
			||||||
                        last_trade = Trade(**(quote['data']))
 | 
					 | 
				
			||||||
                        break
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                last_trade = Trade(**(last_trades[0]))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            first_quote = {
 | 
					 | 
				
			||||||
                'symbol': sym,
 | 
					 | 
				
			||||||
                'last': last_trade.price,
 | 
					 | 
				
			||||||
                'brokerd_ts': last_trade.timestamp,
 | 
					 | 
				
			||||||
                'ticks': [{
 | 
					 | 
				
			||||||
                    'type': 'trade',
 | 
					 | 
				
			||||||
                    'price': last_trade.price,
 | 
					 | 
				
			||||||
                    'size': last_trade.amount,
 | 
					 | 
				
			||||||
                    'broker_ts': last_trade.timestamp
 | 
					 | 
				
			||||||
                }]
 | 
					 | 
				
			||||||
            }
 | 
					 | 
				
			||||||
            task_status.started((init_msgs,  first_quote))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            feed_is_live.set()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            async for typ, quote in stream:
 | 
					 | 
				
			||||||
                topic = quote['symbol']
 | 
					 | 
				
			||||||
                await send_chan.send({topic: quote})
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@tractor.context
 | 
					 | 
				
			||||||
async def open_symbol_search(
 | 
					 | 
				
			||||||
    ctx: tractor.Context,
 | 
					 | 
				
			||||||
) -> Client:
 | 
					 | 
				
			||||||
    async with open_cached_client('deribit') as client:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # load all symbols locally for fast search
 | 
					 | 
				
			||||||
        cache = await client.cache_symbols()
 | 
					 | 
				
			||||||
        await ctx.started()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        async with ctx.open_stream() as stream:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            async for pattern in stream:
 | 
					 | 
				
			||||||
                # repack in dict form
 | 
					 | 
				
			||||||
                await stream.send(
 | 
					 | 
				
			||||||
                    await client.search_symbols(pattern))
 | 
					 | 
				
			||||||
| 
						 | 
					@ -1,134 +0,0 @@
 | 
				
			||||||
``ib`` backend
 | 
					 | 
				
			||||||
--------------
 | 
					 | 
				
			||||||
more or less the "everything broker" for traditional and international
 | 
					 | 
				
			||||||
markets. they are the "go to" provider for automatic retail trading
 | 
					 | 
				
			||||||
and we interface to their APIs using the `ib_insync` project.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
status
 | 
					 | 
				
			||||||
******
 | 
					 | 
				
			||||||
current support is *production grade* and both real-time data and order
 | 
					 | 
				
			||||||
management should be correct and fast. this backend is used by core devs
 | 
					 | 
				
			||||||
for live trading.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
currently there is not yet full support for:
 | 
					 | 
				
			||||||
- options charting and trading
 | 
					 | 
				
			||||||
- paxos based crypto rt feeds and trading
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
config
 | 
					 | 
				
			||||||
******
 | 
					 | 
				
			||||||
In order to get order mode support your ``brokers.toml``
 | 
					 | 
				
			||||||
needs to have something like the following:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. code:: toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
   [ib]
 | 
					 | 
				
			||||||
   hosts = [
 | 
					 | 
				
			||||||
    "127.0.0.1",
 | 
					 | 
				
			||||||
   ]
 | 
					 | 
				
			||||||
   # TODO: when we eventually spawn gateways in our
 | 
					 | 
				
			||||||
   # container, we can just dynamically allocate these
 | 
					 | 
				
			||||||
   # using IBC.
 | 
					 | 
				
			||||||
   ports = [
 | 
					 | 
				
			||||||
       4002,
 | 
					 | 
				
			||||||
       4003,
 | 
					 | 
				
			||||||
       4006,
 | 
					 | 
				
			||||||
       4001,
 | 
					 | 
				
			||||||
       7497,
 | 
					 | 
				
			||||||
   ]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
   # XXX: for a paper account the flex web query service
 | 
					 | 
				
			||||||
   # is not supported so you have to manually download
 | 
					 | 
				
			||||||
   # and XML report and put it in a location that can be
 | 
					 | 
				
			||||||
   # accessed by the ``brokerd.ib`` backend code for parsing.
 | 
					 | 
				
			||||||
   flex_token = '1111111111111111'
 | 
					 | 
				
			||||||
   flex_trades_query_id = '6969696'  # live accounts only?
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
   # 3rd party web-api token
 | 
					 | 
				
			||||||
   # (XXX: not sure if this works yet)
 | 
					 | 
				
			||||||
   trade_log_token = '111111111111111'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
   # when clients are being scanned this determines
 | 
					 | 
				
			||||||
   # which clients are preferred to be used for data feeds
 | 
					 | 
				
			||||||
   # based on account names which are detected as active
 | 
					 | 
				
			||||||
   # on each client.
 | 
					 | 
				
			||||||
   prefer_data_account = [
 | 
					 | 
				
			||||||
       # this has to be first in order to make data work with dual paper + live
 | 
					 | 
				
			||||||
       'main',
 | 
					 | 
				
			||||||
       'algopaper',
 | 
					 | 
				
			||||||
   ]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
   [ib.accounts]
 | 
					 | 
				
			||||||
   main = 'U69696969'
 | 
					 | 
				
			||||||
   algopaper = 'DU9696969'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
If everything works correctly you should see any current positions
 | 
					 | 
				
			||||||
loaded in the pps pane on chart load and you should also be able to
 | 
					 | 
				
			||||||
check your trade records in the file::
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    <pikerk_conf_dir>/ledgers/trades_ib_algopaper.toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
An example ledger file will have entries written verbatim from the
 | 
					 | 
				
			||||||
trade events schema:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. code:: toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ["0000e1a7.630f5e5a.01.01"]
 | 
					 | 
				
			||||||
    secType = "FUT"
 | 
					 | 
				
			||||||
    conId = 515416577
 | 
					 | 
				
			||||||
    symbol = "MNQ"
 | 
					 | 
				
			||||||
    lastTradeDateOrContractMonth = "20221216"
 | 
					 | 
				
			||||||
    strike = 0.0
 | 
					 | 
				
			||||||
    right = ""
 | 
					 | 
				
			||||||
    multiplier = "2"
 | 
					 | 
				
			||||||
    exchange = "GLOBEX"
 | 
					 | 
				
			||||||
    primaryExchange = ""
 | 
					 | 
				
			||||||
    currency = "USD"
 | 
					 | 
				
			||||||
    localSymbol = "MNQZ2"
 | 
					 | 
				
			||||||
    tradingClass = "MNQ"
 | 
					 | 
				
			||||||
    includeExpired = false
 | 
					 | 
				
			||||||
    secIdType = ""
 | 
					 | 
				
			||||||
    secId = ""
 | 
					 | 
				
			||||||
    comboLegsDescrip = ""
 | 
					 | 
				
			||||||
    comboLegs = []
 | 
					 | 
				
			||||||
    execId = "0000e1a7.630f5e5a.01.01"
 | 
					 | 
				
			||||||
    time = 1661972086.0
 | 
					 | 
				
			||||||
    acctNumber = "DU69696969"
 | 
					 | 
				
			||||||
    side = "BOT"
 | 
					 | 
				
			||||||
    shares = 1.0
 | 
					 | 
				
			||||||
    price = 12372.75
 | 
					 | 
				
			||||||
    permId = 441472655
 | 
					 | 
				
			||||||
    clientId = 6116
 | 
					 | 
				
			||||||
    orderId = 985
 | 
					 | 
				
			||||||
    liquidation = 0
 | 
					 | 
				
			||||||
    cumQty = 1.0
 | 
					 | 
				
			||||||
    avgPrice = 12372.75
 | 
					 | 
				
			||||||
    orderRef = ""
 | 
					 | 
				
			||||||
    evRule = ""
 | 
					 | 
				
			||||||
    evMultiplier = 0.0
 | 
					 | 
				
			||||||
    modelCode = ""
 | 
					 | 
				
			||||||
    lastLiquidity = 1
 | 
					 | 
				
			||||||
    broker_time = 1661972086.0
 | 
					 | 
				
			||||||
    name = "ib"
 | 
					 | 
				
			||||||
    commission = 0.57
 | 
					 | 
				
			||||||
    realizedPNL = 243.41
 | 
					 | 
				
			||||||
    yield_ = 0.0
 | 
					 | 
				
			||||||
    yieldRedemptionDate = 0
 | 
					 | 
				
			||||||
    listingExchange = "GLOBEX"
 | 
					 | 
				
			||||||
    date = "2022-08-31T18:54:46+00:00"
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
your ``pps.toml`` file will have position entries like,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. code:: toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    [ib.algopaper."mnq.globex.20221216"]
 | 
					 | 
				
			||||||
    size = -1.0
 | 
					 | 
				
			||||||
    ppu = 12423.630576923071
 | 
					 | 
				
			||||||
    bsuid = 515416577
 | 
					 | 
				
			||||||
    expiry = "2022-12-16T00:00:00+00:00"
 | 
					 | 
				
			||||||
    clears = [
 | 
					 | 
				
			||||||
     { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
 | 
					 | 
				
			||||||
    ]
 | 
					 | 
				
			||||||
| 
						 | 
					@ -20,10 +20,15 @@ Interactive Brokers API backend.
 | 
				
			||||||
Sub-modules within break into the core functionalities:
 | 
					Sub-modules within break into the core functionalities:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
- ``broker.py`` part for orders / trading endpoints
 | 
					- ``broker.py`` part for orders / trading endpoints
 | 
				
			||||||
- ``feed.py`` for real-time data feed endpoints
 | 
					- ``data.py`` for real-time data feed endpoints
 | 
				
			||||||
- ``api.py`` for the core API machinery which is ``trio``-ized
 | 
					
 | 
				
			||||||
 | 
					- ``client.py`` for the core API machinery which is ``trio``-ized
 | 
				
			||||||
  wrapping around ``ib_insync``.
 | 
					  wrapping around ``ib_insync``.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					- ``report.py`` for the hackery to build manual pp calcs
 | 
				
			||||||
 | 
					  to avoid ib's absolute bullshit FIFO style position
 | 
				
			||||||
 | 
					  tracking..
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from .api import (
 | 
					from .api import (
 | 
				
			||||||
    get_client,
 | 
					    get_client,
 | 
				
			||||||
| 
						 | 
					@ -33,10 +38,7 @@ from .feed import (
 | 
				
			||||||
    open_symbol_search,
 | 
					    open_symbol_search,
 | 
				
			||||||
    stream_quotes,
 | 
					    stream_quotes,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from .broker import (
 | 
					from .broker import trades_dialogue
 | 
				
			||||||
    trades_dialogue,
 | 
					 | 
				
			||||||
    norm_trade_records,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
__all__ = [
 | 
					__all__ = [
 | 
				
			||||||
    'get_client',
 | 
					    'get_client',
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -29,7 +29,6 @@ import itertools
 | 
				
			||||||
from math import isnan
 | 
					from math import isnan
 | 
				
			||||||
from typing import (
 | 
					from typing import (
 | 
				
			||||||
    Any,
 | 
					    Any,
 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Union,
 | 
					    Union,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
import asyncio
 | 
					import asyncio
 | 
				
			||||||
| 
						 | 
					@ -39,30 +38,16 @@ import time
 | 
				
			||||||
from types import SimpleNamespace
 | 
					from types import SimpleNamespace
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from bidict import bidict
 | 
					 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
from tractor import to_asyncio
 | 
					from tractor import to_asyncio
 | 
				
			||||||
import pendulum
 | 
					from ib_insync.wrapper import RequestError
 | 
				
			||||||
import ib_insync as ibis
 | 
					from ib_insync.contract import Contract, ContractDetails
 | 
				
			||||||
from ib_insync.contract import (
 | 
					 | 
				
			||||||
    Contract,
 | 
					 | 
				
			||||||
    ContractDetails,
 | 
					 | 
				
			||||||
    Option,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from ib_insync.order import Order
 | 
					from ib_insync.order import Order
 | 
				
			||||||
from ib_insync.ticker import Ticker
 | 
					from ib_insync.ticker import Ticker
 | 
				
			||||||
from ib_insync.objects import (
 | 
					from ib_insync.objects import Position
 | 
				
			||||||
    BarDataList,
 | 
					import ib_insync as ibis
 | 
				
			||||||
    Position,
 | 
					from ib_insync.wrapper import Wrapper
 | 
				
			||||||
    Fill,
 | 
					 | 
				
			||||||
    Execution,
 | 
					 | 
				
			||||||
    CommissionReport,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from ib_insync.wrapper import (
 | 
					 | 
				
			||||||
    Wrapper,
 | 
					 | 
				
			||||||
    RequestError,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from ib_insync.client import Client as ib_Client
 | 
					from ib_insync.client import Client as ib_Client
 | 
				
			||||||
import numpy as np
 | 
					import numpy as np
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -80,11 +65,26 @@ _time_units = {
 | 
				
			||||||
    'h': ' hours',
 | 
					    'h': ' hours',
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_bar_sizes = {
 | 
					_time_frames = {
 | 
				
			||||||
    1: '1 Sec',
 | 
					    '1s': '1 Sec',
 | 
				
			||||||
    60: '1 min',
 | 
					    '5s': '5 Sec',
 | 
				
			||||||
    60*60: '1 hour',
 | 
					    '30s': '30 Sec',
 | 
				
			||||||
    24*60*60: '1 day',
 | 
					    '1m': 'OneMinute',
 | 
				
			||||||
 | 
					    '2m': 'TwoMinutes',
 | 
				
			||||||
 | 
					    '3m': 'ThreeMinutes',
 | 
				
			||||||
 | 
					    '4m': 'FourMinutes',
 | 
				
			||||||
 | 
					    '5m': 'FiveMinutes',
 | 
				
			||||||
 | 
					    '10m': 'TenMinutes',
 | 
				
			||||||
 | 
					    '15m': 'FifteenMinutes',
 | 
				
			||||||
 | 
					    '20m': 'TwentyMinutes',
 | 
				
			||||||
 | 
					    '30m': 'HalfHour',
 | 
				
			||||||
 | 
					    '1h': 'OneHour',
 | 
				
			||||||
 | 
					    '2h': 'TwoHours',
 | 
				
			||||||
 | 
					    '4h': 'FourHours',
 | 
				
			||||||
 | 
					    'D': 'OneDay',
 | 
				
			||||||
 | 
					    'W': 'OneWeek',
 | 
				
			||||||
 | 
					    'M': 'OneMonth',
 | 
				
			||||||
 | 
					    'Y': 'OneYear',
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_show_wap_in_history: bool = False
 | 
					_show_wap_in_history: bool = False
 | 
				
			||||||
| 
						 | 
					@ -155,102 +155,70 @@ class NonShittyIB(ibis.IB):
 | 
				
			||||||
        self.client.apiEnd += self.disconnectedEvent
 | 
					        self.client.apiEnd += self.disconnectedEvent
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_futes_venues = (
 | 
					 | 
				
			||||||
    'GLOBEX',
 | 
					 | 
				
			||||||
    'NYMEX',
 | 
					 | 
				
			||||||
    'CME',
 | 
					 | 
				
			||||||
    'CMECRYPTO',
 | 
					 | 
				
			||||||
    'COMEX',
 | 
					 | 
				
			||||||
    'CMDTY',  # special name case..
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
_adhoc_futes_set = {
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # equities
 | 
					 | 
				
			||||||
    'nq.globex',
 | 
					 | 
				
			||||||
    'mnq.globex',  # micro
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    'es.globex',
 | 
					 | 
				
			||||||
    'mes.globex',  # micro
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # cypto$
 | 
					 | 
				
			||||||
    'brr.cmecrypto',
 | 
					 | 
				
			||||||
    'ethusdrr.cmecrypto',
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # agriculture
 | 
					 | 
				
			||||||
    'he.nymex',  # lean hogs
 | 
					 | 
				
			||||||
    'le.nymex',  # live cattle (geezers)
 | 
					 | 
				
			||||||
    'gf.nymex',  # feeder cattle (younguns)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # raw
 | 
					 | 
				
			||||||
    'lb.nymex',  # random len lumber
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # metals
 | 
					 | 
				
			||||||
    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
 | 
					 | 
				
			||||||
    'xauusd.cmdty',  # london gold spot ^
 | 
					 | 
				
			||||||
    'gc.nymex',
 | 
					 | 
				
			||||||
    'mgc.nymex',  # micro
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # oil & gas
 | 
					 | 
				
			||||||
    'cl.nymex',
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    'xagusd.cmdty',  # silver spot
 | 
					 | 
				
			||||||
    'ni.nymex',  # silver futes
 | 
					 | 
				
			||||||
    'qi.comex',  # mini-silver futes
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# taken from list here:
 | 
					 | 
				
			||||||
# https://www.interactivebrokers.com/en/trading/products-spot-currencies.php
 | 
					 | 
				
			||||||
_adhoc_fiat_set = set((
 | 
					 | 
				
			||||||
    'USD, AED, AUD, CAD,'
 | 
					 | 
				
			||||||
    'CHF, CNH, CZK, DKK,'
 | 
					 | 
				
			||||||
    'EUR, GBP, HKD, HUF,'
 | 
					 | 
				
			||||||
    'ILS, JPY, MXN, NOK,'
 | 
					 | 
				
			||||||
    'NZD, PLN, RUB, SAR,'
 | 
					 | 
				
			||||||
    'SEK, SGD, TRY, ZAR'
 | 
					 | 
				
			||||||
    ).split(' ,')
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# map of symbols to contract ids
 | 
					# map of symbols to contract ids
 | 
				
			||||||
_adhoc_symbol_map = {
 | 
					_adhoc_cmdty_data_map = {
 | 
				
			||||||
    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
 | 
					    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
 | 
					    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
 | 
				
			||||||
    # https://groups.io/g/twsapi/message/44174
 | 
					    # https://groups.io/g/twsapi/message/44174
 | 
				
			||||||
    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
 | 
					    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
for qsn in _adhoc_futes_set:
 | 
					 | 
				
			||||||
    sym, venue = qsn.split('.')
 | 
					 | 
				
			||||||
    assert venue.upper() in _futes_venues, f'{venue}'
 | 
					 | 
				
			||||||
    _adhoc_symbol_map[sym.upper()] = (
 | 
					 | 
				
			||||||
        {'exchange': venue},
 | 
					 | 
				
			||||||
        {},
 | 
					 | 
				
			||||||
    )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					_futes_venues = (
 | 
				
			||||||
 | 
					    'GLOBEX',
 | 
				
			||||||
 | 
					    'NYMEX',
 | 
				
			||||||
 | 
					    'CME',
 | 
				
			||||||
 | 
					    'CMECRYPTO',
 | 
				
			||||||
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					_adhoc_futes_set = {
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # equities
 | 
				
			||||||
 | 
					    'nq.globex',
 | 
				
			||||||
 | 
					    'mnq.globex',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    'es.globex',
 | 
				
			||||||
 | 
					    'mes.globex',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # cypto$
 | 
				
			||||||
 | 
					    'brr.cmecrypto',
 | 
				
			||||||
 | 
					    'ethusdrr.cmecrypto',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # agriculture
 | 
				
			||||||
 | 
					    'he.globex',  # lean hogs
 | 
				
			||||||
 | 
					    'le.globex',  # live cattle (geezers)
 | 
				
			||||||
 | 
					    'gf.globex',  # feeder cattle (younguns)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # raw
 | 
				
			||||||
 | 
					    'lb.globex',  # random len lumber
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # metals
 | 
				
			||||||
 | 
					    'xauusd.cmdty',  # gold spot
 | 
				
			||||||
 | 
					    'gc.nymex',
 | 
				
			||||||
 | 
					    'mgc.nymex',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    'xagusd.cmdty',  # silver spot
 | 
				
			||||||
 | 
					    'ni.nymex',  # silver futes
 | 
				
			||||||
 | 
					    'qi.comex',  # mini-silver futes
 | 
				
			||||||
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# exchanges we don't support at the moment due to not knowing
 | 
					# exchanges we don't support at the moment due to not knowing
 | 
				
			||||||
# how to do symbol-contract lookup correctly likely due
 | 
					# how to do symbol-contract lookup correctly likely due
 | 
				
			||||||
# to not having the data feeds subscribed.
 | 
					# to not having the data feeds subscribed.
 | 
				
			||||||
_exch_skip_list = {
 | 
					_exch_skip_list = {
 | 
				
			||||||
 | 
					 | 
				
			||||||
    'ASX',  # aussie stocks
 | 
					    'ASX',  # aussie stocks
 | 
				
			||||||
    'MEXI',  # mexican stocks
 | 
					    'MEXI',  # mexican stocks
 | 
				
			||||||
 | 
					    'VALUE',  # no idea
 | 
				
			||||||
    # no idea
 | 
					 | 
				
			||||||
    'VALUE',
 | 
					 | 
				
			||||||
    'FUNDSERV',
 | 
					 | 
				
			||||||
    'SWB2',
 | 
					 | 
				
			||||||
    'PSE',
 | 
					 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_enters = 0
 | 
					_enters = 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def bars_to_np(bars: list) -> np.ndarray:
 | 
					def bars_to_np(bars: list) -> np.ndarray:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    Convert a "bars list thing" (``BarDataList`` type from ibis)
 | 
					    Convert a "bars list thing" (``BarsList`` type from ibis)
 | 
				
			||||||
    into a numpy struct array.
 | 
					    into a numpy struct array.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -270,27 +238,6 @@ def bars_to_np(bars: list) -> np.ndarray:
 | 
				
			||||||
    return nparr
 | 
					    return nparr
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# NOTE: pacing violations exist for higher sample rates:
 | 
					 | 
				
			||||||
# https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations
 | 
					 | 
				
			||||||
# Also see note on duration limits being lifted on 1m+ periods,
 | 
					 | 
				
			||||||
# but they say "use with discretion":
 | 
					 | 
				
			||||||
# https://interactivebrokers.github.io/tws-api/historical_limitations.html#non-available_hd
 | 
					 | 
				
			||||||
_samplings: dict[int, tuple[str, str]] = {
 | 
					 | 
				
			||||||
    1: (
 | 
					 | 
				
			||||||
        '1 secs',
 | 
					 | 
				
			||||||
        f'{int(2e3)} S',
 | 
					 | 
				
			||||||
        pendulum.duration(seconds=2e3),
 | 
					 | 
				
			||||||
    ),
 | 
					 | 
				
			||||||
    # TODO: benchmark >1 D duration on query to see if
 | 
					 | 
				
			||||||
    # throughput can be made faster during backfilling.
 | 
					 | 
				
			||||||
    60: (
 | 
					 | 
				
			||||||
        '1 min',
 | 
					 | 
				
			||||||
        '1 D',
 | 
					 | 
				
			||||||
        pendulum.duration(days=1),
 | 
					 | 
				
			||||||
    ),
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class Client:
 | 
					class Client:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    IB wrapped for our broker backend API.
 | 
					    IB wrapped for our broker backend API.
 | 
				
			||||||
| 
						 | 
					@ -314,29 +261,27 @@ class Client:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # NOTE: the ib.client here is "throttled" to 45 rps by default
 | 
					        # NOTE: the ib.client here is "throttled" to 45 rps by default
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def trades(self) -> dict[str, Any]:
 | 
					    async def trades(
 | 
				
			||||||
        '''
 | 
					        self,
 | 
				
			||||||
        Return list of trade-fills from current session in ``dict``.
 | 
					        # api_only: bool = False,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					    ) -> dict[str, Any]:
 | 
				
			||||||
        fills: list[Fill] = self.ib.fills()
 | 
					
 | 
				
			||||||
        norm_fills: list[dict] = []
 | 
					        # orders = await self.ib.reqCompletedOrdersAsync(
 | 
				
			||||||
 | 
					        #     apiOnly=api_only
 | 
				
			||||||
 | 
					        # )
 | 
				
			||||||
 | 
					        fills = await self.ib.reqExecutionsAsync()
 | 
				
			||||||
 | 
					        norm_fills = []
 | 
				
			||||||
        for fill in fills:
 | 
					        for fill in fills:
 | 
				
			||||||
            fill = fill._asdict()  # namedtuple
 | 
					            fill = fill._asdict()  # namedtuple
 | 
				
			||||||
            for key, val in fill.items():
 | 
					            for key, val in fill.copy().items():
 | 
				
			||||||
                match val:
 | 
					                if isinstance(val, Contract):
 | 
				
			||||||
                    case Contract() | Execution() | CommissionReport():
 | 
					                    fill[key] = asdict(val)
 | 
				
			||||||
                        fill[key] = asdict(val)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            norm_fills.append(fill)
 | 
					            norm_fills.append(fill)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return norm_fills
 | 
					        return norm_fills
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def orders(self) -> list[Order]:
 | 
					 | 
				
			||||||
        return await self.ib.reqAllOpenOrdersAsync(
 | 
					 | 
				
			||||||
            apiOnly=False,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async def bars(
 | 
					    async def bars(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        fqsn: str,
 | 
					        fqsn: str,
 | 
				
			||||||
| 
						 | 
					@ -345,55 +290,52 @@ class Client:
 | 
				
			||||||
        start_dt: Union[datetime, str] = "1970-01-01T00:00:00.000000-05:00",
 | 
					        start_dt: Union[datetime, str] = "1970-01-01T00:00:00.000000-05:00",
 | 
				
			||||||
        end_dt: Union[datetime, str] = "",
 | 
					        end_dt: Union[datetime, str] = "",
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # ohlc sample period in seconds
 | 
					        sample_period_s: str = 1,  # ohlc sample period
 | 
				
			||||||
        sample_period_s: int = 1,
 | 
					        period_count: int = int(2e3),  # <- max per 1s sample query
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # optional "duration of time" equal to the
 | 
					    ) -> list[dict[str, Any]]:
 | 
				
			||||||
        # length of the returned history frame.
 | 
					 | 
				
			||||||
        duration: Optional[str] = None,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        **kwargs,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> tuple[BarDataList, np.ndarray, pendulum.Duration]:
 | 
					 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        Retreive OHLCV bars for a fqsn over a range to the present.
 | 
					        Retreive OHLCV bars for a fqsn over a range to the present.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        # See API docs here:
 | 
					 | 
				
			||||||
        # https://interactivebrokers.github.io/tws-api/historical_data.html
 | 
					 | 
				
			||||||
        bars_kwargs = {'whatToShow': 'TRADES'}
 | 
					        bars_kwargs = {'whatToShow': 'TRADES'}
 | 
				
			||||||
        bars_kwargs.update(kwargs)
 | 
					 | 
				
			||||||
        bar_size, duration, dt_duration = _samplings[sample_period_s]
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        global _enters
 | 
					        global _enters
 | 
				
			||||||
        # log.info(f'REQUESTING BARS {_enters} @ end={end_dt}')
 | 
					        # log.info(f'REQUESTING BARS {_enters} @ end={end_dt}')
 | 
				
			||||||
        print(
 | 
					        print(f'REQUESTING BARS {_enters} @ end={end_dt}')
 | 
				
			||||||
            f"REQUESTING {duration}'s worth {bar_size} BARS\n"
 | 
					 | 
				
			||||||
            f'{_enters} @ end={end_dt}"'
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if not end_dt:
 | 
					        if not end_dt:
 | 
				
			||||||
            end_dt = ''
 | 
					            end_dt = ''
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        _enters += 1
 | 
					        _enters += 1
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        contract = (await self.find_contracts(fqsn))[0]
 | 
					        contract = await self.find_contract(fqsn)
 | 
				
			||||||
        bars_kwargs.update(getattr(contract, 'bars_kwargs', {}))
 | 
					        bars_kwargs.update(getattr(contract, 'bars_kwargs', {}))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # _min = min(2000*100, count)
 | 
				
			||||||
        bars = await self.ib.reqHistoricalDataAsync(
 | 
					        bars = await self.ib.reqHistoricalDataAsync(
 | 
				
			||||||
            contract,
 | 
					            contract,
 | 
				
			||||||
            endDateTime=end_dt,
 | 
					            endDateTime=end_dt,
 | 
				
			||||||
            formatDate=2,
 | 
					            formatDate=2,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # time history length values format:
 | 
				
			||||||
 | 
					            # ``durationStr=integer{SPACE}unit (S|D|W|M|Y)``
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # OHLC sampling values:
 | 
					            # OHLC sampling values:
 | 
				
			||||||
            # 1 secs, 5 secs, 10 secs, 15 secs, 30 secs, 1 min, 2 mins,
 | 
					            # 1 secs, 5 secs, 10 secs, 15 secs, 30 secs, 1 min, 2 mins,
 | 
				
			||||||
            # 3 mins, 5 mins, 10 mins, 15 mins, 20 mins, 30 mins,
 | 
					            # 3 mins, 5 mins, 10 mins, 15 mins, 20 mins, 30 mins,
 | 
				
			||||||
            # 1 hour, 2 hours, 3 hours, 4 hours, 8 hours, 1 day, 1W, 1M
 | 
					            # 1 hour, 2 hours, 3 hours, 4 hours, 8 hours, 1 day, 1W, 1M
 | 
				
			||||||
            barSizeSetting=bar_size,
 | 
					            # barSizeSetting='1 secs',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # time history length values format:
 | 
					            # durationStr='{count} S'.format(count=15000 * 5),
 | 
				
			||||||
            # ``durationStr=integer{SPACE}unit (S|D|W|M|Y)``
 | 
					            # durationStr='{count} D'.format(count=1),
 | 
				
			||||||
            durationStr=duration,
 | 
					            # barSizeSetting='5 secs',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            durationStr='{count} S'.format(count=period_count),
 | 
				
			||||||
 | 
					            # barSizeSetting='5 secs',
 | 
				
			||||||
 | 
					            barSizeSetting='1 secs',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # barSizeSetting='1 min',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # always use extended hours
 | 
					            # always use extended hours
 | 
				
			||||||
            useRTH=False,
 | 
					            useRTH=False,
 | 
				
			||||||
| 
						 | 
					@ -404,21 +346,11 @@ class Client:
 | 
				
			||||||
            # whatToShow='TRADES',
 | 
					            # whatToShow='TRADES',
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        if not bars:
 | 
					        if not bars:
 | 
				
			||||||
            # NOTE: there's 2 cases here to handle (and this should be
 | 
					            # TODO: raise underlying error here
 | 
				
			||||||
            # read alongside the implementation of
 | 
					            raise ValueError(f"No bars retreived for {fqsn}?")
 | 
				
			||||||
            # ``.reqHistoricalDataAsync()``):
 | 
					 | 
				
			||||||
            # - no data is returned for the period likely due to
 | 
					 | 
				
			||||||
            # a weekend, holiday or other non-trading period prior to
 | 
					 | 
				
			||||||
            # ``end_dt`` which exceeds the ``duration``,
 | 
					 | 
				
			||||||
            # - a timeout occurred in which case insync internals return
 | 
					 | 
				
			||||||
            # an empty list thing with bars.clear()...
 | 
					 | 
				
			||||||
            return [], np.empty(0), dt_duration
 | 
					 | 
				
			||||||
            # TODO: we could maybe raise ``NoData`` instead if we
 | 
					 | 
				
			||||||
            # rewrite the method in the first case? right now there's no
 | 
					 | 
				
			||||||
            # way to detect a timeout.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        nparr = bars_to_np(bars)
 | 
					        nparr = bars_to_np(bars)
 | 
				
			||||||
        return bars, nparr, dt_duration
 | 
					        return bars, nparr
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def con_deats(
 | 
					    async def con_deats(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
| 
						 | 
					@ -432,15 +364,7 @@ class Client:
 | 
				
			||||||
                futs.append(self.ib.reqContractDetailsAsync(con))
 | 
					                futs.append(self.ib.reqContractDetailsAsync(con))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # batch request all details
 | 
					        # batch request all details
 | 
				
			||||||
        try:
 | 
					        results = await asyncio.gather(*futs)
 | 
				
			||||||
            results = await asyncio.gather(*futs)
 | 
					 | 
				
			||||||
        except RequestError as err:
 | 
					 | 
				
			||||||
            msg = err.message
 | 
					 | 
				
			||||||
            if (
 | 
					 | 
				
			||||||
                'No security definition' in msg
 | 
					 | 
				
			||||||
            ):
 | 
					 | 
				
			||||||
                log.warning(f'{msg}: {contracts}')
 | 
					 | 
				
			||||||
                return {}
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # one set per future result
 | 
					        # one set per future result
 | 
				
			||||||
        details = {}
 | 
					        details = {}
 | 
				
			||||||
| 
						 | 
					@ -449,11 +373,20 @@ class Client:
 | 
				
			||||||
            # XXX: if there is more then one entry in the details list
 | 
					            # XXX: if there is more then one entry in the details list
 | 
				
			||||||
            # then the contract is so called "ambiguous".
 | 
					            # then the contract is so called "ambiguous".
 | 
				
			||||||
            for d in details_set:
 | 
					            for d in details_set:
 | 
				
			||||||
 | 
					                con = d.contract
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # nested dataclass we probably don't need and that won't
 | 
					                key = '.'.join([
 | 
				
			||||||
                # IPC serialize..
 | 
					                    con.symbol,
 | 
				
			||||||
 | 
					                    con.primaryExchange or con.exchange,
 | 
				
			||||||
 | 
					                ])
 | 
				
			||||||
 | 
					                expiry = con.lastTradeDateOrContractMonth
 | 
				
			||||||
 | 
					                if expiry:
 | 
				
			||||||
 | 
					                    key += f'.{expiry}'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                # nested dataclass we probably don't need and that
 | 
				
			||||||
 | 
					                # won't IPC serialize..
 | 
				
			||||||
                d.secIdList = ''
 | 
					                d.secIdList = ''
 | 
				
			||||||
                key, calc_price = con2fqsn(d.contract)
 | 
					
 | 
				
			||||||
                details[key] = d
 | 
					                details[key] = d
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return details
 | 
					        return details
 | 
				
			||||||
| 
						 | 
					@ -483,20 +416,17 @@ class Client:
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        pattern: str,
 | 
					        pattern: str,
 | 
				
			||||||
        # how many contracts to search "up to"
 | 
					        # how many contracts to search "up to"
 | 
				
			||||||
        upto: int = 16,
 | 
					        upto: int = 3,
 | 
				
			||||||
        asdicts: bool = True,
 | 
					        asdicts: bool = True,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> dict[str, ContractDetails]:
 | 
					    ) -> dict[str, ContractDetails]:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO add search though our adhoc-locally defined symbol set
 | 
					        # TODO add search though our adhoc-locally defined symbol set
 | 
				
			||||||
        # for futes/cmdtys/
 | 
					        # for futes/cmdtys/
 | 
				
			||||||
        try:
 | 
					        results = await self.search_stocks(
 | 
				
			||||||
            results = await self.search_stocks(
 | 
					            pattern,
 | 
				
			||||||
                pattern,
 | 
					            upto=upto,
 | 
				
			||||||
                upto=upto,
 | 
					        )
 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
        except ConnectionError:
 | 
					 | 
				
			||||||
            return {}
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        for key, deats in results.copy().items():
 | 
					        for key, deats in results.copy().items():
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -507,54 +437,21 @@ class Client:
 | 
				
			||||||
            if sectype == 'IND':
 | 
					            if sectype == 'IND':
 | 
				
			||||||
                results[f'{sym}.IND'] = tract
 | 
					                results[f'{sym}.IND'] = tract
 | 
				
			||||||
                results.pop(key)
 | 
					                results.pop(key)
 | 
				
			||||||
                # exch = tract.exchange
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # XXX: add back one of these to get the weird deadlock
 | 
					 | 
				
			||||||
                # on the debugger from root without the latest
 | 
					 | 
				
			||||||
                # maybe_wait_for_debugger() fix in the `open_context()`
 | 
					 | 
				
			||||||
                # exit.
 | 
					 | 
				
			||||||
                # assert 0
 | 
					 | 
				
			||||||
                # if con.exchange not in _exch_skip_list:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                exch = tract.exchange
 | 
					                exch = tract.exchange
 | 
				
			||||||
                if exch not in _exch_skip_list:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                    # try to lookup any contracts from our adhoc set
 | 
					 | 
				
			||||||
                    # since often the exchange/venue is named slightly
 | 
					 | 
				
			||||||
                    # different (eg. BRR.CMECRYPTO` instead of just
 | 
					 | 
				
			||||||
                    # `.CME`).
 | 
					 | 
				
			||||||
                    info = _adhoc_symbol_map.get(sym)
 | 
					 | 
				
			||||||
                    if info:
 | 
					 | 
				
			||||||
                        con_kwargs, bars_kwargs = info
 | 
					 | 
				
			||||||
                        exch = con_kwargs['exchange']
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                if exch in _futes_venues:
 | 
				
			||||||
                    # try get all possible contracts for symbol as per,
 | 
					                    # try get all possible contracts for symbol as per,
 | 
				
			||||||
                    # https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut
 | 
					                    # https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut
 | 
				
			||||||
                    con = ibis.Future(
 | 
					                    con = ibis.Future(
 | 
				
			||||||
                        symbol=sym,
 | 
					                        symbol=sym,
 | 
				
			||||||
                        exchange=exch,
 | 
					                        exchange=exch,
 | 
				
			||||||
                    )
 | 
					                    )
 | 
				
			||||||
                    # TODO: make this work, think it's something to do
 | 
					                    try:
 | 
				
			||||||
                    # with the qualify flag.
 | 
					                        all_deats = await self.con_deats([con])
 | 
				
			||||||
                    # cons = await self.find_contracts(
 | 
					                        results |= all_deats
 | 
				
			||||||
                    #     contract=con,
 | 
					 | 
				
			||||||
                    #     err_on_qualify=False,
 | 
					 | 
				
			||||||
                    # )
 | 
					 | 
				
			||||||
                    # if cons:
 | 
					 | 
				
			||||||
                    all_deats = await self.con_deats([con])
 | 
					 | 
				
			||||||
                    results |= all_deats
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # forex pairs
 | 
					                    except RequestError as err:
 | 
				
			||||||
            elif sectype == 'CASH':
 | 
					                        log.warning(err.message)
 | 
				
			||||||
                dst, src = tract.localSymbol.split('.')
 | 
					 | 
				
			||||||
                pair_key = "/".join([dst, src])
 | 
					 | 
				
			||||||
                exch = tract.exchange.lower()
 | 
					 | 
				
			||||||
                results[f'{pair_key}.{exch}'] = tract
 | 
					 | 
				
			||||||
                results.pop(key)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # XXX: again seems to trigger the weird tractor
 | 
					 | 
				
			||||||
                # bug with the debugger..
 | 
					 | 
				
			||||||
                # assert 0
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return results
 | 
					        return results
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -586,19 +483,13 @@ class Client:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return con
 | 
					        return con
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def get_con(
 | 
					    async def find_contract(
 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        conid: int,
 | 
					 | 
				
			||||||
    ) -> Contract:
 | 
					 | 
				
			||||||
        return await self.ib.qualifyContractsAsync(
 | 
					 | 
				
			||||||
            ibis.Contract(conId=conid)
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def parse_patt2fqsn(
 | 
					 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        pattern: str,
 | 
					        pattern: str,
 | 
				
			||||||
 | 
					        currency: str = 'USD',
 | 
				
			||||||
 | 
					        **kwargs,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> tuple[str, str, str, str]:
 | 
					    ) -> Contract:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: we can't use this currently because
 | 
					        # TODO: we can't use this currently because
 | 
				
			||||||
        # ``wrapper.starTicker()`` currently cashes ticker instances
 | 
					        # ``wrapper.starTicker()`` currently cashes ticker instances
 | 
				
			||||||
| 
						 | 
					@ -611,114 +502,61 @@ class Client:
 | 
				
			||||||
        # XXX UPDATE: we can probably do the tick/trades scraping
 | 
					        # XXX UPDATE: we can probably do the tick/trades scraping
 | 
				
			||||||
        # inside our eventkit handler instead to bypass this entirely?
 | 
					        # inside our eventkit handler instead to bypass this entirely?
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        currency = ''
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # fqsn parsing stage
 | 
					 | 
				
			||||||
        # ------------------
 | 
					 | 
				
			||||||
        if '.ib' in pattern:
 | 
					        if '.ib' in pattern:
 | 
				
			||||||
            from ..data._source import unpack_fqsn
 | 
					            from ..data._source import unpack_fqsn
 | 
				
			||||||
            _, symbol, expiry = unpack_fqsn(pattern)
 | 
					            broker, symbol, expiry = unpack_fqsn(pattern)
 | 
				
			||||||
 | 
					 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            symbol = pattern
 | 
					            symbol = pattern
 | 
				
			||||||
            expiry = ''
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # another hack for forex pairs lul.
 | 
					        # try:
 | 
				
			||||||
        if (
 | 
					        #     # give the cache a go
 | 
				
			||||||
            '.idealpro' in symbol
 | 
					        #     return self._contracts[symbol]
 | 
				
			||||||
            # or '/' in symbol
 | 
					        # except KeyError:
 | 
				
			||||||
        ):
 | 
					        #     log.debug(f'Looking up contract for {symbol}')
 | 
				
			||||||
            exch = 'IDEALPRO'
 | 
					        expiry: str = ''
 | 
				
			||||||
            symbol = symbol.removesuffix('.idealpro')
 | 
					        if symbol.count('.') > 1:
 | 
				
			||||||
            if '/' in symbol:
 | 
					            symbol, _, expiry = symbol.rpartition('.')
 | 
				
			||||||
                symbol, currency = symbol.split('/')
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        else:
 | 
					        # use heuristics to figure out contract "type"
 | 
				
			||||||
            # TODO: yes, a cache..
 | 
					        sym, exch = symbol.upper().rsplit('.', maxsplit=1)
 | 
				
			||||||
            # try:
 | 
					 | 
				
			||||||
            #     # give the cache a go
 | 
					 | 
				
			||||||
            #     return self._contracts[symbol]
 | 
					 | 
				
			||||||
            # except KeyError:
 | 
					 | 
				
			||||||
            #     log.debug(f'Looking up contract for {symbol}')
 | 
					 | 
				
			||||||
            expiry: str = ''
 | 
					 | 
				
			||||||
            if symbol.count('.') > 1:
 | 
					 | 
				
			||||||
                symbol, _, expiry = symbol.rpartition('.')
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # use heuristics to figure out contract "type"
 | 
					        qualify: bool = True
 | 
				
			||||||
            symbol, exch = symbol.upper().rsplit('.', maxsplit=1)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        return symbol, currency, exch, expiry
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async def find_contracts(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        pattern: Optional[str] = None,
 | 
					 | 
				
			||||||
        contract: Optional[Contract] = None,
 | 
					 | 
				
			||||||
        qualify: bool = True,
 | 
					 | 
				
			||||||
        err_on_qualify: bool = True,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> Contract:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if pattern is not None:
 | 
					 | 
				
			||||||
            symbol, currency, exch, expiry = self.parse_patt2fqsn(
 | 
					 | 
				
			||||||
                pattern,
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
            sectype = ''
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            assert contract
 | 
					 | 
				
			||||||
            symbol = contract.symbol
 | 
					 | 
				
			||||||
            sectype = contract.secType
 | 
					 | 
				
			||||||
            exch = contract.exchange or contract.primaryExchange
 | 
					 | 
				
			||||||
            expiry = contract.lastTradeDateOrContractMonth
 | 
					 | 
				
			||||||
            currency = contract.currency
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # contract searching stage
 | 
					 | 
				
			||||||
        # ------------------------
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # futes
 | 
					        # futes
 | 
				
			||||||
        if exch in _futes_venues:
 | 
					        if exch in _futes_venues:
 | 
				
			||||||
            if expiry:
 | 
					            if expiry:
 | 
				
			||||||
                # get the "front" contract
 | 
					                # get the "front" contract
 | 
				
			||||||
                con = await self.get_fute(
 | 
					                contract = await self.get_fute(
 | 
				
			||||||
                    symbol=symbol,
 | 
					                    symbol=sym,
 | 
				
			||||||
                    exchange=exch,
 | 
					                    exchange=exch,
 | 
				
			||||||
                    expiry=expiry,
 | 
					                    expiry=expiry,
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            else:
 | 
					            else:
 | 
				
			||||||
                # get the "front" contract
 | 
					                # get the "front" contract
 | 
				
			||||||
                con = await self.get_fute(
 | 
					                contract = await self.get_fute(
 | 
				
			||||||
                    symbol=symbol,
 | 
					                    symbol=sym,
 | 
				
			||||||
                    exchange=exch,
 | 
					                    exchange=exch,
 | 
				
			||||||
                    front=True,
 | 
					                    front=True,
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        elif (
 | 
					            qualify = False
 | 
				
			||||||
            exch in ('IDEALPRO')
 | 
					
 | 
				
			||||||
            or sectype == 'CASH'
 | 
					        elif exch in ('FOREX'):
 | 
				
			||||||
        ):
 | 
					            currency = ''
 | 
				
			||||||
            # if '/' in symbol:
 | 
					            symbol, currency = sym.split('/')
 | 
				
			||||||
            #     currency = ''
 | 
					 | 
				
			||||||
            #     symbol, currency = symbol.split('/')
 | 
					 | 
				
			||||||
            con = ibis.Forex(
 | 
					            con = ibis.Forex(
 | 
				
			||||||
                pair=''.join((symbol, currency)),
 | 
					                symbol=symbol,
 | 
				
			||||||
                currency=currency,
 | 
					                currency=currency,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
            con.bars_kwargs = {'whatToShow': 'MIDPOINT'}
 | 
					            con.bars_kwargs = {'whatToShow': 'MIDPOINT'}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # commodities
 | 
					        # commodities
 | 
				
			||||||
        elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY
 | 
					        elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY
 | 
				
			||||||
            con_kwargs, bars_kwargs = _adhoc_symbol_map[symbol]
 | 
					            con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym]
 | 
				
			||||||
            con = ibis.Commodity(**con_kwargs)
 | 
					            con = ibis.Commodity(**con_kwargs)
 | 
				
			||||||
            con.bars_kwargs = bars_kwargs
 | 
					            con.bars_kwargs = bars_kwargs
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # crypto$
 | 
					 | 
				
			||||||
        elif exch == 'PAXOS':  # btc.paxos
 | 
					 | 
				
			||||||
            con = ibis.Crypto(
 | 
					 | 
				
			||||||
                symbol=symbol,
 | 
					 | 
				
			||||||
                currency=currency,
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # stonks
 | 
					        # stonks
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            # TODO: metadata system for all these exchange rules..
 | 
					            # TODO: metadata system for all these exchange rules..
 | 
				
			||||||
| 
						 | 
					@ -731,61 +569,41 @@ class Client:
 | 
				
			||||||
                exch = 'SMART'
 | 
					                exch = 'SMART'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            else:
 | 
					            else:
 | 
				
			||||||
                # XXX: order is super important here since
 | 
					 | 
				
			||||||
                # a primary == 'SMART' won't ever work.
 | 
					 | 
				
			||||||
                primaryExchange = exch
 | 
					 | 
				
			||||||
                exch = 'SMART'
 | 
					                exch = 'SMART'
 | 
				
			||||||
 | 
					                primaryExchange = exch
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            con = ibis.Stock(
 | 
					            con = ibis.Stock(
 | 
				
			||||||
                symbol=symbol,
 | 
					                symbol=sym,
 | 
				
			||||||
                exchange=exch,
 | 
					                exchange=exch,
 | 
				
			||||||
                primaryExchange=primaryExchange,
 | 
					                primaryExchange=primaryExchange,
 | 
				
			||||||
                currency=currency,
 | 
					                currency=currency,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					        try:
 | 
				
			||||||
            exch = 'SMART' if not exch else exch
 | 
					            exch = 'SMART' if not exch else exch
 | 
				
			||||||
 | 
					            if qualify:
 | 
				
			||||||
 | 
					                contract = (await self.ib.qualifyContractsAsync(con))[0]
 | 
				
			||||||
 | 
					            else:
 | 
				
			||||||
 | 
					                assert contract
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        contracts = [con]
 | 
					        except IndexError:
 | 
				
			||||||
        if qualify:
 | 
					            raise ValueError(f"No contract could be found {con}")
 | 
				
			||||||
            try:
 | 
					 | 
				
			||||||
                contracts = await self.ib.qualifyContractsAsync(con)
 | 
					 | 
				
			||||||
            except RequestError as err:
 | 
					 | 
				
			||||||
                msg = err.message
 | 
					 | 
				
			||||||
                if (
 | 
					 | 
				
			||||||
                    'No security definition' in msg
 | 
					 | 
				
			||||||
                    and not err_on_qualify
 | 
					 | 
				
			||||||
                ):
 | 
					 | 
				
			||||||
                    log.warning(
 | 
					 | 
				
			||||||
                        f'Could not find def for {con}')
 | 
					 | 
				
			||||||
                    return None
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                else:
 | 
					        self._contracts[pattern] = contract
 | 
				
			||||||
                    raise
 | 
					 | 
				
			||||||
            if not contracts:
 | 
					 | 
				
			||||||
                raise ValueError(f"No contract could be found {con}")
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # pack all contracts into cache
 | 
					        # add an aditional entry with expiry suffix if available
 | 
				
			||||||
        for tract in contracts:
 | 
					        conexp = contract.lastTradeDateOrContractMonth
 | 
				
			||||||
            exch: str = tract.primaryExchange or tract.exchange or exch
 | 
					        if conexp:
 | 
				
			||||||
            pattern = f'{symbol}.{exch}'
 | 
					            self._contracts[pattern + f'.{conexp}'] = contract
 | 
				
			||||||
            expiry = tract.lastTradeDateOrContractMonth
 | 
					 | 
				
			||||||
            # add an entry with expiry suffix if available
 | 
					 | 
				
			||||||
            if expiry:
 | 
					 | 
				
			||||||
                pattern += f'.{expiry}'
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            self._contracts[pattern.lower()] = tract
 | 
					        return contract
 | 
				
			||||||
 | 
					 | 
				
			||||||
        return contracts
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def get_head_time(
 | 
					    async def get_head_time(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        fqsn: str,
 | 
					        contract: Contract,
 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> datetime:
 | 
					    ) -> datetime:
 | 
				
			||||||
        '''
 | 
					        """Return the first datetime stamp for ``contract``.
 | 
				
			||||||
        Return the first datetime stamp for ``contract``.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        contract = (await self.find_contracts(fqsn))[0]
 | 
					 | 
				
			||||||
        return await self.ib.reqHeadTimeStampAsync(
 | 
					        return await self.ib.reqHeadTimeStampAsync(
 | 
				
			||||||
            contract,
 | 
					            contract,
 | 
				
			||||||
            whatToShow='TRADES',
 | 
					            whatToShow='TRADES',
 | 
				
			||||||
| 
						 | 
					@ -796,10 +614,9 @@ class Client:
 | 
				
			||||||
    async def get_sym_details(
 | 
					    async def get_sym_details(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        symbol: str,
 | 
					        symbol: str,
 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> tuple[Contract, Ticker, ContractDetails]:
 | 
					    ) -> tuple[Contract, Ticker, ContractDetails]:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        contract = (await self.find_contracts(symbol))[0]
 | 
					        contract = await self.find_contract(symbol)
 | 
				
			||||||
        ticker: Ticker = self.ib.reqMktData(
 | 
					        ticker: Ticker = self.ib.reqMktData(
 | 
				
			||||||
            contract,
 | 
					            contract,
 | 
				
			||||||
            snapshot=True,
 | 
					            snapshot=True,
 | 
				
			||||||
| 
						 | 
					@ -855,7 +672,9 @@ class Client:
 | 
				
			||||||
    # async to be consistent for the client proxy, and cuz why not.
 | 
					    # async to be consistent for the client proxy, and cuz why not.
 | 
				
			||||||
    def submit_limit(
 | 
					    def submit_limit(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        oid: str,  # ignored since doesn't support defining your own
 | 
					        # ignored since ib doesn't support defining your
 | 
				
			||||||
 | 
					        # own order id
 | 
				
			||||||
 | 
					        oid: str,
 | 
				
			||||||
        symbol: str,
 | 
					        symbol: str,
 | 
				
			||||||
        price: float,
 | 
					        price: float,
 | 
				
			||||||
        action: str,
 | 
					        action: str,
 | 
				
			||||||
| 
						 | 
					@ -871,9 +690,6 @@ class Client:
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        Place an order and return integer request id provided by client.
 | 
					        Place an order and return integer request id provided by client.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        Relevant docs:
 | 
					 | 
				
			||||||
        - https://interactivebrokers.github.io/tws-api/order_limitations.html
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        try:
 | 
					        try:
 | 
				
			||||||
            contract = self._contracts[symbol]
 | 
					            contract = self._contracts[symbol]
 | 
				
			||||||
| 
						 | 
					@ -899,9 +715,6 @@ class Client:
 | 
				
			||||||
                    optOutSmartRouting=True,
 | 
					                    optOutSmartRouting=True,
 | 
				
			||||||
                    routeMarketableToBbo=True,
 | 
					                    routeMarketableToBbo=True,
 | 
				
			||||||
                    designatedLocation='SMART',
 | 
					                    designatedLocation='SMART',
 | 
				
			||||||
                    # TODO: make all orders GTC?
 | 
					 | 
				
			||||||
                    # https://interactivebrokers.github.io/tws-api/classIBApi_1_1Order.html#a95539081751afb9980f4c6bd1655a6ba
 | 
					 | 
				
			||||||
                    # goodTillDate=f"yyyyMMdd-HH:mm:ss",
 | 
					 | 
				
			||||||
                ),
 | 
					                ),
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
        except AssertionError:  # errrg insync..
 | 
					        except AssertionError:  # errrg insync..
 | 
				
			||||||
| 
						 | 
					@ -991,73 +804,6 @@ class Client:
 | 
				
			||||||
        return self.ib.positions(account=account)
 | 
					        return self.ib.positions(account=account)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def con2fqsn(
 | 
					 | 
				
			||||||
    con: Contract,
 | 
					 | 
				
			||||||
    _cache: dict[int, (str, bool)] = {}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> tuple[str, bool]:
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Convert contracts to fqsn-style strings to be used both in symbol-search
 | 
					 | 
				
			||||||
    matching and as feed tokens passed to the front end data deed layer.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    Previously seen contracts are cached by id.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    # should be real volume for this contract by default
 | 
					 | 
				
			||||||
    calc_price = False
 | 
					 | 
				
			||||||
    if con.conId:
 | 
					 | 
				
			||||||
        try:
 | 
					 | 
				
			||||||
            return _cache[con.conId]
 | 
					 | 
				
			||||||
        except KeyError:
 | 
					 | 
				
			||||||
            pass
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    suffix = con.primaryExchange or con.exchange
 | 
					 | 
				
			||||||
    symbol = con.symbol
 | 
					 | 
				
			||||||
    expiry = con.lastTradeDateOrContractMonth or ''
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    match con:
 | 
					 | 
				
			||||||
        case Option():
 | 
					 | 
				
			||||||
            # TODO: option symbol parsing and sane display:
 | 
					 | 
				
			||||||
            symbol = con.localSymbol.replace(' ', '')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        case ibis.Commodity():
 | 
					 | 
				
			||||||
            # commodities and forex don't have an exchange name and
 | 
					 | 
				
			||||||
            # no real volume so we have to calculate the price
 | 
					 | 
				
			||||||
            suffix = con.secType
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # no real volume on this tract
 | 
					 | 
				
			||||||
            calc_price = True
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        case ibis.Forex() | ibis.Contract(secType='CASH'):
 | 
					 | 
				
			||||||
            dst, src = con.localSymbol.split('.')
 | 
					 | 
				
			||||||
            symbol = ''.join([dst, src])
 | 
					 | 
				
			||||||
            suffix = con.exchange
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # no real volume on forex feeds..
 | 
					 | 
				
			||||||
            calc_price = True
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if not suffix:
 | 
					 | 
				
			||||||
        entry = _adhoc_symbol_map.get(
 | 
					 | 
				
			||||||
            con.symbol or con.localSymbol
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
        if entry:
 | 
					 | 
				
			||||||
            meta, kwargs = entry
 | 
					 | 
				
			||||||
            cid = meta.get('conId')
 | 
					 | 
				
			||||||
            if cid:
 | 
					 | 
				
			||||||
                assert con.conId == meta['conId']
 | 
					 | 
				
			||||||
            suffix = meta['exchange']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # append a `.<suffix>` to the returned symbol
 | 
					 | 
				
			||||||
    # key for derivatives that normally is the expiry
 | 
					 | 
				
			||||||
    # date key.
 | 
					 | 
				
			||||||
    if expiry:
 | 
					 | 
				
			||||||
        suffix += f'.{expiry}'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    fqsn_key = '.'.join((symbol, suffix)).lower()
 | 
					 | 
				
			||||||
    _cache[con.conId] = fqsn_key, calc_price
 | 
					 | 
				
			||||||
    return fqsn_key, calc_price
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# per-actor API ep caching
 | 
					# per-actor API ep caching
 | 
				
			||||||
_client_cache: dict[tuple[str, int], Client] = {}
 | 
					_client_cache: dict[tuple[str, int], Client] = {}
 | 
				
			||||||
_scan_ignore: set[tuple[str, int]] = set()
 | 
					_scan_ignore: set[tuple[str, int]] = set()
 | 
				
			||||||
| 
						 | 
					@ -1065,23 +811,10 @@ _scan_ignore: set[tuple[str, int]] = set()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def get_config() -> dict[str, Any]:
 | 
					def get_config() -> dict[str, Any]:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    conf, path = config.load('brokers')
 | 
					    conf, path = config.load()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    section = conf.get('ib')
 | 
					    section = conf.get('ib')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    accounts = section.get('accounts')
 | 
					 | 
				
			||||||
    if not accounts:
 | 
					 | 
				
			||||||
        raise ValueError(
 | 
					 | 
				
			||||||
            'brokers.toml -> `ib.accounts` must be defined\n'
 | 
					 | 
				
			||||||
            f'location: {path}'
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    names = list(accounts.keys())
 | 
					 | 
				
			||||||
    accts = section['accounts'] = bidict(accounts)
 | 
					 | 
				
			||||||
    log.info(
 | 
					 | 
				
			||||||
        f'brokers.toml defines {len(accts)} accounts: '
 | 
					 | 
				
			||||||
        f'{pformat(names)}'
 | 
					 | 
				
			||||||
    )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if section is None:
 | 
					    if section is None:
 | 
				
			||||||
        log.warning(f'No config section found for ib in {path}')
 | 
					        log.warning(f'No config section found for ib in {path}')
 | 
				
			||||||
        return {}
 | 
					        return {}
 | 
				
			||||||
| 
						 | 
					@ -1103,7 +836,6 @@ async def load_aio_clients(
 | 
				
			||||||
    # retry a few times to get the client going..
 | 
					    # retry a few times to get the client going..
 | 
				
			||||||
    connect_retries: int = 3,
 | 
					    connect_retries: int = 3,
 | 
				
			||||||
    connect_timeout: float = 0.5,
 | 
					    connect_timeout: float = 0.5,
 | 
				
			||||||
    disconnect_on_exit: bool = True,
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> dict[str, Client]:
 | 
					) -> dict[str, Client]:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -1176,12 +908,6 @@ async def load_aio_clients(
 | 
				
			||||||
                    # careful.
 | 
					                    # careful.
 | 
				
			||||||
                    timeout=connect_timeout,
 | 
					                    timeout=connect_timeout,
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
                # create and cache client
 | 
					 | 
				
			||||||
                client = Client(ib)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # update all actor-global caches
 | 
					 | 
				
			||||||
                log.info(f"Caching client for {sockaddr}")
 | 
					 | 
				
			||||||
                _client_cache[sockaddr] = client
 | 
					 | 
				
			||||||
                break
 | 
					                break
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            except (
 | 
					            except (
 | 
				
			||||||
| 
						 | 
					@ -1205,9 +931,21 @@ async def load_aio_clients(
 | 
				
			||||||
                log.warning(
 | 
					                log.warning(
 | 
				
			||||||
                    f'Failed to connect on {port} for {i} time, retrying...')
 | 
					                    f'Failed to connect on {port} for {i} time, retrying...')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # create and cache client
 | 
				
			||||||
 | 
					        client = Client(ib)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # Pre-collect all accounts available for this
 | 
					        # Pre-collect all accounts available for this
 | 
				
			||||||
        # connection and map account names to this client
 | 
					        # connection and map account names to this client
 | 
				
			||||||
        # instance.
 | 
					        # instance.
 | 
				
			||||||
 | 
					        pps = ib.positions()
 | 
				
			||||||
 | 
					        if pps:
 | 
				
			||||||
 | 
					            for pp in pps:
 | 
				
			||||||
 | 
					                accounts_found[
 | 
				
			||||||
 | 
					                    accounts_def.inverse[pp.account]
 | 
				
			||||||
 | 
					                ] = client
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # if there are accounts without positions we should still
 | 
				
			||||||
 | 
					        # register them for this client
 | 
				
			||||||
        for value in ib.accountValues():
 | 
					        for value in ib.accountValues():
 | 
				
			||||||
            acct_number = value.account
 | 
					            acct_number = value.account
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -1228,6 +966,10 @@ async def load_aio_clients(
 | 
				
			||||||
            f'{pformat(accounts_found)}'
 | 
					            f'{pformat(accounts_found)}'
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # update all actor-global caches
 | 
				
			||||||
 | 
					        log.info(f"Caching client for {sockaddr}")
 | 
				
			||||||
 | 
					        _client_cache[sockaddr] = client
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # XXX: why aren't we just updating this directy above
 | 
					        # XXX: why aren't we just updating this directy above
 | 
				
			||||||
        # instead of using the intermediary `accounts_found`?
 | 
					        # instead of using the intermediary `accounts_found`?
 | 
				
			||||||
        _accounts2clients.update(accounts_found)
 | 
					        _accounts2clients.update(accounts_found)
 | 
				
			||||||
| 
						 | 
					@ -1245,11 +987,10 @@ async def load_aio_clients(
 | 
				
			||||||
    finally:
 | 
					    finally:
 | 
				
			||||||
        # TODO: for re-scans we'll want to not teardown clients which
 | 
					        # TODO: for re-scans we'll want to not teardown clients which
 | 
				
			||||||
        # are up and stable right?
 | 
					        # are up and stable right?
 | 
				
			||||||
        if disconnect_on_exit:
 | 
					        for acct, client in _accounts2clients.items():
 | 
				
			||||||
            for acct, client in _accounts2clients.items():
 | 
					            log.info(f'Disconnecting {acct}@{client}')
 | 
				
			||||||
                log.info(f'Disconnecting {acct}@{client}')
 | 
					            client.ib.disconnect()
 | 
				
			||||||
                client.ib.disconnect()
 | 
					            _client_cache.pop((host, port))
 | 
				
			||||||
                _client_cache.pop((host, port), None)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
async def load_clients_for_trio(
 | 
					async def load_clients_for_trio(
 | 
				
			||||||
| 
						 | 
					@ -1278,6 +1019,9 @@ async def load_clients_for_trio(
 | 
				
			||||||
            await asyncio.sleep(float('inf'))
 | 
					            await asyncio.sleep(float('inf'))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					_proxies: dict[str, MethodProxy] = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@acm
 | 
					@acm
 | 
				
			||||||
async def open_client_proxies() -> tuple[
 | 
					async def open_client_proxies() -> tuple[
 | 
				
			||||||
    dict[str, MethodProxy],
 | 
					    dict[str, MethodProxy],
 | 
				
			||||||
| 
						 | 
					@ -1285,6 +1029,7 @@ async def open_client_proxies() -> tuple[
 | 
				
			||||||
]:
 | 
					]:
 | 
				
			||||||
    async with (
 | 
					    async with (
 | 
				
			||||||
        tractor.trionics.maybe_open_context(
 | 
					        tractor.trionics.maybe_open_context(
 | 
				
			||||||
 | 
					            # acm_func=open_client_proxies,
 | 
				
			||||||
            acm_func=tractor.to_asyncio.open_channel_from,
 | 
					            acm_func=tractor.to_asyncio.open_channel_from,
 | 
				
			||||||
            kwargs={'target': load_clients_for_trio},
 | 
					            kwargs={'target': load_clients_for_trio},
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -1299,14 +1044,13 @@ async def open_client_proxies() -> tuple[
 | 
				
			||||||
        if cache_hit:
 | 
					        if cache_hit:
 | 
				
			||||||
            log.info(f'Re-using cached clients: {clients}')
 | 
					            log.info(f'Re-using cached clients: {clients}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        proxies = {}
 | 
					 | 
				
			||||||
        for acct_name, client in clients.items():
 | 
					        for acct_name, client in clients.items():
 | 
				
			||||||
            proxy = await stack.enter_async_context(
 | 
					            proxy = await stack.enter_async_context(
 | 
				
			||||||
                open_client_proxy(client),
 | 
					                open_client_proxy(client),
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
            proxies[acct_name] = proxy
 | 
					            _proxies[acct_name] = proxy
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        yield proxies, clients
 | 
					        yield _proxies, clients
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def get_preferred_data_client(
 | 
					def get_preferred_data_client(
 | 
				
			||||||
| 
						 | 
					@ -1455,13 +1199,11 @@ async def open_client_proxy(
 | 
				
			||||||
    event_table = {}
 | 
					    event_table = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async with (
 | 
					    async with (
 | 
				
			||||||
 | 
					 | 
				
			||||||
        to_asyncio.open_channel_from(
 | 
					        to_asyncio.open_channel_from(
 | 
				
			||||||
            open_aio_client_method_relay,
 | 
					            open_aio_client_method_relay,
 | 
				
			||||||
            client=client,
 | 
					            client=client,
 | 
				
			||||||
            event_consumers=event_table,
 | 
					            event_consumers=event_table,
 | 
				
			||||||
        ) as (first, chan),
 | 
					        ) as (first, chan),
 | 
				
			||||||
 | 
					 | 
				
			||||||
        trio.open_nursery() as relay_n,
 | 
					        trio.open_nursery() as relay_n,
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| 
						 | 
					@ -1,64 +0,0 @@
 | 
				
			||||||
``kraken`` backend
 | 
					 | 
				
			||||||
------------------
 | 
					 | 
				
			||||||
though they don't have the most liquidity of all the cexes they sure are
 | 
					 | 
				
			||||||
accommodating to those of us who appreciate a little ``xmr``.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
status
 | 
					 | 
				
			||||||
******
 | 
					 | 
				
			||||||
current support is *production grade* and both real-time data and order
 | 
					 | 
				
			||||||
management should be correct and fast. this backend is used by core devs
 | 
					 | 
				
			||||||
for live trading.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
config
 | 
					 | 
				
			||||||
******
 | 
					 | 
				
			||||||
In order to get order mode support your ``brokers.toml``
 | 
					 | 
				
			||||||
needs to have something like the following:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. code:: toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
   [kraken]
 | 
					 | 
				
			||||||
   accounts.spot = 'spot'
 | 
					 | 
				
			||||||
   key_descr = "spot"
 | 
					 | 
				
			||||||
   api_key = "69696969696969696696969696969696969696969696969696969696"
 | 
					 | 
				
			||||||
   secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
If everything works correctly you should see any current positions
 | 
					 | 
				
			||||||
loaded in the pps pane on chart load and you should also be able to
 | 
					 | 
				
			||||||
check your trade records in the file::
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
An example ledger file will have entries written verbatim from the
 | 
					 | 
				
			||||||
trade events schema:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. code:: toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    [TFJBKK-SMBZS-VJ4UWS]
 | 
					 | 
				
			||||||
    ordertxid = "SMBZSA-7CNQU-3HWLNJ"
 | 
					 | 
				
			||||||
    postxid = "SMBZSE-M7IF5-CFI7LT"
 | 
					 | 
				
			||||||
    pair = "XXMRZEUR"
 | 
					 | 
				
			||||||
    time = 1655691993.4133966
 | 
					 | 
				
			||||||
    type = "buy"
 | 
					 | 
				
			||||||
    ordertype = "limit"
 | 
					 | 
				
			||||||
    price = "103.97000000"
 | 
					 | 
				
			||||||
    cost = "499.99999977"
 | 
					 | 
				
			||||||
    fee = "0.80000000"
 | 
					 | 
				
			||||||
    vol = "4.80907954"
 | 
					 | 
				
			||||||
    margin = "0.00000000"
 | 
					 | 
				
			||||||
    misc = ""
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
your ``pps.toml`` file will have position entries like,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
.. code:: toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
   [kraken.spot."xmreur.kraken"]
 | 
					 | 
				
			||||||
   size = 4.80907954
 | 
					 | 
				
			||||||
   ppu = 103.97000000
 | 
					 | 
				
			||||||
   bsuid = "XXMRZEUR"
 | 
					 | 
				
			||||||
   clears = [
 | 
					 | 
				
			||||||
    { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
 | 
					 | 
				
			||||||
   ]
 | 
					 | 
				
			||||||
| 
						 | 
					@ -1,61 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
Kraken backend.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
Sub-modules within break into the core functionalities:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
- ``broker.py`` part for orders / trading endpoints
 | 
					 | 
				
			||||||
- ``feed.py`` for real-time data feed endpoints
 | 
					 | 
				
			||||||
- ``api.py`` for the core API machinery which is ``trio``-ized
 | 
					 | 
				
			||||||
  wrapping around ``ib_insync``.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from piker.log import get_logger
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
log = get_logger(__name__)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from .api import (
 | 
					 | 
				
			||||||
    get_client,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from .feed import (
 | 
					 | 
				
			||||||
    open_history_client,
 | 
					 | 
				
			||||||
    open_symbol_search,
 | 
					 | 
				
			||||||
    stream_quotes,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from .broker import (
 | 
					 | 
				
			||||||
    trades_dialogue,
 | 
					 | 
				
			||||||
    norm_trade_records,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
__all__ = [
 | 
					 | 
				
			||||||
    'get_client',
 | 
					 | 
				
			||||||
    'trades_dialogue',
 | 
					 | 
				
			||||||
    'open_history_client',
 | 
					 | 
				
			||||||
    'open_symbol_search',
 | 
					 | 
				
			||||||
    'stream_quotes',
 | 
					 | 
				
			||||||
    'norm_trade_records',
 | 
					 | 
				
			||||||
]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# tractor RPC enable arg
 | 
					 | 
				
			||||||
__enable_modules__: list[str] = [
 | 
					 | 
				
			||||||
    'api',
 | 
					 | 
				
			||||||
    'feed',
 | 
					 | 
				
			||||||
    'broker',
 | 
					 | 
				
			||||||
]
 | 
					 | 
				
			||||||
| 
						 | 
					@ -1,540 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
Kraken web API wrapping.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
from contextlib import asynccontextmanager as acm
 | 
					 | 
				
			||||||
from datetime import datetime
 | 
					 | 
				
			||||||
import itertools
 | 
					 | 
				
			||||||
from typing import (
 | 
					 | 
				
			||||||
    Any,
 | 
					 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Union,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
import time
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from bidict import bidict
 | 
					 | 
				
			||||||
import pendulum
 | 
					 | 
				
			||||||
import asks
 | 
					 | 
				
			||||||
from fuzzywuzzy import process as fuzzy
 | 
					 | 
				
			||||||
import numpy as np
 | 
					 | 
				
			||||||
import urllib.parse
 | 
					 | 
				
			||||||
import hashlib
 | 
					 | 
				
			||||||
import hmac
 | 
					 | 
				
			||||||
import base64
 | 
					 | 
				
			||||||
import trio
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from piker import config
 | 
					 | 
				
			||||||
from piker.brokers._util import (
 | 
					 | 
				
			||||||
    resproc,
 | 
					 | 
				
			||||||
    SymbolNotFound,
 | 
					 | 
				
			||||||
    BrokerError,
 | 
					 | 
				
			||||||
    DataThrottle,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from piker.pp import Transaction
 | 
					 | 
				
			||||||
from . import log
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# <uri>/<version>/
 | 
					 | 
				
			||||||
_url = 'https://api.kraken.com/0'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# Broker specific ohlc schema which includes a vwap field
 | 
					 | 
				
			||||||
_ohlc_dtype = [
 | 
					 | 
				
			||||||
    ('index', int),
 | 
					 | 
				
			||||||
    ('time', int),
 | 
					 | 
				
			||||||
    ('open', float),
 | 
					 | 
				
			||||||
    ('high', float),
 | 
					 | 
				
			||||||
    ('low', float),
 | 
					 | 
				
			||||||
    ('close', float),
 | 
					 | 
				
			||||||
    ('volume', float),
 | 
					 | 
				
			||||||
    ('count', int),
 | 
					 | 
				
			||||||
    ('bar_wap', float),
 | 
					 | 
				
			||||||
]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# UI components allow this to be declared such that additional
 | 
					 | 
				
			||||||
# (historical) fields can be exposed.
 | 
					 | 
				
			||||||
ohlc_dtype = np.dtype(_ohlc_dtype)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
_show_wap_in_history = True
 | 
					 | 
				
			||||||
_symbol_info_translation: dict[str, str] = {
 | 
					 | 
				
			||||||
    'tick_decimals': 'pair_decimals',
 | 
					 | 
				
			||||||
}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def get_config() -> dict[str, Any]:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    conf, path = config.load()
 | 
					 | 
				
			||||||
    section = conf.get('kraken')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if section is None:
 | 
					 | 
				
			||||||
        log.warning(f'No config section found for kraken in {path}')
 | 
					 | 
				
			||||||
        return {}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    return section
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def get_kraken_signature(
 | 
					 | 
				
			||||||
    urlpath: str,
 | 
					 | 
				
			||||||
    data: dict[str, Any],
 | 
					 | 
				
			||||||
    secret: str
 | 
					 | 
				
			||||||
) -> str:
 | 
					 | 
				
			||||||
    postdata = urllib.parse.urlencode(data)
 | 
					 | 
				
			||||||
    encoded = (str(data['nonce']) + postdata).encode()
 | 
					 | 
				
			||||||
    message = urlpath.encode() + hashlib.sha256(encoded).digest()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
 | 
					 | 
				
			||||||
    sigdigest = base64.b64encode(mac.digest())
 | 
					 | 
				
			||||||
    return sigdigest.decode()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class InvalidKey(ValueError):
    '''
    EAPI:Invalid key

    This error is returned when the API key used for the call is
    either expired or disabled, please review the API key in your
    Settings -> API tab of account management or generate a new one
    and update your application.

    '''
class Client:
    '''
    Async REST client for the kraken crypto exchange:
    https://docs.kraken.com/rest/

    Wraps both public (market data) and private (auth'd account)
    endpoints; the latter require ``api_key``/``secret`` credentials.

    '''
    # global symbol normalization table:
    # maps both rest-api keys and ws names -> kraken "altname"s
    _ntable: dict[str, str] = {}

    # bidirectional map of kraken asset ids <-> "altname"s,
    # filled in by `.cache_assets()`
    _atable: bidict[str, str] = bidict()

    def __init__(
        self,
        config: dict[str, str],
        name: str = '',
        api_key: str = '',
        secret: str = ''
    ) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
        self.conf: dict[str, str] = config

        # pair info table, filled in by `.cache_symbols()`
        self._pairs: list[str] = []
        self._name = name
        self._api_key = api_key
        self._secret = secret

    @property
    def pairs(self) -> dict[str, Any]:
        '''
        Cached tradable-pair info table.

        Raises ``RuntimeError`` if `.cache_symbols()` was never run.

        '''
        # XXX: fixed guard; previously this checked `is None` which
        # could never trigger since `__init__()` sets `._pairs = []`.
        if not self._pairs:
            raise RuntimeError(
                "Make sure to run `cache_symbols()` on startup!"
            )

        return self._pairs

    async def _public(
        self,
        method: str,
        data: dict,
    ) -> dict[str, Any]:
        '''
        POST to a public endpoint and return the decoded response.

        '''
        resp = await self._sesh.post(
            path=f'/public/{method}',
            json=data,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def _private(
        self,
        method: str,
        data: dict,
        uri_path: str
    ) -> dict[str, Any]:
        '''
        POST to a private endpoint with auth headers (api key +
        request signature) and return the decoded response.

        '''
        headers = {
            'Content-Type':
                'application/x-www-form-urlencoded',
            'API-Key':
                self._api_key,
            'API-Sign':
                get_kraken_signature(uri_path, data, self._secret)
        }
        resp = await self._sesh.post(
            path=f'/private/{method}',
            data=data,
            headers=headers,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def endpoint(
        self,
        method: str,
        data: dict[str, Any]

    ) -> dict[str, Any]:
        '''
        Hit private endpoint ``method``, injecting a fresh
        ms-resolution nonce as required by kraken's auth scheme.

        '''
        uri_path = f'/0/private/{method}'
        data['nonce'] = str(int(1000*time.time()))
        return await self._private(method, data, uri_path)

    async def get_balances(
        self,
    ) -> dict[str, float]:
        '''
        Return the set of asset balances for this account
        by symbol.

        '''
        resp = await self.endpoint(
            'Balance',
            {},
        )
        by_bsuid = resp['result']
        return {
            # normalize kraken asset ids to lower-cased "altname"s
            self._atable[sym].lower(): float(bal)
            for sym, bal in by_bsuid.items()
        }

    async def get_assets(self) -> dict[str, dict]:
        # https://docs.kraken.com/rest/#operation/getAssetInfo
        resp = await self._public('Assets', {})
        return resp['result']

    async def cache_assets(self) -> None:
        # fill the asset-id <-> "altname" bidict from the 'Assets'
        # endpoint response
        assets = self.assets = await self.get_assets()
        for bsuid, info in assets.items():
            self._atable[bsuid] = info['altname']

    async def get_trades(
        self,
        fetch_limit: int = 10,

    ) -> dict[str, Any]:
        '''
        Get the trades (aka cleared orders) history from the rest endpoint:
        https://docs.kraken.com/rest/#operation/getTradeHistory

        Pages through results 50-at-a-time up to ``fetch_limit``
        requests.

        '''
        ofs = 0
        trades_by_id: dict[str, Any] = {}

        # XXX: fixed a latent `NameError`: `count` was previously only
        # bound inside the "end of history" branch below, so exiting
        # the loop via `fetch_limit` crashed the final sanity check.
        count: int = 0

        for i in itertools.count():
            if i >= fetch_limit:
                break

            # increment 'ofs' pagination offset
            ofs = i*50

            resp = await self.endpoint(
                'TradesHistory',
                {'ofs': ofs},
            )
            # total trade count reported by the server
            count = resp['result']['count']
            by_id = resp['result']['trades']
            trades_by_id.update(by_id)

            # we can get up to 50 results per query
            if (
                len(by_id) < 50
            ):
                err = resp.get('error')
                if err:
                    raise BrokerError(err)

                # fewer than the per-query max means we've hit the
                # end of the available trade history; stop paging.
                break

        # sanity check on update
        assert count == len(trades_by_id.values())
        return trades_by_id

    async def get_xfers(
        self,
        asset: str,
        src_asset: str = '',

    ) -> dict[str, Transaction]:
        '''
        Get asset balance transfer transactions.

        Currently only withdrawals are supported.

        '''
        xfers: list[dict] = (await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
        ))['result']

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
        #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
        #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
        #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
        #     'amount': '0.00300726', 'fee': '0.00001000', 'time':
        #     1658347714, 'status': 'Success'}]}

        trans: dict[str, Transaction] = {}
        for entry in xfers:
            # look up the normalized name
            asset = self._atable[entry['asset']].lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commissions cost necessarily..)
            cost = float(entry['fee'])

            tran = Transaction(
                fqsn=asset + '.kraken',
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
                bsuid=f'{asset}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +
                    cost
                ),
                # since this will be treated as a "sell" it
                # shouldn't be needed to compute the be price.
                price='NaN',

                # XXX: see note above
                cost=0,
            )
            trans[tran.tid] = tran

        return trans

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float,
        reqid: Optional[str] = None,
        validate: bool = False  # set True test call without a real submission

    ) -> dict:
        '''
        Place an order and return integer request id provided by client.

        When ``reqid`` (a kraken ``txid``) is passed, the existing
        order is amended via the 'EditOrder' endpoint instead.

        '''
        # Build common data dict for common keys from both endpoints
        data = {
            "pair": symbol,
            "price": str(price),
            "validate": validate
        }
        if reqid is None:
            # Build order data for kraken api
            data |= {
                "ordertype": "limit",
                "type": action,
                "volume": str(size),
            }
            return await self.endpoint('AddOrder', data)

        else:
            # Edit order data for kraken api
            data["txid"] = reqid
            return await self.endpoint('EditOrder', data)

    async def submit_cancel(
        self,
        reqid: str,
    ) -> dict:
        '''
        Send cancel request for order id ``reqid``.

        '''
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})

    async def symbol_info(
        self,
        pair: Optional[str] = None,

    ) -> dict[str, dict[str, str]]:
        '''
        Retrieve tradable-pair info from the public 'AssetPairs'
        endpoint: a single pair's info dict when ``pair`` is passed,
        otherwise the full table keyed by rest-api pair name.

        '''
        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return data
        else:
            return pairs

    async def cache_symbols(
        self,
    ) -> dict:
        '''
        Fetch (once) and cache all pair info, also building the
        class-global normalization table mapping both rest-api keys
        and ws names to "altname"s.

        '''
        if not self._pairs:
            self._pairs = await self.symbol_info()

            ntable = {}
            for restapikey, info in self._pairs.items():
                ntable[restapikey] = ntable[info['wsname']] = info['altname']

            self._ntable.update(ntable)

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: Optional[int] = None,
    ) -> dict[str, Any]:
        '''
        Fuzzy-search the pair info set for ``pattern`` and return
        matches keyed by "altname".

        '''
        # XXX: fixed guard; previously checked `is not None` which was
        # always true since `._pairs` defaults to an empty list.
        if self._pairs:
            data = self._pairs
        else:
            data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0]['altname']: item[0] for item in matches}

    async def bars(
        self,
        symbol: str = 'XBTUSD',

        # UTC 2017-07-02 12:53:20
        since: Optional[Union[int, datetime]] = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:
        '''
        Retrieve OHLC bar history from the public 'OHLC' endpoint.

        Returns a structured numpy array in the `_ohlc_dtype` schema
        (or the raw bar list when ``as_np=False``).

        '''
        if since is None:
            since = pendulum.now('UTC').start_of('minute').subtract(
                minutes=count).timestamp()

        elif isinstance(since, int):
            since = pendulum.from_timestamp(since).timestamp()

        else:  # presumably a pendulum datetime
            since = since.timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, int(since)))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            return array
        except KeyError:
            errmsg = json['error'][0]

            if 'not found' in errmsg:
                raise SymbolNotFound(errmsg + f': {symbol}')

            elif 'Too many requests' in errmsg:
                raise DataThrottle(f'{symbol}')

            else:
                raise BrokerError(errmsg)

    @classmethod
    def normalize_symbol(
        cls,
        ticker: str
    ) -> str:
        '''
        Normalize symbol names to a 3x3 pair from the global
        definition map which we build out from the data retrieved from
        the 'AssetPairs' endpoint, see methods above.

        '''
        ticker = cls._ntable[ticker]
        symlen = len(ticker)
        if symlen != 6:
            raise ValueError(f'Unhandled symbol: {ticker}')

        return ticker.lower()
 | 
					 | 
				
			||||||
@acm
async def get_client() -> Client:
    '''
    Open a (cached) kraken ``Client``, pre-loading all symbol and
    asset info before yielding it to the caller.

    Credentials are read from the `[kraken]` broker config section
    when present; otherwise an unauthenticated client is returned.

    '''
    conf = get_config()
    if not conf:
        client = Client({})
    else:
        client = Client(
            conf,
            name=conf['key_descr'],
            api_key=conf['api_key'],
            secret=conf['secret']
        )

    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as nurse:
        nurse.start_soon(client.cache_assets)
        await client.cache_symbols()

    yield client
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| 
						 | 
					@ -1,502 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
Real-time and historical data feed endpoints.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
from contextlib import asynccontextmanager as acm
 | 
					 | 
				
			||||||
from datetime import datetime
 | 
					 | 
				
			||||||
from typing import (
 | 
					 | 
				
			||||||
    Any,
 | 
					 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Callable,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
import time
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from async_generator import aclosing
 | 
					 | 
				
			||||||
from fuzzywuzzy import process as fuzzy
 | 
					 | 
				
			||||||
import numpy as np
 | 
					 | 
				
			||||||
import pendulum
 | 
					 | 
				
			||||||
from trio_typing import TaskStatus
 | 
					 | 
				
			||||||
import tractor
 | 
					 | 
				
			||||||
import trio
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from piker._cacheables import open_cached_client
 | 
					 | 
				
			||||||
from piker.brokers._util import (
 | 
					 | 
				
			||||||
    BrokerError,
 | 
					 | 
				
			||||||
    DataThrottle,
 | 
					 | 
				
			||||||
    DataUnavailable,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from piker.log import get_console_log
 | 
					 | 
				
			||||||
from piker.data import ShmArray
 | 
					 | 
				
			||||||
from piker.data.types import Struct
 | 
					 | 
				
			||||||
from piker.data._web_bs import open_autorecon_ws, NoBsWs
 | 
					 | 
				
			||||||
from . import log
 | 
					 | 
				
			||||||
from .api import (
 | 
					 | 
				
			||||||
    Client,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# https://www.kraken.com/features/api#get-tradable-pairs
 | 
					 | 
				
			||||||
class Pair(Struct):
    '''
    A tradable asset pair as schema-defined by kraken's 'AssetPairs'
    endpoint: https://www.kraken.com/features/api#get-tradable-pairs

    '''
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    # NOTE(review): presumably cost (price * volume) precision fields
    # per kraken's api docs — confirm against the endpoint schema
    cost_decimals: int
    costmin: float
    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
					 | 
				
			||||||
class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
        https://docs.kraken.com/websockets/#message-ohlc

    '''
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1  (name-interval)
    pair: str  # fx pair
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
    high: float  # High price within interval
    low: float  # Low price within interval
    close: float  # Close price of interval
    vwap: float  # Volume weighted average price within interval
    volume: float  # Accumulated volume **within interval**
    count: int  # Number of trades within interval
    # (sampled) generated tick data
    # NOTE(review): mutable default — assumes the project's `Struct`
    # (msgspec-style) copies defaults per instance; confirm, since a
    # plain class attribute here would be shared across instances.
    ticks: list[Any] = []
					 | 
				
			||||||
async def stream_messages(
 | 
					 | 
				
			||||||
    ws: NoBsWs,
 | 
					 | 
				
			||||||
):
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Message stream parser and heartbeat handler.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    Deliver ws subscription messages as well as handle heartbeat logic
 | 
					 | 
				
			||||||
    though a single async generator.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    too_slow_count = last_hb = 0
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    while True:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        with trio.move_on_after(5) as cs:
 | 
					 | 
				
			||||||
            msg = await ws.recv_msg()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # trigger reconnection if heartbeat is laggy
 | 
					 | 
				
			||||||
        if cs.cancelled_caught:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            too_slow_count += 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            if too_slow_count > 20:
 | 
					 | 
				
			||||||
                log.warning(
 | 
					 | 
				
			||||||
                    "Heartbeat is too slow, resetting ws connection")
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                await ws._connect()
 | 
					 | 
				
			||||||
                too_slow_count = 0
 | 
					 | 
				
			||||||
                continue
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        match msg:
 | 
					 | 
				
			||||||
            case {'event': 'heartbeat'}:
 | 
					 | 
				
			||||||
                now = time.time()
 | 
					 | 
				
			||||||
                delay = now - last_hb
 | 
					 | 
				
			||||||
                last_hb = now
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # XXX: why tf is this not printing without --tl flag?
 | 
					 | 
				
			||||||
                log.debug(f"Heartbeat after {delay}")
 | 
					 | 
				
			||||||
                # print(f"Heartbeat after {delay}")
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                continue
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            case _:
 | 
					 | 
				
			||||||
                # passthrough sub msgs
 | 
					 | 
				
			||||||
                yield msg
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def process_data_feed_msgs(
 | 
					 | 
				
			||||||
    ws: NoBsWs,
 | 
					 | 
				
			||||||
):
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Parse and pack data feed messages.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    async for msg in stream_messages(ws):
 | 
					 | 
				
			||||||
        match msg:
 | 
					 | 
				
			||||||
            case {
 | 
					 | 
				
			||||||
                'errorMessage': errmsg
 | 
					 | 
				
			||||||
            }:
 | 
					 | 
				
			||||||
                raise BrokerError(errmsg)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            case {
 | 
					 | 
				
			||||||
                'event': 'subscriptionStatus',
 | 
					 | 
				
			||||||
            } as sub:
 | 
					 | 
				
			||||||
                log.info(
 | 
					 | 
				
			||||||
                    'WS subscription is active:\n'
 | 
					 | 
				
			||||||
                    f'{sub}'
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
                continue
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            case [
 | 
					 | 
				
			||||||
                chan_id,
 | 
					 | 
				
			||||||
                *payload_array,
 | 
					 | 
				
			||||||
                chan_name,
 | 
					 | 
				
			||||||
                pair
 | 
					 | 
				
			||||||
            ]:
 | 
					 | 
				
			||||||
                if 'ohlc' in chan_name:
 | 
					 | 
				
			||||||
                    ohlc = OHLC(
 | 
					 | 
				
			||||||
                        chan_id,
 | 
					 | 
				
			||||||
                        chan_name,
 | 
					 | 
				
			||||||
                        pair,
 | 
					 | 
				
			||||||
                        *payload_array[0]
 | 
					 | 
				
			||||||
                    )
 | 
					 | 
				
			||||||
                    ohlc.typecast()
 | 
					 | 
				
			||||||
                    yield 'ohlc', ohlc
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                elif 'spread' in chan_name:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                    bid, ask, ts, bsize, asize = map(
 | 
					 | 
				
			||||||
                        float, payload_array[0])
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                    # TODO: really makes you think IB has a horrible API...
 | 
					 | 
				
			||||||
                    quote = {
 | 
					 | 
				
			||||||
                        'symbol': pair.replace('/', ''),
 | 
					 | 
				
			||||||
                        'ticks': [
 | 
					 | 
				
			||||||
                            {'type': 'bid', 'price': bid, 'size': bsize},
 | 
					 | 
				
			||||||
                            {'type': 'bsize', 'price': bid, 'size': bsize},
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                            {'type': 'ask', 'price': ask, 'size': asize},
 | 
					 | 
				
			||||||
                            {'type': 'asize', 'price': ask, 'size': asize},
 | 
					 | 
				
			||||||
                        ],
 | 
					 | 
				
			||||||
                    }
 | 
					 | 
				
			||||||
                    yield 'l1', quote
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # elif 'book' in msg[-2]:
 | 
					 | 
				
			||||||
                #     chan_id, *payload_array, chan_name, pair = msg
 | 
					 | 
				
			||||||
                #     print(msg)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            case _:
 | 
					 | 
				
			||||||
                print(f'UNHANDLED MSG: {msg}')
 | 
					 | 
				
			||||||
                # yield msg
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
def normalize(
    ohlc: 'OHLC',

) -> tuple[str, dict]:
    '''
    Flatten an ``OHLC`` struct into a piker quote ``dict`` and compute
    its pub-sub topic.

    Returns ``(topic, quote)`` where ``topic`` is the lower-cased,
    slash-stripped pair name.

    '''
    quote = ohlc.to_dict()

    # keep the broker's interval start time and stamp our own
    # processing time for downstream latency measurement
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()

    # kraken pairs come as eg. 'XBT/USD'; piker keys drop the slash
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercases symbols.
    topic = quote['pair'].replace('/', '').lower()

    # XXX: fix vs. original: the return annotation said ``dict`` but
    # this function has always returned a ``(topic, quote)`` tuple.
    # print(quote)
    return topic, quote
@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:
    '''
    Open a cached kraken client and deliver a history-backfill endpoint
    (``get_ohlc``) along with its query rate-limit settings.

    '''
    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more then the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            timeframe: float,
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            '''
            Fetch (up to) the most recent 720 1m bars ending at
            ``end_dt`` and return them with their actual time bounds.

            Raises ``DataUnavailable`` after the single allowed query,
            or when all throttled retries are exhausted.

            '''
            nonlocal queries
            if queries > 0:
                raise DataUnavailable

            # XXX: fix vs. original: the attempt counter only advanced
            # on success, so a persistently-throttled request would
            # retry forever and ``array`` could be referenced while
            # unbound. Bound the retries and fail explicitly instead.
            array = None
            attempts = 0
            while attempts <= 3:
                attempts += 1
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            if array is None:
                # every retry was throttled; signal no data this round
                raise DataUnavailable

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 1, 'rate': 1}
					 | 
				
			||||||
async def backfill_bars(

    sym: str,
    shm: ShmArray,  # type: ignore # noqa
    count: int = 10,  # NOTE: any more and we'll overrun the underlying buffer
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Fill historical bars into shared mem / storage afap.

    Fetches one batch of bars for ``sym``, pushes it into ``shm`` and
    then reports our cancel scope back to the spawning nursery.

    '''
    with trio.CancelScope() as cancel_scope:
        async with open_cached_client('kraken') as client:
            ohlc_bars = await client.bars(symbol=sym)
            shm.push(ohlc_bars)
            task_status.started(cancel_scope)
 | 
					 | 
				
			||||||
async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: Optional[str] = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    Loads symbol metadata for each requested symbol, opens an
    auto-reconnecting websocket subscribed to the 1m-ohlc and spread
    channels, reports the first quote via ``task_status``, then
    forwards normalized quotes through ``send_chan`` forever.

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}   # upper-cased symbol -> kraken ws pair name
    sym_infos = {}  # upper-cased symbol -> flattened symbol-info dict

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()
            sym_info = await client.symbol_info(sym)
            si = Pair(**sym_info)  # validation
            syminfo = si.to_dict()
            # derive tick sizes from kraken's decimal-place counts
            syminfo['price_tick_size'] = 1 / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals
            syminfo['asset_type'] = 'crypto'
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            # NOTE(review): ``sym`` below is the *last* loop variable
            # from above (upper-cased) while the key is the *first*
            # symbol lower-cased; these only line up when a single
            # symbol is requested -- confirm intent for multi-symbol
            # feeds.
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: NoBsWs):
            # sub/unsub fixture run around the lifetime of each
            # (re)connected websocket by ``open_autorecon_ws``

            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'ohlc',
                    'interval': 1,
                },
            }

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'spread',
                    # 'depth': 10}
                },
            }

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            await ws.send_msg({
                'pair': list(ws_pairs.values()),
                'event': 'unsubscribe',
                'subscription': ['ohlc', 'spread'],
            })

            # XXX: do we need to ack the unsub?
            # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with (
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
            ) as ws,
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs,  quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    # only emit a synthetic trade tick when volume
                    # actually changed within the interval
                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                await send_chan.send({topic: quote})
					 | 
				
			||||||
@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    '''
    Symbol-search endpoint: load the full kraken symbol set once then
    serve fuzzy-matched subsets for each pattern received over the
    stream.

    '''
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )

                # repack in dict form keyed by alt-name
                results = {}
                for match in matches:
                    entry = match[0]
                    results[entry['altname']] = entry

                await stream.send(results)
					 | 
				
			||||||
| 
						 | 
					@ -22,10 +22,54 @@ from enum import Enum
 | 
				
			||||||
from typing import Optional
 | 
					from typing import Optional
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from bidict import bidict
 | 
					from bidict import bidict
 | 
				
			||||||
 | 
					from pydantic import BaseModel, validator
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from ..data._source import Symbol
 | 
					from ..data._source import Symbol
 | 
				
			||||||
from ..data.types import Struct
 | 
					from ._messages import BrokerdPosition, Status
 | 
				
			||||||
from ..pp import Position
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					class Position(BaseModel):
 | 
				
			||||||
 | 
					    '''
 | 
				
			||||||
 | 
					    Basic pp (personal position) model with attached fills history.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    This type should be IPC wire ready?
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    '''
 | 
				
			||||||
 | 
					    symbol: Symbol
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # last size and avg entry price
 | 
				
			||||||
 | 
					    size: float
 | 
				
			||||||
 | 
					    avg_price: float  # TODO: contextual pricing
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # ordered record of known constituent trade messages
 | 
				
			||||||
 | 
					    fills: list[Status] = []
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    def update_from_msg(
 | 
				
			||||||
 | 
					        self,
 | 
				
			||||||
 | 
					        msg: BrokerdPosition,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # XXX: better place to do this?
 | 
				
			||||||
 | 
					        symbol = self.symbol
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        lot_size_digits = symbol.lot_size_digits
 | 
				
			||||||
 | 
					        avg_price, size = (
 | 
				
			||||||
 | 
					            round(msg['avg_price'], ndigits=symbol.tick_size_digits),
 | 
				
			||||||
 | 
					            round(msg['size'], ndigits=lot_size_digits),
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        self.avg_price = avg_price
 | 
				
			||||||
 | 
					        self.size = size
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    @property
 | 
				
			||||||
 | 
					    def dsize(self) -> float:
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        The "dollar" size of the pp, normally in trading (fiat) unit
 | 
				
			||||||
 | 
					        terms.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        return self.avg_price * self.size
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_size_units = bidict({
 | 
					_size_units = bidict({
 | 
				
			||||||
| 
						 | 
					@ -40,30 +84,33 @@ SizeUnit = Enum(
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class Allocator(Struct):
 | 
					class Allocator(BaseModel):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    class Config:
 | 
				
			||||||
 | 
					        validate_assignment = True
 | 
				
			||||||
 | 
					        copy_on_model_validation = False
 | 
				
			||||||
 | 
					        arbitrary_types_allowed = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # required to get the account validator lookup working?
 | 
				
			||||||
 | 
					        extra = 'allow'
 | 
				
			||||||
 | 
					        underscore_attrs_are_private = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    symbol: Symbol
 | 
					    symbol: Symbol
 | 
				
			||||||
    account: Optional[str] = 'paper'
 | 
					    account: Optional[str] = 'paper'
 | 
				
			||||||
 | 
					 | 
				
			||||||
    _size_units: bidict[str, Optional[str]] = _size_units
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO: for enums this clearly doesn't fucking work, you can't set
 | 
					    # TODO: for enums this clearly doesn't fucking work, you can't set
 | 
				
			||||||
    # a default at startup by passing in a `dict` but yet you can set
 | 
					    # a default at startup by passing in a `dict` but yet you can set
 | 
				
			||||||
    # that value through assignment..for wtv cucked reason.. honestly, pure
 | 
					    # that value through assignment..for wtv cucked reason.. honestly, pure
 | 
				
			||||||
    # unintuitive garbage.
 | 
					    # unintuitive garbage.
 | 
				
			||||||
    _size_unit: str = 'currency'
 | 
					    size_unit: str = 'currency'
 | 
				
			||||||
 | 
					    _size_units: dict[str, Optional[str]] = _size_units
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @property
 | 
					    @validator('size_unit', pre=True)
 | 
				
			||||||
    def size_unit(self) -> str:
 | 
					    def maybe_lookup_key(cls, v):
 | 
				
			||||||
        return self._size_unit
 | 
					        # apply the corresponding enum key for the text "description" value
 | 
				
			||||||
 | 
					 | 
				
			||||||
    @size_unit.setter
 | 
					 | 
				
			||||||
    def size_unit(self, v: str) -> Optional[str]:
 | 
					 | 
				
			||||||
        if v not in _size_units:
 | 
					        if v not in _size_units:
 | 
				
			||||||
            v = _size_units.inverse[v]
 | 
					            return _size_units.inverse[v]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        assert v in _size_units
 | 
					        assert v in _size_units
 | 
				
			||||||
        self._size_unit = v
 | 
					 | 
				
			||||||
        return v
 | 
					        return v
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO: if we ever want ot support non-uniform entry-slot-proportion
 | 
					    # TODO: if we ever want ot support non-uniform entry-slot-proportion
 | 
				
			||||||
| 
						 | 
					@ -93,13 +140,10 @@ class Allocator(Struct):
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            return self.units_limit
 | 
					            return self.units_limit
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def limit_info(self) -> tuple[str, float]:
 | 
					 | 
				
			||||||
        return self.size_unit, self.limit()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def next_order_info(
 | 
					    def next_order_info(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # we only need a startup size for exit calcs, we can then
 | 
					        # we only need a startup size for exit calcs, we can the
 | 
				
			||||||
        # determine how large slots should be if the initial pp size was
 | 
					        # determine how large slots should be if the initial pp size was
 | 
				
			||||||
        # larger then the current live one, and the live one is smaller
 | 
					        # larger then the current live one, and the live one is smaller
 | 
				
			||||||
        # then the initial config settings.
 | 
					        # then the initial config settings.
 | 
				
			||||||
| 
						 | 
					@ -129,7 +173,7 @@ class Allocator(Struct):
 | 
				
			||||||
            l_sub_pp = self.units_limit - abs_live_size
 | 
					            l_sub_pp = self.units_limit - abs_live_size
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        elif size_unit == 'currency':
 | 
					        elif size_unit == 'currency':
 | 
				
			||||||
            live_cost_basis = abs_live_size * live_pp.ppu
 | 
					            live_cost_basis = abs_live_size * live_pp.avg_price
 | 
				
			||||||
            slot_size = currency_per_slot / price
 | 
					            slot_size = currency_per_slot / price
 | 
				
			||||||
            l_sub_pp = (self.currency_limit - live_cost_basis) / price
 | 
					            l_sub_pp = (self.currency_limit - live_cost_basis) / price
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -140,14 +184,12 @@ class Allocator(Struct):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # an entry (adding-to or starting a pp)
 | 
					        # an entry (adding-to or starting a pp)
 | 
				
			||||||
        if (
 | 
					        if (
 | 
				
			||||||
 | 
					            action == 'buy' and live_size > 0 or
 | 
				
			||||||
 | 
					            action == 'sell' and live_size < 0 or
 | 
				
			||||||
            live_size == 0
 | 
					            live_size == 0
 | 
				
			||||||
            or (action == 'buy' and live_size > 0)
 | 
					 | 
				
			||||||
            or action == 'sell' and live_size < 0
 | 
					 | 
				
			||||||
        ):
 | 
					        ):
 | 
				
			||||||
            order_size = min(
 | 
					
 | 
				
			||||||
                slot_size,
 | 
					            order_size = min(slot_size, l_sub_pp)
 | 
				
			||||||
                max(l_sub_pp, 0),
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # an exit (removing-from or going to net-zero pp)
 | 
					        # an exit (removing-from or going to net-zero pp)
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
| 
						 | 
					@ -163,7 +205,7 @@ class Allocator(Struct):
 | 
				
			||||||
            if size_unit == 'currency':
 | 
					            if size_unit == 'currency':
 | 
				
			||||||
                # compute the "projected" limit's worth of units at the
 | 
					                # compute the "projected" limit's worth of units at the
 | 
				
			||||||
                # current pp (weighted) price:
 | 
					                # current pp (weighted) price:
 | 
				
			||||||
                slot_size = currency_per_slot / live_pp.ppu
 | 
					                slot_size = currency_per_slot / live_pp.avg_price
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            else:
 | 
					            else:
 | 
				
			||||||
                slot_size = u_per_slot
 | 
					                slot_size = u_per_slot
 | 
				
			||||||
| 
						 | 
					@ -202,12 +244,7 @@ class Allocator(Struct):
 | 
				
			||||||
        if order_size < slot_size:
 | 
					        if order_size < slot_size:
 | 
				
			||||||
            # compute a fractional slots size to display
 | 
					            # compute a fractional slots size to display
 | 
				
			||||||
            slots_used = self.slots_used(
 | 
					            slots_used = self.slots_used(
 | 
				
			||||||
                Position(
 | 
					                Position(symbol=sym, size=order_size, avg_price=price)
 | 
				
			||||||
                    symbol=sym,
 | 
					 | 
				
			||||||
                    size=order_size,
 | 
					 | 
				
			||||||
                    ppu=price,
 | 
					 | 
				
			||||||
                    bsuid=sym,
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return {
 | 
					        return {
 | 
				
			||||||
| 
						 | 
					@ -234,8 +271,8 @@ class Allocator(Struct):
 | 
				
			||||||
        abs_pp_size = abs(pp.size)
 | 
					        abs_pp_size = abs(pp.size)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if self.size_unit == 'currency':
 | 
					        if self.size_unit == 'currency':
 | 
				
			||||||
            # live_currency_size = size or (abs_pp_size * pp.ppu)
 | 
					            # live_currency_size = size or (abs_pp_size * pp.avg_price)
 | 
				
			||||||
            live_currency_size = abs_pp_size * pp.ppu
 | 
					            live_currency_size = abs_pp_size * pp.avg_price
 | 
				
			||||||
            prop = live_currency_size / self.currency_limit
 | 
					            prop = live_currency_size / self.currency_limit
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
| 
						 | 
					@ -247,6 +284,14 @@ class Allocator(Struct):
 | 
				
			||||||
        return round(prop * self.slots)
 | 
					        return round(prop * self.slots)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					_derivs = (
 | 
				
			||||||
 | 
					    'future',
 | 
				
			||||||
 | 
					    'continuous_future',
 | 
				
			||||||
 | 
					    'option',
 | 
				
			||||||
 | 
					    'futures_option',
 | 
				
			||||||
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def mk_allocator(
 | 
					def mk_allocator(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    symbol: Symbol,
 | 
					    symbol: Symbol,
 | 
				
			||||||
| 
						 | 
					@ -255,7 +300,7 @@ def mk_allocator(
 | 
				
			||||||
    # default allocation settings
 | 
					    # default allocation settings
 | 
				
			||||||
    defaults: dict[str, float] = {
 | 
					    defaults: dict[str, float] = {
 | 
				
			||||||
        'account': None,  # select paper by default
 | 
					        'account': None,  # select paper by default
 | 
				
			||||||
        # 'size_unit': 'currency',
 | 
					        'size_unit': 'currency',
 | 
				
			||||||
        'units_limit': 400,
 | 
					        'units_limit': 400,
 | 
				
			||||||
        'currency_limit': 5e3,
 | 
					        'currency_limit': 5e3,
 | 
				
			||||||
        'slots': 4,
 | 
					        'slots': 4,
 | 
				
			||||||
| 
						 | 
					@ -273,9 +318,42 @@ def mk_allocator(
 | 
				
			||||||
        'currency_limit': 6e3,
 | 
					        'currency_limit': 6e3,
 | 
				
			||||||
        'slots': 6,
 | 
					        'slots': 6,
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    defaults.update(user_def)
 | 
					    defaults.update(user_def)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    return Allocator(
 | 
					    alloc = Allocator(
 | 
				
			||||||
        symbol=symbol,
 | 
					        symbol=symbol,
 | 
				
			||||||
        **defaults,
 | 
					        **defaults,
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    asset_type = symbol.type_key
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # specific configs by asset class / type
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    if asset_type in _derivs:
 | 
				
			||||||
 | 
					        # since it's harder to know how currency "applies" in this case
 | 
				
			||||||
 | 
					        # given leverage properties
 | 
				
			||||||
 | 
					        alloc.size_unit = '# units'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # set units limit to slots size thus making make the next
 | 
				
			||||||
 | 
					        # entry step 1.0
 | 
				
			||||||
 | 
					        alloc.units_limit = alloc.slots
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # if the current position is already greater then the limit
 | 
				
			||||||
 | 
					    # settings, increase the limit to the current position
 | 
				
			||||||
 | 
					    if alloc.size_unit == 'currency':
 | 
				
			||||||
 | 
					        startup_size = startup_pp.size * startup_pp.avg_price
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if startup_size > alloc.currency_limit:
 | 
				
			||||||
 | 
					            alloc.currency_limit = round(startup_size, ndigits=2)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    else:
 | 
				
			||||||
 | 
					        startup_size = abs(startup_pp.size)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if startup_size > alloc.units_limit:
 | 
				
			||||||
 | 
					            alloc.units_limit = startup_size
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            if asset_type in _derivs:
 | 
				
			||||||
 | 
					                alloc.slots = alloc.units_limit
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return alloc
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -19,24 +19,25 @@ Orders and execution client API.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from contextlib import asynccontextmanager as acm
 | 
					from contextlib import asynccontextmanager as acm
 | 
				
			||||||
 | 
					from typing import Dict
 | 
				
			||||||
from pprint import pformat
 | 
					from pprint import pformat
 | 
				
			||||||
 | 
					from dataclasses import dataclass, field
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
from tractor.trionics import broadcast_receiver
 | 
					from tractor.trionics import broadcast_receiver
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from ..data.types import Struct
 | 
					 | 
				
			||||||
from ._ems import _emsd_main
 | 
					from ._ems import _emsd_main
 | 
				
			||||||
from .._daemon import maybe_open_emsd
 | 
					from .._daemon import maybe_open_emsd
 | 
				
			||||||
from ._messages import Order, Cancel
 | 
					from ._messages import Order, Cancel
 | 
				
			||||||
from ..brokers import get_brokermod
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class OrderBook(Struct):
 | 
					@dataclass
 | 
				
			||||||
 | 
					class OrderBook:
 | 
				
			||||||
    '''EMS-client-side order book ctl and tracking.
 | 
					    '''EMS-client-side order book ctl and tracking.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    A style similar to "model-view" is used here where this api is
 | 
					    A style similar to "model-view" is used here where this api is
 | 
				
			||||||
| 
						 | 
					@ -51,18 +52,20 @@ class OrderBook(Struct):
 | 
				
			||||||
    # mem channels used to relay order requests to the EMS daemon
 | 
					    # mem channels used to relay order requests to the EMS daemon
 | 
				
			||||||
    _to_ems: trio.abc.SendChannel
 | 
					    _to_ems: trio.abc.SendChannel
 | 
				
			||||||
    _from_order_book: trio.abc.ReceiveChannel
 | 
					    _from_order_book: trio.abc.ReceiveChannel
 | 
				
			||||||
    _sent_orders: dict[str, Order] = {}
 | 
					
 | 
				
			||||||
 | 
					    _sent_orders: Dict[str, Order] = field(default_factory=dict)
 | 
				
			||||||
 | 
					    _ready_to_receive: trio.Event = trio.Event()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def send(
 | 
					    def send(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        msg: Order | dict,
 | 
					        msg: Order,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> dict:
 | 
					    ) -> dict:
 | 
				
			||||||
        self._sent_orders[msg.oid] = msg
 | 
					        self._sent_orders[msg.oid] = msg
 | 
				
			||||||
        self._to_ems.send_nowait(msg)
 | 
					        self._to_ems.send_nowait(msg.dict())
 | 
				
			||||||
        return msg
 | 
					        return msg
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def send_update(
 | 
					    def update(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        uuid: str,
 | 
					        uuid: str,
 | 
				
			||||||
| 
						 | 
					@ -70,8 +73,9 @@ class OrderBook(Struct):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> dict:
 | 
					    ) -> dict:
 | 
				
			||||||
        cmd = self._sent_orders[uuid]
 | 
					        cmd = self._sent_orders[uuid]
 | 
				
			||||||
        msg = cmd.copy(update=data)
 | 
					        msg = cmd.dict()
 | 
				
			||||||
        self._sent_orders[uuid] = msg
 | 
					        msg.update(data)
 | 
				
			||||||
 | 
					        self._sent_orders[uuid] = Order(**msg)
 | 
				
			||||||
        self._to_ems.send_nowait(msg)
 | 
					        self._to_ems.send_nowait(msg)
 | 
				
			||||||
        return cmd
 | 
					        return cmd
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -79,18 +83,12 @@ class OrderBook(Struct):
 | 
				
			||||||
        """Cancel an order (or alert) in the EMS.
 | 
					        """Cancel an order (or alert) in the EMS.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        """
 | 
					        """
 | 
				
			||||||
        cmd = self._sent_orders.get(uuid)
 | 
					        cmd = self._sent_orders[uuid]
 | 
				
			||||||
        if not cmd:
 | 
					 | 
				
			||||||
            log.error(
 | 
					 | 
				
			||||||
                f'Unknown order {uuid}!?\n'
 | 
					 | 
				
			||||||
                f'Maybe there is a stale entry or line?\n'
 | 
					 | 
				
			||||||
                f'You should report this as a bug!'
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
        msg = Cancel(
 | 
					        msg = Cancel(
 | 
				
			||||||
            oid=uuid,
 | 
					            oid=uuid,
 | 
				
			||||||
            symbol=cmd.symbol,
 | 
					            symbol=cmd.symbol,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        self._to_ems.send_nowait(msg)
 | 
					        self._to_ems.send_nowait(msg.dict())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_orders: OrderBook = None
 | 
					_orders: OrderBook = None
 | 
				
			||||||
| 
						 | 
					@ -151,17 +149,10 @@ async def relay_order_cmds_from_sync_code(
 | 
				
			||||||
    book = get_orders()
 | 
					    book = get_orders()
 | 
				
			||||||
    async with book._from_order_book.subscribe() as orders_stream:
 | 
					    async with book._from_order_book.subscribe() as orders_stream:
 | 
				
			||||||
        async for cmd in orders_stream:
 | 
					        async for cmd in orders_stream:
 | 
				
			||||||
            sym = cmd.symbol
 | 
					            if cmd['symbol'] == symbol_key:
 | 
				
			||||||
            msg = pformat(cmd)
 | 
					                log.info(f'Send order cmd:\n{pformat(cmd)}')
 | 
				
			||||||
            if sym == symbol_key:
 | 
					 | 
				
			||||||
                log.info(f'Send order cmd:\n{msg}')
 | 
					 | 
				
			||||||
                # send msg over IPC / wire
 | 
					                # send msg over IPC / wire
 | 
				
			||||||
                await to_ems_stream.send(cmd)
 | 
					                await to_ems_stream.send(cmd)
 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                log.warning(
 | 
					 | 
				
			||||||
                    f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
 | 
					 | 
				
			||||||
                    f'\n{msg}'
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@acm
 | 
					@acm
 | 
				
			||||||
| 
						 | 
					@ -213,35 +204,20 @@ async def open_ems(
 | 
				
			||||||
    from ..data._source import unpack_fqsn
 | 
					    from ..data._source import unpack_fqsn
 | 
				
			||||||
    broker, symbol, suffix = unpack_fqsn(fqsn)
 | 
					    broker, symbol, suffix = unpack_fqsn(fqsn)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    mode: str = 'live'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async with maybe_open_emsd(broker) as portal:
 | 
					    async with maybe_open_emsd(broker) as portal:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        mod = get_brokermod(broker)
 | 
					 | 
				
			||||||
        if not getattr(mod, 'trades_dialogue', None):
 | 
					 | 
				
			||||||
            mode = 'paper'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        async with (
 | 
					        async with (
 | 
				
			||||||
            # connect to emsd
 | 
					            # connect to emsd
 | 
				
			||||||
            portal.open_context(
 | 
					            portal.open_context(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                _emsd_main,
 | 
					                _emsd_main,
 | 
				
			||||||
                fqsn=fqsn,
 | 
					                fqsn=fqsn,
 | 
				
			||||||
                exec_mode=mode,
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            ) as (
 | 
					            ) as (ctx, (positions, accounts)),
 | 
				
			||||||
                ctx,
 | 
					 | 
				
			||||||
                (
 | 
					 | 
				
			||||||
                    positions,
 | 
					 | 
				
			||||||
                    accounts,
 | 
					 | 
				
			||||||
                    dialogs,
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
            ),
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # open 2-way trade command stream
 | 
					            # open 2-way trade command stream
 | 
				
			||||||
            ctx.open_stream() as trades_stream,
 | 
					            ctx.open_stream() as trades_stream,
 | 
				
			||||||
        ):
 | 
					        ):
 | 
				
			||||||
            # start sync code order msg delivery task
 | 
					 | 
				
			||||||
            async with trio.open_nursery() as n:
 | 
					            async with trio.open_nursery() as n:
 | 
				
			||||||
                n.start_soon(
 | 
					                n.start_soon(
 | 
				
			||||||
                    relay_order_cmds_from_sync_code,
 | 
					                    relay_order_cmds_from_sync_code,
 | 
				
			||||||
| 
						 | 
					@ -249,10 +225,4 @@ async def open_ems(
 | 
				
			||||||
                    trades_stream
 | 
					                    trades_stream
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                yield (
 | 
					                yield book, trades_stream, positions, accounts
 | 
				
			||||||
                    book,
 | 
					 | 
				
			||||||
                    trades_stream,
 | 
					 | 
				
			||||||
                    positions,
 | 
					 | 
				
			||||||
                    accounts,
 | 
					 | 
				
			||||||
                    dialogs,
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| 
						 | 
					@ -1,5 +1,5 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					# piker: trading gear for hackers
 | 
				
			||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 | 
					# Copyright (C) Tyler Goodlet (in stewardship for piker0)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					# This program is free software: you can redistribute it and/or modify
 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					# it under the terms of the GNU Affero General Public License as published by
 | 
				
			||||||
| 
						 | 
					@ -15,95 +15,22 @@
 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
Clearing sub-system message and protocols.
 | 
					Clearing system messagingn types and protocols.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
# from collections import (
 | 
					from typing import Optional, Union
 | 
				
			||||||
#     ChainMap,
 | 
					
 | 
				
			||||||
#     deque,
 | 
					# TODO: try out just encoding/send direction for now?
 | 
				
			||||||
# )
 | 
					# import msgspec
 | 
				
			||||||
from typing import (
 | 
					from pydantic import BaseModel
 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Literal,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
from ..data._source import Symbol
 | 
					from ..data._source import Symbol
 | 
				
			||||||
from ..data.types import Struct
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					 | 
				
			||||||
# TODO: a composite for tracking msg flow on 2-legged
 | 
					 | 
				
			||||||
# dialogs.
 | 
					 | 
				
			||||||
# class Dialog(ChainMap):
 | 
					 | 
				
			||||||
#     '''
 | 
					 | 
				
			||||||
#     Msg collection abstraction to easily track the state changes of
 | 
					 | 
				
			||||||
#     a msg flow in one high level, query-able and immutable construct.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
#     The main use case is to query data from a (long-running)
 | 
					 | 
				
			||||||
#     msg-transaction-sequence
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
#     '''
 | 
					 | 
				
			||||||
#     def update(
 | 
					 | 
				
			||||||
#         self,
 | 
					 | 
				
			||||||
#         msg,
 | 
					 | 
				
			||||||
#     ) -> None:
 | 
					 | 
				
			||||||
#         self.maps.insert(0, msg.to_dict())
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
#     def flatten(self) -> dict:
 | 
					 | 
				
			||||||
#         return dict(self)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# TODO: ``msgspec`` stuff worth paying attention to:
 | 
					 | 
				
			||||||
# - schema evolution:
 | 
					 | 
				
			||||||
# https://jcristharif.com/msgspec/usage.html#schema-evolution
 | 
					 | 
				
			||||||
# - for eg. ``BrokerdStatus``, instead just have separate messages?
 | 
					 | 
				
			||||||
# - use literals for a common msg determined by diff keys?
 | 
					 | 
				
			||||||
#   - https://jcristharif.com/msgspec/usage.html#literal
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# --------------
 | 
					 | 
				
			||||||
# Client -> emsd
 | 
					# Client -> emsd
 | 
				
			||||||
# --------------
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class Order(Struct):
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO: ideally we can combine these 2 fields into
 | 
					 | 
				
			||||||
    # 1 and just use the size polarity to determine a buy/sell.
 | 
					 | 
				
			||||||
    # i would like to see this become more like
 | 
					 | 
				
			||||||
    # https://jcristharif.com/msgspec/usage.html#literal
 | 
					 | 
				
			||||||
    # action: Literal[
 | 
					 | 
				
			||||||
    #     'live',
 | 
					 | 
				
			||||||
    #     'dark',
 | 
					 | 
				
			||||||
    #     'alert',
 | 
					 | 
				
			||||||
    # ]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    action: Literal[
 | 
					 | 
				
			||||||
        'buy',
 | 
					 | 
				
			||||||
        'sell',
 | 
					 | 
				
			||||||
        'alert',
 | 
					 | 
				
			||||||
    ]
 | 
					 | 
				
			||||||
    # determines whether the create execution
 | 
					 | 
				
			||||||
    # will be submitted to the ems or directly to
 | 
					 | 
				
			||||||
    # the backend broker
 | 
					 | 
				
			||||||
    exec_mode: Literal[
 | 
					 | 
				
			||||||
        'dark',
 | 
					 | 
				
			||||||
        'live',
 | 
					 | 
				
			||||||
        # 'paper',  no right?
 | 
					 | 
				
			||||||
    ]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # internal ``emdsd`` unique "order id"
 | 
					 | 
				
			||||||
    oid: str  # uuid4
 | 
					 | 
				
			||||||
    symbol: str | Symbol
 | 
					 | 
				
			||||||
    account: str  # should we set a default as '' ?
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    price: float
 | 
					 | 
				
			||||||
    size: float  # -ve is "sell", +ve is "buy"
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    brokers: Optional[list[str]] = []
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class Cancel(Struct):
 | 
					class Cancel(BaseModel):
 | 
				
			||||||
    '''
 | 
					    '''Cancel msg for removing a dark (ems triggered) or
 | 
				
			||||||
    Cancel msg for removing a dark (ems triggered) or
 | 
					 | 
				
			||||||
    broker-submitted (live) trigger/order.
 | 
					    broker-submitted (live) trigger/order.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -112,61 +39,82 @@ class Cancel(Struct):
 | 
				
			||||||
    symbol: str
 | 
					    symbol: str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# --------------
 | 
					class Order(BaseModel):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    action: str  # {'buy', 'sell', 'alert'}
 | 
				
			||||||
 | 
					    # internal ``emdsd`` unique "order id"
 | 
				
			||||||
 | 
					    oid: str  # uuid4
 | 
				
			||||||
 | 
					    symbol: Union[str, Symbol]
 | 
				
			||||||
 | 
					    account: str  # should we set a default as '' ?
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    price: float
 | 
				
			||||||
 | 
					    size: float
 | 
				
			||||||
 | 
					    brokers: list[str]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # Assigned once initial ack is received
 | 
				
			||||||
 | 
					    # ack_time_ns: Optional[int] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # determines whether the create execution
 | 
				
			||||||
 | 
					    # will be submitted to the ems or directly to
 | 
				
			||||||
 | 
					    # the backend broker
 | 
				
			||||||
 | 
					    exec_mode: str  # {'dark', 'live', 'paper'}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    class Config:
 | 
				
			||||||
 | 
					        # just for pre-loading a ``Symbol`` when used
 | 
				
			||||||
 | 
					        # in the order mode staging process
 | 
				
			||||||
 | 
					        arbitrary_types_allowed = True
 | 
				
			||||||
 | 
					        # don't copy this model instance when used in
 | 
				
			||||||
 | 
					        # a recursive model
 | 
				
			||||||
 | 
					        copy_on_model_validation = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Client <- emsd
 | 
					# Client <- emsd
 | 
				
			||||||
# --------------
 | 
					 | 
				
			||||||
# update msgs from ems which relay state change info
 | 
					# update msgs from ems which relay state change info
 | 
				
			||||||
# from the active clearing engine.
 | 
					# from the active clearing engine.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class Status(Struct):
 | 
					
 | 
				
			||||||
 | 
					class Status(BaseModel):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    name: str = 'status'
 | 
					    name: str = 'status'
 | 
				
			||||||
 | 
					    oid: str  # uuid4
 | 
				
			||||||
    time_ns: int
 | 
					    time_ns: int
 | 
				
			||||||
    oid: str  # uuid4 ems-order dialog id
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    resp: Literal[
 | 
					    # {
 | 
				
			||||||
      'pending',  # acked by broker but not yet open
 | 
					    #   'dark_submitted',
 | 
				
			||||||
      'open',
 | 
					    #   'dark_cancelled',
 | 
				
			||||||
      'dark_open',  # dark/algo triggered order is open in ems clearing loop
 | 
					    #   'dark_triggered',
 | 
				
			||||||
      'triggered',  # above triggered order sent to brokerd, or an alert closed
 | 
					
 | 
				
			||||||
      'closed',  # fully cleared all size/units
 | 
					    #   'broker_submitted',
 | 
				
			||||||
      'fill',  # partial execution
 | 
					    #   'broker_cancelled',
 | 
				
			||||||
      'canceled',
 | 
					    #   'broker_executed',
 | 
				
			||||||
      'error',
 | 
					    #   'broker_filled',
 | 
				
			||||||
    ]
 | 
					    #   'broker_errored',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    #   'alert_submitted',
 | 
				
			||||||
 | 
					    #   'alert_triggered',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # }
 | 
				
			||||||
 | 
					    resp: str  # "response", see above
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # symbol: str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # trigger info
 | 
				
			||||||
 | 
					    trigger_price: Optional[float] = None
 | 
				
			||||||
 | 
					    # price: float
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # broker: Optional[str] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # this maps normally to the ``BrokerdOrder.reqid`` below, an id
 | 
					    # this maps normally to the ``BrokerdOrder.reqid`` below, an id
 | 
				
			||||||
    # normally allocated internally by the backend broker routing system
 | 
					    # normally allocated internally by the backend broker routing system
 | 
				
			||||||
    reqid: Optional[int | str] = None
 | 
					    broker_reqid: Optional[Union[int, str]] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # the (last) source order/request msg if provided
 | 
					    # for relaying backend msg data "through" the ems layer
 | 
				
			||||||
    # (eg. the Order/Cancel which causes this msg) and
 | 
					 | 
				
			||||||
    # acts as a back-reference to the corresponding
 | 
					 | 
				
			||||||
    # request message which was the source of this msg.
 | 
					 | 
				
			||||||
    req: Order | None = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # XXX: better design/name here?
 | 
					 | 
				
			||||||
    # flag that can be set to indicate a message for an order
 | 
					 | 
				
			||||||
    # event that wasn't originated by piker's emsd (eg. some external
 | 
					 | 
				
			||||||
    # trading system which does it's own order control but that you
 | 
					 | 
				
			||||||
    # might want to "track" using piker UIs/systems).
 | 
					 | 
				
			||||||
    src: Optional[str] = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # set when a cancel request msg was set for this order flow dialog
 | 
					 | 
				
			||||||
    # but the brokerd dialog isn't yet in a cancelled state.
 | 
					 | 
				
			||||||
    cancel_called: bool = False
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # for relaying a boxed brokerd-dialog-side msg data "through" the
 | 
					 | 
				
			||||||
    # ems layer to clients.
 | 
					 | 
				
			||||||
    brokerd_msg: dict = {}
 | 
					    brokerd_msg: dict = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# ---------------
 | 
					 | 
				
			||||||
# emsd -> brokerd
 | 
					# emsd -> brokerd
 | 
				
			||||||
# ---------------
 | 
					 | 
				
			||||||
# requests *sent* from ems to respective backend broker daemon
 | 
					# requests *sent* from ems to respective backend broker daemon
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class BrokerdCancel(Struct):
 | 
					class BrokerdCancel(BaseModel):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    action: str = 'cancel'
 | 
					    action: str = 'cancel'
 | 
				
			||||||
    oid: str  # piker emsd order id
 | 
					    oid: str  # piker emsd order id
 | 
				
			||||||
| 
						 | 
					@ -179,38 +127,34 @@ class BrokerdCancel(Struct):
 | 
				
			||||||
    # for setting a unique order id then this value will be relayed back
 | 
					    # for setting a unique order id then this value will be relayed back
 | 
				
			||||||
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
 | 
					    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
 | 
				
			||||||
    # field
 | 
					    # field
 | 
				
			||||||
    reqid: Optional[int | str] = None
 | 
					    reqid: Optional[Union[int, str]] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class BrokerdOrder(Struct):
 | 
					class BrokerdOrder(BaseModel):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    action: str  # {buy, sell}
 | 
				
			||||||
    oid: str
 | 
					    oid: str
 | 
				
			||||||
    account: str
 | 
					    account: str
 | 
				
			||||||
    time_ns: int
 | 
					    time_ns: int
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO: if we instead rely on a +ve/-ve size to determine
 | 
					 | 
				
			||||||
    # the action we more or less don't need this field right?
 | 
					 | 
				
			||||||
    action: str = ''  # {buy, sell}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # "broker request id": broker specific/internal order id if this is
 | 
					    # "broker request id": broker specific/internal order id if this is
 | 
				
			||||||
    # None, creates a new order otherwise if the id is valid the backend
 | 
					    # None, creates a new order otherwise if the id is valid the backend
 | 
				
			||||||
    # api must modify the existing matching order. If the broker allows
 | 
					    # api must modify the existing matching order. If the broker allows
 | 
				
			||||||
    # for setting a unique order id then this value will be relayed back
 | 
					    # for setting a unique order id then this value will be relayed back
 | 
				
			||||||
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
 | 
					    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
 | 
				
			||||||
    # field
 | 
					    # field
 | 
				
			||||||
    reqid: Optional[int | str] = None
 | 
					    reqid: Optional[Union[int, str]] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    symbol: str  # fqsn
 | 
					    symbol: str  # symbol.<providername> ?
 | 
				
			||||||
    price: float
 | 
					    price: float
 | 
				
			||||||
    size: float
 | 
					    size: float
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# ---------------
 | 
					 | 
				
			||||||
# emsd <- brokerd
 | 
					# emsd <- brokerd
 | 
				
			||||||
# ---------------
 | 
					 | 
				
			||||||
# requests *received* to ems from broker backend
 | 
					# requests *received* to ems from broker backend
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class BrokerdOrderAck(Struct):
 | 
					
 | 
				
			||||||
 | 
					class BrokerdOrderAck(BaseModel):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    Immediate reponse to a brokerd order request providing the broker
 | 
					    Immediate reponse to a brokerd order request providing the broker
 | 
				
			||||||
    specific unique order id so that the EMS can associate this
 | 
					    specific unique order id so that the EMS can associate this
 | 
				
			||||||
| 
						 | 
					@ -221,32 +165,39 @@ class BrokerdOrderAck(Struct):
 | 
				
			||||||
    name: str = 'ack'
 | 
					    name: str = 'ack'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # defined and provided by backend
 | 
					    # defined and provided by backend
 | 
				
			||||||
    reqid: int | str
 | 
					    reqid: Union[int, str]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # emsd id originally sent in matching request msg
 | 
					    # emsd id originally sent in matching request msg
 | 
				
			||||||
    oid: str
 | 
					    oid: str
 | 
				
			||||||
    account: str = ''
 | 
					    account: str = ''
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class BrokerdStatus(Struct):
 | 
					class BrokerdStatus(BaseModel):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    name: str = 'status'
 | 
					    name: str = 'status'
 | 
				
			||||||
    reqid: int | str
 | 
					    reqid: Union[int, str]
 | 
				
			||||||
    time_ns: int
 | 
					    time_ns: int
 | 
				
			||||||
    status: Literal[
 | 
					 | 
				
			||||||
        'open',
 | 
					 | 
				
			||||||
        'canceled',
 | 
					 | 
				
			||||||
        'fill',
 | 
					 | 
				
			||||||
        'pending',
 | 
					 | 
				
			||||||
        'error',
 | 
					 | 
				
			||||||
    ]
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    account: str
 | 
					    # XXX: should be best effort set for every update
 | 
				
			||||||
 | 
					    account: str = ''
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # {
 | 
				
			||||||
 | 
					    #   'submitted',
 | 
				
			||||||
 | 
					    #   'cancelled',
 | 
				
			||||||
 | 
					    #   'filled',
 | 
				
			||||||
 | 
					    # }
 | 
				
			||||||
 | 
					    status: str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    filled: float = 0.0
 | 
					    filled: float = 0.0
 | 
				
			||||||
    reason: str = ''
 | 
					    reason: str = ''
 | 
				
			||||||
    remaining: float = 0.0
 | 
					    remaining: float = 0.0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # external: bool = False
 | 
					    # XXX: better design/name here?
 | 
				
			||||||
 | 
					    # flag that can be set to indicate a message for an order
 | 
				
			||||||
 | 
					    # event that wasn't originated by piker's emsd (eg. some external
 | 
				
			||||||
 | 
					    # trading system which does it's own order control but that you
 | 
				
			||||||
 | 
					    # might want to "track" using piker UIs/systems).
 | 
				
			||||||
 | 
					    external: bool = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # XXX: not required schema as of yet
 | 
					    # XXX: not required schema as of yet
 | 
				
			||||||
    broker_details: dict = {
 | 
					    broker_details: dict = {
 | 
				
			||||||
| 
						 | 
					@ -254,21 +205,21 @@ class BrokerdStatus(Struct):
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class BrokerdFill(Struct):
 | 
					class BrokerdFill(BaseModel):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    A single message indicating a "fill-details" event from the broker
 | 
					    A single message indicating a "fill-details" event from the broker
 | 
				
			||||||
    if avaiable.
 | 
					    if avaiable.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    name: str = 'fill'
 | 
					    name: str = 'fill'
 | 
				
			||||||
    reqid: int | str
 | 
					    reqid: Union[int, str]
 | 
				
			||||||
    time_ns: int
 | 
					    time_ns: int
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # order exeuction related
 | 
					    # order exeuction related
 | 
				
			||||||
 | 
					    action: str
 | 
				
			||||||
    size: float
 | 
					    size: float
 | 
				
			||||||
    price: float
 | 
					    price: float
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    action: Optional[str] = None
 | 
					 | 
				
			||||||
    broker_details: dict = {}  # meta-data (eg. commisions etc.)
 | 
					    broker_details: dict = {}  # meta-data (eg. commisions etc.)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # brokerd timestamp required for order mode arrow placement on x-axis
 | 
					    # brokerd timestamp required for order mode arrow placement on x-axis
 | 
				
			||||||
| 
						 | 
					@ -279,7 +230,7 @@ class BrokerdFill(Struct):
 | 
				
			||||||
    broker_time: float
 | 
					    broker_time: float
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class BrokerdError(Struct):
 | 
					class BrokerdError(BaseModel):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    Optional error type that can be relayed to emsd for error handling.
 | 
					    Optional error type that can be relayed to emsd for error handling.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -291,14 +242,14 @@ class BrokerdError(Struct):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # if no brokerd order request was actually submitted (eg. we errored
 | 
					    # if no brokerd order request was actually submitted (eg. we errored
 | 
				
			||||||
    # at the ``pikerd`` layer) then there will be ``reqid`` allocated.
 | 
					    # at the ``pikerd`` layer) then there will be ``reqid`` allocated.
 | 
				
			||||||
    reqid: Optional[int | str] = None
 | 
					    reqid: Optional[Union[int, str]] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    symbol: str
 | 
					    symbol: str
 | 
				
			||||||
    reason: str
 | 
					    reason: str
 | 
				
			||||||
    broker_details: dict = {}
 | 
					    broker_details: dict = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class BrokerdPosition(Struct):
 | 
					class BrokerdPosition(BaseModel):
 | 
				
			||||||
    '''Position update event from brokerd.
 | 
					    '''Position update event from brokerd.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -307,6 +258,6 @@ class BrokerdPosition(Struct):
 | 
				
			||||||
    broker: str
 | 
					    broker: str
 | 
				
			||||||
    account: str
 | 
					    account: str
 | 
				
			||||||
    symbol: str
 | 
					    symbol: str
 | 
				
			||||||
 | 
					    currency: str
 | 
				
			||||||
    size: float
 | 
					    size: float
 | 
				
			||||||
    avg_price: float
 | 
					    avg_price: float
 | 
				
			||||||
    currency: str = ''
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -18,71 +18,54 @@
 | 
				
			||||||
Fake trading for forward testing.
 | 
					Fake trading for forward testing.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from collections import defaultdict
 | 
					 | 
				
			||||||
from contextlib import asynccontextmanager
 | 
					from contextlib import asynccontextmanager
 | 
				
			||||||
from datetime import datetime
 | 
					from datetime import datetime
 | 
				
			||||||
from operator import itemgetter
 | 
					from operator import itemgetter
 | 
				
			||||||
import itertools
 | 
					 | 
				
			||||||
import time
 | 
					import time
 | 
				
			||||||
from typing import (
 | 
					from typing import Tuple, Optional, Callable
 | 
				
			||||||
    Any,
 | 
					 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Callable,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
import uuid
 | 
					import uuid
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from bidict import bidict
 | 
					from bidict import bidict
 | 
				
			||||||
import pendulum
 | 
					 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
 | 
					from dataclasses import dataclass
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from .. import data
 | 
					from .. import data
 | 
				
			||||||
from ..data._source import Symbol
 | 
					 | 
				
			||||||
from ..data.types import Struct
 | 
					 | 
				
			||||||
from ..pp import (
 | 
					 | 
				
			||||||
    Position,
 | 
					 | 
				
			||||||
    Transaction,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from ..data._normalize import iterticks
 | 
					from ..data._normalize import iterticks
 | 
				
			||||||
from ..data._source import unpack_fqsn
 | 
					from ..data._source import unpack_fqsn
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from ._messages import (
 | 
					from ._messages import (
 | 
				
			||||||
    BrokerdCancel,
 | 
					    BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,
 | 
				
			||||||
    BrokerdOrder,
 | 
					    BrokerdFill, BrokerdPosition, BrokerdError
 | 
				
			||||||
    BrokerdOrderAck,
 | 
					 | 
				
			||||||
    BrokerdStatus,
 | 
					 | 
				
			||||||
    BrokerdFill,
 | 
					 | 
				
			||||||
    BrokerdPosition,
 | 
					 | 
				
			||||||
    BrokerdError,
 | 
					 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class PaperBoi(Struct):
 | 
					@dataclass
 | 
				
			||||||
    '''
 | 
					class PaperBoi:
 | 
				
			||||||
    Emulates a broker order client providing approximately the same API
 | 
					    """
 | 
				
			||||||
    and delivering an order-event response stream but with methods for
 | 
					    Emulates a broker order client providing the same API and
 | 
				
			||||||
 | 
					    delivering an order-event response stream but with methods for
 | 
				
			||||||
    triggering desired events based on forward testing engine
 | 
					    triggering desired events based on forward testing engine
 | 
				
			||||||
    requirements (eg open, closed, fill msgs).
 | 
					    requirements.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    """
 | 
				
			||||||
    broker: str
 | 
					    broker: str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ems_trades_stream: tractor.MsgStream
 | 
					    ems_trades_stream: tractor.MsgStream
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # map of paper "live" orders which be used
 | 
					    # map of paper "live" orders which be used
 | 
				
			||||||
    # to simulate fills based on paper engine settings
 | 
					    # to simulate fills based on paper engine settings
 | 
				
			||||||
    _buys: defaultdict[str, bidict]
 | 
					    _buys: bidict
 | 
				
			||||||
    _sells: defaultdict[str, bidict]
 | 
					    _sells: bidict
 | 
				
			||||||
    _reqids: bidict
 | 
					    _reqids: bidict
 | 
				
			||||||
    _positions: dict[str, Position]
 | 
					    _positions: dict[str, BrokerdPosition]
 | 
				
			||||||
    _trade_ledger: dict[str, Any]
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # init edge case L1 spread
 | 
					    # init edge case L1 spread
 | 
				
			||||||
    last_ask: tuple[float, float] = (float('inf'), 0)  # price, size
 | 
					    last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size
 | 
				
			||||||
    last_bid: tuple[float, float] = (0, 0)
 | 
					    last_bid: Tuple[float, float] = (0, 0)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def submit_limit(
 | 
					    async def submit_limit(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
| 
						 | 
					@ -92,24 +75,27 @@ class PaperBoi(Struct):
 | 
				
			||||||
        action: str,
 | 
					        action: str,
 | 
				
			||||||
        size: float,
 | 
					        size: float,
 | 
				
			||||||
        reqid: Optional[str],
 | 
					        reqid: Optional[str],
 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> int:
 | 
					    ) -> int:
 | 
				
			||||||
        '''
 | 
					        """Place an order and return integer request id provided by client.
 | 
				
			||||||
        Place an order and return integer request id provided by client.
 | 
					
 | 
				
			||||||
 | 
					        """
 | 
				
			||||||
 | 
					        is_modify: bool = False
 | 
				
			||||||
 | 
					        if reqid is None:
 | 
				
			||||||
 | 
					            reqid = str(uuid.uuid4())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        else:
 | 
				
			||||||
 | 
					            # order is already existing, this is a modify
 | 
				
			||||||
 | 
					            (oid, symbol, action, old_price) = self._reqids[reqid]
 | 
				
			||||||
 | 
					            assert old_price != price
 | 
				
			||||||
 | 
					            is_modify = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # register order internally
 | 
				
			||||||
 | 
					        self._reqids[reqid] = (oid, symbol, action, price)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        if action == 'alert':
 | 
					        if action == 'alert':
 | 
				
			||||||
            # bypass all fill simulation
 | 
					            # bypass all fill simulation
 | 
				
			||||||
            return reqid
 | 
					            return reqid
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        entry = self._reqids.get(reqid)
 | 
					 | 
				
			||||||
        if entry:
 | 
					 | 
				
			||||||
            # order is already existing, this is a modify
 | 
					 | 
				
			||||||
            (oid, symbol, action, old_price) = entry
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            # register order internally
 | 
					 | 
				
			||||||
            self._reqids[reqid] = (oid, symbol, action, price)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # TODO: net latency model
 | 
					        # TODO: net latency model
 | 
				
			||||||
        # we checkpoint here quickly particulalry
 | 
					        # we checkpoint here quickly particulalry
 | 
				
			||||||
        # for dark orders since we want the dark_executed
 | 
					        # for dark orders since we want the dark_executed
 | 
				
			||||||
| 
						 | 
					@ -121,18 +107,15 @@ class PaperBoi(Struct):
 | 
				
			||||||
            size = -size
 | 
					            size = -size
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        msg = BrokerdStatus(
 | 
					        msg = BrokerdStatus(
 | 
				
			||||||
            status='open',
 | 
					            status='submitted',
 | 
				
			||||||
            # account=f'paper_{self.broker}',
 | 
					 | 
				
			||||||
            account='paper',
 | 
					 | 
				
			||||||
            reqid=reqid,
 | 
					            reqid=reqid,
 | 
				
			||||||
 | 
					            broker=self.broker,
 | 
				
			||||||
            time_ns=time.time_ns(),
 | 
					            time_ns=time.time_ns(),
 | 
				
			||||||
            filled=0.0,
 | 
					            filled=0.0,
 | 
				
			||||||
            reason='paper_trigger',
 | 
					            reason='paper_trigger',
 | 
				
			||||||
            remaining=size,
 | 
					            remaining=size,
 | 
				
			||||||
 | 
					 | 
				
			||||||
            broker_details={'name': 'paperboi'},
 | 
					 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        await self.ems_trades_stream.send(msg)
 | 
					        await self.ems_trades_stream.send(msg.dict())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # if we're already a clearing price simulate an immediate fill
 | 
					        # if we're already a clearing price simulate an immediate fill
 | 
				
			||||||
        if (
 | 
					        if (
 | 
				
			||||||
| 
						 | 
					@ -140,28 +123,28 @@ class PaperBoi(Struct):
 | 
				
			||||||
            ) or (
 | 
					            ) or (
 | 
				
			||||||
            action == 'sell' and (clear_price := self.last_bid[0]) >= price
 | 
					            action == 'sell' and (clear_price := self.last_bid[0]) >= price
 | 
				
			||||||
        ):
 | 
					        ):
 | 
				
			||||||
            await self.fake_fill(
 | 
					            await self.fake_fill(symbol, clear_price, size, action, reqid, oid)
 | 
				
			||||||
                symbol,
 | 
					 | 
				
			||||||
                clear_price,
 | 
					 | 
				
			||||||
                size,
 | 
					 | 
				
			||||||
                action,
 | 
					 | 
				
			||||||
                reqid,
 | 
					 | 
				
			||||||
                oid,
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # register this submissions as a paper live order
 | 
					 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            # set the simulated order in the respective table for lookup
 | 
					            # register this submissions as a paper live order
 | 
				
			||||||
            # and trigger by the simulated clearing task normally
 | 
					
 | 
				
			||||||
            # running ``simulate_fills()``.
 | 
					            # submit order to book simulation fill loop
 | 
				
			||||||
            if action == 'buy':
 | 
					            if action == 'buy':
 | 
				
			||||||
                orders = self._buys
 | 
					                orders = self._buys
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            elif action == 'sell':
 | 
					            elif action == 'sell':
 | 
				
			||||||
                orders = self._sells
 | 
					                orders = self._sells
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # {symbol -> bidict[oid, (<price data>)]}
 | 
					            # set the simulated order in the respective table for lookup
 | 
				
			||||||
            orders[symbol][oid] = (price, size, reqid, action)
 | 
					            # and trigger by the simulated clearing task normally
 | 
				
			||||||
 | 
					            # running ``simulate_fills()``.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            if is_modify:
 | 
				
			||||||
 | 
					                # remove any existing order for the old price
 | 
				
			||||||
 | 
					                orders[symbol].pop((oid, old_price))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # buys/sells: (symbol  -> (price -> order))
 | 
				
			||||||
 | 
					            orders.setdefault(symbol, {})[(oid, price)] = (size, reqid, action)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return reqid
 | 
					        return reqid
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -174,26 +157,26 @@ class PaperBoi(Struct):
 | 
				
			||||||
        oid, symbol, action, price = self._reqids[reqid]
 | 
					        oid, symbol, action, price = self._reqids[reqid]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if action == 'buy':
 | 
					        if action == 'buy':
 | 
				
			||||||
            self._buys[symbol].pop(oid, None)
 | 
					            self._buys[symbol].pop((oid, price))
 | 
				
			||||||
        elif action == 'sell':
 | 
					        elif action == 'sell':
 | 
				
			||||||
            self._sells[symbol].pop(oid, None)
 | 
					            self._sells[symbol].pop((oid, price))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: net latency model
 | 
					        # TODO: net latency model
 | 
				
			||||||
        await trio.sleep(0.05)
 | 
					        await trio.sleep(0.05)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        msg = BrokerdStatus(
 | 
					        msg = BrokerdStatus(
 | 
				
			||||||
            status='canceled',
 | 
					            status='cancelled',
 | 
				
			||||||
            account='paper',
 | 
					            oid=oid,
 | 
				
			||||||
            reqid=reqid,
 | 
					            reqid=reqid,
 | 
				
			||||||
 | 
					            broker=self.broker,
 | 
				
			||||||
            time_ns=time.time_ns(),
 | 
					            time_ns=time.time_ns(),
 | 
				
			||||||
            broker_details={'name': 'paperboi'},
 | 
					 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        await self.ems_trades_stream.send(msg)
 | 
					        await self.ems_trades_stream.send(msg.dict())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def fake_fill(
 | 
					    async def fake_fill(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        fqsn: str,
 | 
					        symbol: str,
 | 
				
			||||||
        price: float,
 | 
					        price: float,
 | 
				
			||||||
        size: float,
 | 
					        size: float,
 | 
				
			||||||
        action: str,  # one of {'buy', 'sell'}
 | 
					        action: str,  # one of {'buy', 'sell'}
 | 
				
			||||||
| 
						 | 
					@ -207,21 +190,21 @@ class PaperBoi(Struct):
 | 
				
			||||||
        remaining: float = 0,
 | 
					        remaining: float = 0,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''
 | 
					        """Pretend to fill a broker order @ price and size.
 | 
				
			||||||
        Pretend to fill a broker order @ price and size.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        # TODO: net latency model
 | 
					        # TODO: net latency model
 | 
				
			||||||
        await trio.sleep(0.05)
 | 
					        await trio.sleep(0.05)
 | 
				
			||||||
        fill_time_ns = time.time_ns()
 | 
					 | 
				
			||||||
        fill_time_s = time.time()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        fill_msg = BrokerdFill(
 | 
					        msg = BrokerdFill(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            reqid=reqid,
 | 
					            reqid=reqid,
 | 
				
			||||||
            time_ns=fill_time_ns,
 | 
					            time_ns=time.time_ns(),
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            action=action,
 | 
					            action=action,
 | 
				
			||||||
            size=size,
 | 
					            size=size,
 | 
				
			||||||
            price=price,
 | 
					            price=price,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            broker_time=datetime.now().timestamp(),
 | 
					            broker_time=datetime.now().timestamp(),
 | 
				
			||||||
            broker_details={
 | 
					            broker_details={
 | 
				
			||||||
                'paper_info': {
 | 
					                'paper_info': {
 | 
				
			||||||
| 
						 | 
					@ -231,67 +214,79 @@ class PaperBoi(Struct):
 | 
				
			||||||
                'name': self.broker + '_paper',
 | 
					                'name': self.broker + '_paper',
 | 
				
			||||||
            },
 | 
					            },
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        log.info(f'Fake filling order:\n{fill_msg}')
 | 
					        await self.ems_trades_stream.send(msg.dict())
 | 
				
			||||||
        await self.ems_trades_stream.send(fill_msg)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        self._trade_ledger.update(fill_msg.to_dict())
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if order_complete:
 | 
					        if order_complete:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            msg = BrokerdStatus(
 | 
					            msg = BrokerdStatus(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                reqid=reqid,
 | 
					                reqid=reqid,
 | 
				
			||||||
                time_ns=time.time_ns(),
 | 
					                time_ns=time.time_ns(),
 | 
				
			||||||
                # account=f'paper_{self.broker}',
 | 
					
 | 
				
			||||||
                account='paper',
 | 
					                status='filled',
 | 
				
			||||||
                status='closed',
 | 
					 | 
				
			||||||
                filled=size,
 | 
					                filled=size,
 | 
				
			||||||
                remaining=0 if order_complete else remaining,
 | 
					                remaining=0 if order_complete else remaining,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                action=action,
 | 
				
			||||||
 | 
					                size=size,
 | 
				
			||||||
 | 
					                price=price,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                broker_details={
 | 
				
			||||||
 | 
					                    'paper_info': {
 | 
				
			||||||
 | 
					                        'oid': oid,
 | 
				
			||||||
 | 
					                    },
 | 
				
			||||||
 | 
					                    'name': self.broker,
 | 
				
			||||||
 | 
					                },
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
            await self.ems_trades_stream.send(msg)
 | 
					            await self.ems_trades_stream.send(msg.dict())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # lookup any existing position
 | 
					        # lookup any existing position
 | 
				
			||||||
        key = fqsn.rstrip(f'.{self.broker}')
 | 
					        token = f'{symbol}.{self.broker}'
 | 
				
			||||||
        pp = self._positions.setdefault(
 | 
					        pp_msg = self._positions.setdefault(
 | 
				
			||||||
            fqsn,
 | 
					            token,
 | 
				
			||||||
            Position(
 | 
					            BrokerdPosition(
 | 
				
			||||||
                Symbol(
 | 
					                broker=self.broker,
 | 
				
			||||||
                    key=key,
 | 
					                account='paper',
 | 
				
			||||||
                    broker_info={self.broker: {}},
 | 
					                symbol=symbol,
 | 
				
			||||||
                ),
 | 
					                # TODO: we need to look up the asset currency from
 | 
				
			||||||
                size=size,
 | 
					                # broker info. i guess for crypto this can be
 | 
				
			||||||
                ppu=price,
 | 
					                # inferred from the pair?
 | 
				
			||||||
                bsuid=key,
 | 
					                currency='',
 | 
				
			||||||
 | 
					                size=0.0,
 | 
				
			||||||
 | 
					                avg_price=0,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        t = Transaction(
 | 
					 | 
				
			||||||
            fqsn=fqsn,
 | 
					 | 
				
			||||||
            tid=oid,
 | 
					 | 
				
			||||||
            size=size,
 | 
					 | 
				
			||||||
            price=price,
 | 
					 | 
				
			||||||
            cost=0,  # TODO: cost model
 | 
					 | 
				
			||||||
            dt=pendulum.from_timestamp(fill_time_s),
 | 
					 | 
				
			||||||
            bsuid=key,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
        pp.add_clear(t)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        pp_msg = BrokerdPosition(
 | 
					        # "avg position price" calcs
 | 
				
			||||||
            broker=self.broker,
 | 
					        # TODO: eventually it'd be nice to have a small set of routines
 | 
				
			||||||
            account='paper',
 | 
					        # to do this stuff from a sequence of cleared orders to enable
 | 
				
			||||||
            symbol=fqsn,
 | 
					        # so called "contextual positions".
 | 
				
			||||||
            # TODO: we need to look up the asset currency from
 | 
					        new_size = size + pp_msg.size
 | 
				
			||||||
            # broker info. i guess for crypto this can be
 | 
					 | 
				
			||||||
            # inferred from the pair?
 | 
					 | 
				
			||||||
            currency='',
 | 
					 | 
				
			||||||
            size=pp.size,
 | 
					 | 
				
			||||||
            avg_price=pp.ppu,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        await self.ems_trades_stream.send(pp_msg)
 | 
					        # old size minus the new size gives us size differential with
 | 
				
			||||||
 | 
					        # +ve -> increase in pp size
 | 
				
			||||||
 | 
					        # -ve -> decrease in pp size
 | 
				
			||||||
 | 
					        size_diff = abs(new_size) - abs(pp_msg.size)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if new_size == 0:
 | 
				
			||||||
 | 
					            pp_msg.avg_price = 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        elif size_diff > 0:
 | 
				
			||||||
 | 
					            # only update the "average position price" when the position
 | 
				
			||||||
 | 
					            # size increases not when it decreases (i.e. the position is
 | 
				
			||||||
 | 
					            # being made smaller)
 | 
				
			||||||
 | 
					            pp_msg.avg_price = (
 | 
				
			||||||
 | 
					                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
 | 
				
			||||||
 | 
					            ) / abs(new_size)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        pp_msg.size = new_size
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        await self.ems_trades_stream.send(pp_msg.dict())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
async def simulate_fills(
 | 
					async def simulate_fills(
 | 
				
			||||||
    quote_stream: tractor.MsgStream,  # noqa
 | 
					    quote_stream: 'tractor.ReceiveStream',  # noqa
 | 
				
			||||||
    client: PaperBoi,
 | 
					    client: PaperBoi,
 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO: more machinery to better simulate real-world market things:
 | 
					    # TODO: more machinery to better simulate real-world market things:
 | 
				
			||||||
| 
						 | 
					@ -311,116 +306,61 @@ async def simulate_fills(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # this stream may eventually contain multiple symbols
 | 
					    # this stream may eventually contain multiple symbols
 | 
				
			||||||
    async for quotes in quote_stream:
 | 
					    async for quotes in quote_stream:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        for sym, quote in quotes.items():
 | 
					        for sym, quote in quotes.items():
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            for tick in iterticks(
 | 
					            for tick in iterticks(
 | 
				
			||||||
                quote,
 | 
					                quote,
 | 
				
			||||||
                # dark order price filter(s)
 | 
					                # dark order price filter(s)
 | 
				
			||||||
                types=('ask', 'bid', 'trade', 'last')
 | 
					                types=('ask', 'bid', 'trade', 'last')
 | 
				
			||||||
            ):
 | 
					            ):
 | 
				
			||||||
                tick_price = tick['price']
 | 
					                # print(tick)
 | 
				
			||||||
 | 
					                tick_price = tick.get('price')
 | 
				
			||||||
 | 
					                ttype = tick['type']
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                buys: bidict[str, tuple] = client._buys[sym]
 | 
					                if ttype in ('ask',):
 | 
				
			||||||
                iter_buys = reversed(sorted(
 | 
					 | 
				
			||||||
                    buys.values(),
 | 
					 | 
				
			||||||
                    key=itemgetter(0),
 | 
					 | 
				
			||||||
                ))
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                def buy_on_ask(our_price):
 | 
					                    client.last_ask = (
 | 
				
			||||||
                    return tick_price <= our_price
 | 
					                        tick_price,
 | 
				
			||||||
 | 
					                        tick.get('size', client.last_ask[1]),
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                sells: bidict[str, tuple] = client._sells[sym]
 | 
					                    orders = client._buys.get(sym, {})
 | 
				
			||||||
                iter_sells = sorted(
 | 
					 | 
				
			||||||
                    sells.values(),
 | 
					 | 
				
			||||||
                    key=itemgetter(0)
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                def sell_on_bid(our_price):
 | 
					                    book_sequence = reversed(
 | 
				
			||||||
                    return tick_price >= our_price
 | 
					                        sorted(orders.keys(), key=itemgetter(1)))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                match tick:
 | 
					                    def pred(our_price):
 | 
				
			||||||
 | 
					                        return tick_price < our_price
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # on an ask queue tick, only clear buy entries
 | 
					                elif ttype in ('bid',):
 | 
				
			||||||
                    case {
 | 
					 | 
				
			||||||
                        'price': tick_price,
 | 
					 | 
				
			||||||
                        'type': 'ask',
 | 
					 | 
				
			||||||
                    }:
 | 
					 | 
				
			||||||
                        client.last_ask = (
 | 
					 | 
				
			||||||
                            tick_price,
 | 
					 | 
				
			||||||
                            tick.get('size', client.last_ask[1]),
 | 
					 | 
				
			||||||
                        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        iter_entries = zip(
 | 
					                    client.last_bid = (
 | 
				
			||||||
                            iter_buys,
 | 
					                        tick_price,
 | 
				
			||||||
                            itertools.repeat(buy_on_ask)
 | 
					                        tick.get('size', client.last_bid[1]),
 | 
				
			||||||
                        )
 | 
					                    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # on a bid queue tick, only clear sell entries
 | 
					                    orders = client._sells.get(sym, {})
 | 
				
			||||||
                    case {
 | 
					                    book_sequence = sorted(orders.keys(), key=itemgetter(1))
 | 
				
			||||||
                        'price': tick_price,
 | 
					 | 
				
			||||||
                        'type': 'bid',
 | 
					 | 
				
			||||||
                    }:
 | 
					 | 
				
			||||||
                        client.last_bid = (
 | 
					 | 
				
			||||||
                            tick_price,
 | 
					 | 
				
			||||||
                            tick.get('size', client.last_bid[1]),
 | 
					 | 
				
			||||||
                        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        iter_entries = zip(
 | 
					                    def pred(our_price):
 | 
				
			||||||
                            iter_sells,
 | 
					                        return tick_price > our_price
 | 
				
			||||||
                            itertools.repeat(sell_on_bid)
 | 
					 | 
				
			||||||
                        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # TODO: fix this block, though it definitely
 | 
					                elif ttype in ('trade', 'last'):
 | 
				
			||||||
                    # costs a lot more CPU-wise
 | 
					                    # TODO: simulate actual book queues and our orders
 | 
				
			||||||
                    # - doesn't seem like clears are happening still on
 | 
					                    # place in it, might require full L2 data?
 | 
				
			||||||
                    #   "resting" limit orders?
 | 
					                    continue
 | 
				
			||||||
                    case {
 | 
					 | 
				
			||||||
                        'price': tick_price,
 | 
					 | 
				
			||||||
                        'type': ('trade' | 'last'),
 | 
					 | 
				
			||||||
                    }:
 | 
					 | 
				
			||||||
                        # in the clearing price / last price case we
 | 
					 | 
				
			||||||
                        # want to iterate both sides of our book for
 | 
					 | 
				
			||||||
                        # clears since we don't know which direction the
 | 
					 | 
				
			||||||
                        # price is going to move (especially with HFT)
 | 
					 | 
				
			||||||
                        # and thus we simply interleave both sides (buys
 | 
					 | 
				
			||||||
                        # and sells) until one side clears and then
 | 
					 | 
				
			||||||
                        # break until the next tick?
 | 
					 | 
				
			||||||
                        def interleave():
 | 
					 | 
				
			||||||
                            for pair in zip(
 | 
					 | 
				
			||||||
                                iter_buys,
 | 
					 | 
				
			||||||
                                iter_sells,
 | 
					 | 
				
			||||||
                            ):
 | 
					 | 
				
			||||||
                                for order_info, pred in zip(
 | 
					 | 
				
			||||||
                                    pair,
 | 
					 | 
				
			||||||
                                    itertools.cycle([buy_on_ask, sell_on_bid]),
 | 
					 | 
				
			||||||
                                ):
 | 
					 | 
				
			||||||
                                    yield order_info, pred
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        iter_entries = interleave()
 | 
					                # iterate book prices descending
 | 
				
			||||||
 | 
					                for oid, our_price in book_sequence:
 | 
				
			||||||
 | 
					                    if pred(our_price):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # NOTE: all other (non-clearable) tick event types
 | 
					                        # retreive order info
 | 
				
			||||||
                    # - we don't want to sping the simulated clear loop
 | 
					                        (size, reqid, action) = orders.pop((oid, our_price))
 | 
				
			||||||
                    # below unecessarily and further don't want to pop
 | 
					 | 
				
			||||||
                    # simulated live orders prematurely.
 | 
					 | 
				
			||||||
                    case _:
 | 
					 | 
				
			||||||
                        continue
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # iterate all potentially clearable book prices
 | 
					 | 
				
			||||||
                # in FIFO order per side.
 | 
					 | 
				
			||||||
                for order_info, pred in iter_entries:
 | 
					 | 
				
			||||||
                    (our_price, size, reqid, action) = order_info
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                    # print(order_info)
 | 
					 | 
				
			||||||
                    clearable = pred(our_price)
 | 
					 | 
				
			||||||
                    if clearable:
 | 
					 | 
				
			||||||
                        # pop and retreive order info
 | 
					 | 
				
			||||||
                        oid = {
 | 
					 | 
				
			||||||
                            'buy': buys,
 | 
					 | 
				
			||||||
                            'sell': sells
 | 
					 | 
				
			||||||
                        }[action].inverse.pop(order_info)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        # clearing price would have filled entirely
 | 
					                        # clearing price would have filled entirely
 | 
				
			||||||
                        await client.fake_fill(
 | 
					                        await client.fake_fill(
 | 
				
			||||||
                            fqsn=sym,
 | 
					                            symbol=sym,
 | 
				
			||||||
                            # todo slippage to determine fill price
 | 
					                            # todo slippage to determine fill price
 | 
				
			||||||
                            price=tick_price,
 | 
					                            price=tick_price,
 | 
				
			||||||
                            size=size,
 | 
					                            size=size,
 | 
				
			||||||
| 
						 | 
					@ -428,6 +368,9 @@ async def simulate_fills(
 | 
				
			||||||
                            reqid=reqid,
 | 
					                            reqid=reqid,
 | 
				
			||||||
                            oid=oid,
 | 
					                            oid=oid,
 | 
				
			||||||
                        )
 | 
					                        )
 | 
				
			||||||
 | 
					                    else:
 | 
				
			||||||
 | 
					                        # prices are iterated in sorted order so we're done
 | 
				
			||||||
 | 
					                        break
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
async def handle_order_requests(
 | 
					async def handle_order_requests(
 | 
				
			||||||
| 
						 | 
					@ -437,81 +380,66 @@ async def handle_order_requests(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    request_msg: dict
 | 
					    # order_request: dict
 | 
				
			||||||
    async for request_msg in ems_order_stream:
 | 
					    async for request_msg in ems_order_stream:
 | 
				
			||||||
        match request_msg:
 | 
					 | 
				
			||||||
            case {'action': ('buy' | 'sell')}:
 | 
					 | 
				
			||||||
                order = BrokerdOrder(**request_msg)
 | 
					 | 
				
			||||||
                account = order.account
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # error on bad inputs
 | 
					        action = request_msg['action']
 | 
				
			||||||
                reason = None
 | 
					 | 
				
			||||||
                if account != 'paper':
 | 
					 | 
				
			||||||
                    reason = f'No account found:`{account}` (paper only)?'
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                elif order.size == 0:
 | 
					        if action in {'buy', 'sell'}:
 | 
				
			||||||
                    reason = 'Invalid size: 0'
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                if reason:
 | 
					            account = request_msg['account']
 | 
				
			||||||
                    log.error(reason)
 | 
					            if account != 'paper':
 | 
				
			||||||
                    await ems_order_stream.send(BrokerdError(
 | 
					                log.error(
 | 
				
			||||||
                        oid=order.oid,
 | 
					                    'This is a paper account, only a `paper` selection is valid'
 | 
				
			||||||
                        symbol=order.symbol,
 | 
					 | 
				
			||||||
                        reason=reason,
 | 
					 | 
				
			||||||
                    ))
 | 
					 | 
				
			||||||
                    continue
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                reqid = order.reqid or str(uuid.uuid4())
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # deliver ack that order has been submitted to broker routing
 | 
					 | 
				
			||||||
                await ems_order_stream.send(
 | 
					 | 
				
			||||||
                    BrokerdOrderAck(
 | 
					 | 
				
			||||||
                        oid=order.oid,
 | 
					 | 
				
			||||||
                        reqid=reqid,
 | 
					 | 
				
			||||||
                    )
 | 
					 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
 | 
					                await ems_order_stream.send(BrokerdError(
 | 
				
			||||||
 | 
					                    oid=request_msg['oid'],
 | 
				
			||||||
 | 
					                    symbol=request_msg['symbol'],
 | 
				
			||||||
 | 
					                    reason=f'Paper only. No account found: `{account}` ?',
 | 
				
			||||||
 | 
					                ).dict())
 | 
				
			||||||
 | 
					                continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # call our client api to submit the order
 | 
					            # validate
 | 
				
			||||||
                reqid = await client.submit_limit(
 | 
					            order = BrokerdOrder(**request_msg)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # call our client api to submit the order
 | 
				
			||||||
 | 
					            reqid = await client.submit_limit(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                oid=order.oid,
 | 
				
			||||||
 | 
					                symbol=order.symbol,
 | 
				
			||||||
 | 
					                price=order.price,
 | 
				
			||||||
 | 
					                action=order.action,
 | 
				
			||||||
 | 
					                size=order.size,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                # XXX: by default 0 tells ``ib_insync`` methods that
 | 
				
			||||||
 | 
					                # there is no existing order so ask the client to create
 | 
				
			||||||
 | 
					                # a new one (which it seems to do by allocating an int
 | 
				
			||||||
 | 
					                # counter - collision prone..)
 | 
				
			||||||
 | 
					                reqid=order.reqid,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # deliver ack that order has been submitted to broker routing
 | 
				
			||||||
 | 
					            await ems_order_stream.send(
 | 
				
			||||||
 | 
					                BrokerdOrderAck(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                    # ems order request id
 | 
				
			||||||
                    oid=order.oid,
 | 
					                    oid=order.oid,
 | 
				
			||||||
                    symbol=f'{order.symbol}.{client.broker}',
 | 
					
 | 
				
			||||||
                    price=order.price,
 | 
					                    # broker specific request id
 | 
				
			||||||
                    action=order.action,
 | 
					 | 
				
			||||||
                    size=order.size,
 | 
					 | 
				
			||||||
                    # XXX: by default 0 tells ``ib_insync`` methods that
 | 
					 | 
				
			||||||
                    # there is no existing order so ask the client to create
 | 
					 | 
				
			||||||
                    # a new one (which it seems to do by allocating an int
 | 
					 | 
				
			||||||
                    # counter - collision prone..)
 | 
					 | 
				
			||||||
                    reqid=reqid,
 | 
					                    reqid=reqid,
 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
                log.info(f'Submitted paper LIMIT {reqid}:\n{order}')
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            case {'action': 'cancel'}:
 | 
					                ).dict()
 | 
				
			||||||
                msg = BrokerdCancel(**request_msg)
 | 
					            )
 | 
				
			||||||
                await client.submit_cancel(
 | 
					 | 
				
			||||||
                    reqid=msg.reqid
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            case _:
 | 
					        elif action == 'cancel':
 | 
				
			||||||
                log.error(f'Unknown order command: {request_msg}')
 | 
					            msg = BrokerdCancel(**request_msg)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            await client.submit_cancel(
 | 
				
			||||||
 | 
					                reqid=msg.reqid
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_reqids: bidict[str, tuple] = {}
 | 
					        else:
 | 
				
			||||||
_buys: defaultdict[
 | 
					            log.error(f'Unknown order command: {request_msg}')
 | 
				
			||||||
    str,  # symbol
 | 
					 | 
				
			||||||
    bidict[
 | 
					 | 
				
			||||||
        str,  # oid
 | 
					 | 
				
			||||||
        tuple[float, float, str, str],  # order info
 | 
					 | 
				
			||||||
    ]
 | 
					 | 
				
			||||||
] = defaultdict(bidict)
 | 
					 | 
				
			||||||
_sells: defaultdict[
 | 
					 | 
				
			||||||
    str,  # symbol
 | 
					 | 
				
			||||||
    bidict[
 | 
					 | 
				
			||||||
        str,  # oid
 | 
					 | 
				
			||||||
        tuple[float, float, str, str],  # order info
 | 
					 | 
				
			||||||
    ]
 | 
					 | 
				
			||||||
] = defaultdict(bidict)
 | 
					 | 
				
			||||||
_positions: dict[str, Position] = {}
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@tractor.context
 | 
					@tractor.context
 | 
				
			||||||
| 
						 | 
					@ -523,59 +451,39 @@ async def trades_dialogue(
 | 
				
			||||||
    loglevel: str = None,
 | 
					    loglevel: str = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
 | 
					 | 
				
			||||||
    tractor.log.get_console_log(loglevel)
 | 
					    tractor.log.get_console_log(loglevel)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async with (
 | 
					    async with (
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        data.open_feed(
 | 
					        data.open_feed(
 | 
				
			||||||
            [fqsn],
 | 
					            [fqsn],
 | 
				
			||||||
            loglevel=loglevel,
 | 
					            loglevel=loglevel,
 | 
				
			||||||
        ) as feed,
 | 
					        ) as feed,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        pp_msgs: list[BrokerdPosition] = []
 | 
					 | 
				
			||||||
        pos: Position
 | 
					 | 
				
			||||||
        token: str  # f'{symbol}.{self.broker}'
 | 
					 | 
				
			||||||
        for token, pos in _positions.items():
 | 
					 | 
				
			||||||
            pp_msgs.append(BrokerdPosition(
 | 
					 | 
				
			||||||
                broker=broker,
 | 
					 | 
				
			||||||
                account='paper',
 | 
					 | 
				
			||||||
                symbol=pos.symbol.front_fqsn(),
 | 
					 | 
				
			||||||
                size=pos.size,
 | 
					 | 
				
			||||||
                avg_price=pos.ppu,
 | 
					 | 
				
			||||||
            ))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # TODO: load paper positions per broker from .toml config file
 | 
					        # TODO: load paper positions per broker from .toml config file
 | 
				
			||||||
        # and pass as symbol to position data mapping: ``dict[str, dict]``
 | 
					        # and pass as symbol to position data mapping: ``dict[str, dict]``
 | 
				
			||||||
        await ctx.started((
 | 
					        # await ctx.started(all_positions)
 | 
				
			||||||
            pp_msgs,
 | 
					        await ctx.started(({}, {'paper',}))
 | 
				
			||||||
            ['paper'],
 | 
					 | 
				
			||||||
        ))
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        async with (
 | 
					        async with (
 | 
				
			||||||
            ctx.open_stream() as ems_stream,
 | 
					            ctx.open_stream() as ems_stream,
 | 
				
			||||||
            trio.open_nursery() as n,
 | 
					            trio.open_nursery() as n,
 | 
				
			||||||
        ):
 | 
					        ):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            client = PaperBoi(
 | 
					            client = PaperBoi(
 | 
				
			||||||
                broker,
 | 
					                broker,
 | 
				
			||||||
                ems_stream,
 | 
					                ems_stream,
 | 
				
			||||||
                _buys=_buys,
 | 
					                _buys={},
 | 
				
			||||||
                _sells=_sells,
 | 
					                _sells={},
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                _reqids=_reqids,
 | 
					                _reqids={},
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # TODO: load paper positions from ``positions.toml``
 | 
					                # TODO: load paper positions from ``positions.toml``
 | 
				
			||||||
                _positions=_positions,
 | 
					                _positions={},
 | 
				
			||||||
 | 
					 | 
				
			||||||
                # TODO: load postions from ledger file
 | 
					 | 
				
			||||||
                _trade_ledger={},
 | 
					 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            n.start_soon(
 | 
					            n.start_soon(handle_order_requests, client, ems_stream)
 | 
				
			||||||
                handle_order_requests,
 | 
					 | 
				
			||||||
                client,
 | 
					 | 
				
			||||||
                ems_stream,
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # paper engine simulator clearing task
 | 
					            # paper engine simulator clearing task
 | 
				
			||||||
            await simulate_fills(feed.stream, client)
 | 
					            await simulate_fills(feed.stream, client)
 | 
				
			||||||
| 
						 | 
					@ -603,17 +511,17 @@ async def open_paperboi(
 | 
				
			||||||
        # (we likely don't need more then one proc for basic
 | 
					        # (we likely don't need more then one proc for basic
 | 
				
			||||||
        # simulated order clearing)
 | 
					        # simulated order clearing)
 | 
				
			||||||
        if portal is None:
 | 
					        if portal is None:
 | 
				
			||||||
            log.info('Starting new paper-engine actor')
 | 
					 | 
				
			||||||
            portal = await tn.start_actor(
 | 
					            portal = await tn.start_actor(
 | 
				
			||||||
                service_name,
 | 
					                service_name,
 | 
				
			||||||
                enable_modules=[__name__]
 | 
					                enable_modules=[__name__]
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        async with portal.open_context(
 | 
					        async with portal.open_context(
 | 
				
			||||||
            trades_dialogue,
 | 
					                trades_dialogue,
 | 
				
			||||||
            broker=broker,
 | 
					                broker=broker,
 | 
				
			||||||
            fqsn=fqsn,
 | 
					                fqsn=fqsn,
 | 
				
			||||||
            loglevel=loglevel,
 | 
					                loglevel=loglevel,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        ) as (ctx, first):
 | 
					        ) as (ctx, first):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            yield ctx, first
 | 
					            yield ctx, first
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -83,9 +83,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
                log.info(
 | 
					                log.info(
 | 
				
			||||||
                    f'`marketstored` up!\n'
 | 
					                    f'`marketstore` up!\n'
 | 
				
			||||||
                    f'pid: {pid}\n'
 | 
					                    f'`marketstored` pid: {pid}\n'
 | 
				
			||||||
                    f'container id: {cid[:12]}\n'
 | 
					                    f'docker container id: {cid}\n'
 | 
				
			||||||
                    f'config: {pformat(config)}'
 | 
					                    f'config: {pformat(config)}'
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -138,26 +138,25 @@ def cli(ctx, brokers, loglevel, tl, configdir):
 | 
				
			||||||
@click.pass_obj
 | 
					@click.pass_obj
 | 
				
			||||||
def services(config, tl, names):
 | 
					def services(config, tl, names):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    from .._daemon import open_piker_runtime
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async def list_services():
 | 
					    async def list_services():
 | 
				
			||||||
        async with (
 | 
					
 | 
				
			||||||
            open_piker_runtime(
 | 
					        async with tractor.get_arbiter(
 | 
				
			||||||
                name='service_query',
 | 
					            *_tractor_kwargs['arbiter_addr']
 | 
				
			||||||
                loglevel=config['loglevel'] if tl else None,
 | 
					        ) as portal:
 | 
				
			||||||
            ),
 | 
					 | 
				
			||||||
            tractor.get_arbiter(
 | 
					 | 
				
			||||||
                *_tractor_kwargs['arbiter_addr']
 | 
					 | 
				
			||||||
            ) as portal
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            registry = await portal.run_from_ns('self', 'get_registry')
 | 
					            registry = await portal.run_from_ns('self', 'get_registry')
 | 
				
			||||||
            json_d = {}
 | 
					            json_d = {}
 | 
				
			||||||
            for key, socket in registry.items():
 | 
					            for key, socket in registry.items():
 | 
				
			||||||
 | 
					                # name, uuid = uid
 | 
				
			||||||
                host, port = socket
 | 
					                host, port = socket
 | 
				
			||||||
                json_d[key] = f'{host}:{port}'
 | 
					                json_d[key] = f'{host}:{port}'
 | 
				
			||||||
            click.echo(f"{colorize_json(json_d)}")
 | 
					            click.echo(f"{colorize_json(json_d)}")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    trio.run(list_services)
 | 
					    tractor.run(
 | 
				
			||||||
 | 
					        list_services,
 | 
				
			||||||
 | 
					        name='service_query',
 | 
				
			||||||
 | 
					        loglevel=config['loglevel'] if tl else None,
 | 
				
			||||||
 | 
					        arbiter_addr=_tractor_kwargs['arbiter_addr'],
 | 
				
			||||||
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def _load_clis() -> None:
 | 
					def _load_clis() -> None:
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -21,7 +21,6 @@ Broker configuration mgmt.
 | 
				
			||||||
import platform
 | 
					import platform
 | 
				
			||||||
import sys
 | 
					import sys
 | 
				
			||||||
import os
 | 
					import os
 | 
				
			||||||
from os import path
 | 
					 | 
				
			||||||
from os.path import dirname
 | 
					from os.path import dirname
 | 
				
			||||||
import shutil
 | 
					import shutil
 | 
				
			||||||
from typing import Optional
 | 
					from typing import Optional
 | 
				
			||||||
| 
						 | 
					@ -112,7 +111,6 @@ if _parent_user:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_conf_names: set[str] = {
 | 
					_conf_names: set[str] = {
 | 
				
			||||||
    'brokers',
 | 
					    'brokers',
 | 
				
			||||||
    'pps',
 | 
					 | 
				
			||||||
    'trades',
 | 
					    'trades',
 | 
				
			||||||
    'watchlists',
 | 
					    'watchlists',
 | 
				
			||||||
}
 | 
					}
 | 
				
			||||||
| 
						 | 
					@ -149,21 +147,19 @@ def get_conf_path(
 | 
				
			||||||
    conf_name: str = 'brokers',
 | 
					    conf_name: str = 'brokers',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> str:
 | 
					) -> str:
 | 
				
			||||||
    '''
 | 
					    """Return the default config path normally under
 | 
				
			||||||
    Return the top-level default config path normally under
 | 
					    ``~/.config/piker`` on linux.
 | 
				
			||||||
    ``~/.config/piker`` on linux for a given ``conf_name``, the config
 | 
					 | 
				
			||||||
    name.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    Contains files such as:
 | 
					    Contains files such as:
 | 
				
			||||||
    - brokers.toml
 | 
					    - brokers.toml
 | 
				
			||||||
    - pp.toml
 | 
					 | 
				
			||||||
    - watchlists.toml
 | 
					    - watchlists.toml
 | 
				
			||||||
 | 
					    - trades.toml
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # maybe coming soon ;)
 | 
					    # maybe coming soon ;)
 | 
				
			||||||
    - signals.toml
 | 
					    - signals.toml
 | 
				
			||||||
    - strats.toml
 | 
					    - strats.toml
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    """
 | 
				
			||||||
    assert conf_name in _conf_names
 | 
					    assert conf_name in _conf_names
 | 
				
			||||||
    fn = _conf_fn_w_ext(conf_name)
 | 
					    fn = _conf_fn_w_ext(conf_name)
 | 
				
			||||||
    return os.path.join(
 | 
					    return os.path.join(
 | 
				
			||||||
| 
						 | 
					@ -177,7 +173,7 @@ def repodir():
 | 
				
			||||||
    Return the abspath to the repo directory.
 | 
					    Return the abspath to the repo directory.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    dirpath = path.abspath(
 | 
					    dirpath = os.path.abspath(
 | 
				
			||||||
        # we're 3 levels down in **this** module file
 | 
					        # we're 3 levels down in **this** module file
 | 
				
			||||||
        dirname(dirname(os.path.realpath(__file__)))
 | 
					        dirname(dirname(os.path.realpath(__file__)))
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
| 
						 | 
					@ -186,9 +182,7 @@ def repodir():
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def load(
 | 
					def load(
 | 
				
			||||||
    conf_name: str = 'brokers',
 | 
					    conf_name: str = 'brokers',
 | 
				
			||||||
    path: str = None,
 | 
					    path: str = None
 | 
				
			||||||
 | 
					 | 
				
			||||||
    **tomlkws,
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> (dict, str):
 | 
					) -> (dict, str):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -196,7 +190,6 @@ def load(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    path = path or get_conf_path(conf_name)
 | 
					    path = path or get_conf_path(conf_name)
 | 
				
			||||||
 | 
					 | 
				
			||||||
    if not os.path.isfile(path):
 | 
					    if not os.path.isfile(path):
 | 
				
			||||||
        fn = _conf_fn_w_ext(conf_name)
 | 
					        fn = _conf_fn_w_ext(conf_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -209,11 +202,8 @@ def load(
 | 
				
			||||||
        # if one exists.
 | 
					        # if one exists.
 | 
				
			||||||
        if os.path.isfile(template):
 | 
					        if os.path.isfile(template):
 | 
				
			||||||
            shutil.copyfile(template, path)
 | 
					            shutil.copyfile(template, path)
 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            with open(path, 'w'):
 | 
					 | 
				
			||||||
                pass  # touch
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    config = toml.load(path, **tomlkws)
 | 
					    config = toml.load(path)
 | 
				
			||||||
    log.debug(f"Read config file {path}")
 | 
					    log.debug(f"Read config file {path}")
 | 
				
			||||||
    return config, path
 | 
					    return config, path
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -222,7 +212,6 @@ def write(
 | 
				
			||||||
    config: dict,  # toml config as dict
 | 
					    config: dict,  # toml config as dict
 | 
				
			||||||
    name: str = 'brokers',
 | 
					    name: str = 'brokers',
 | 
				
			||||||
    path: str = None,
 | 
					    path: str = None,
 | 
				
			||||||
    **toml_kwargs,
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
    ''''
 | 
					    ''''
 | 
				
			||||||
| 
						 | 
					@ -246,14 +235,11 @@ def write(
 | 
				
			||||||
        f"{path}"
 | 
					        f"{path}"
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
    with open(path, 'w') as cf:
 | 
					    with open(path, 'w') as cf:
 | 
				
			||||||
        return toml.dump(
 | 
					        return toml.dump(config, cf)
 | 
				
			||||||
            config,
 | 
					 | 
				
			||||||
            cf,
 | 
					 | 
				
			||||||
            **toml_kwargs,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def load_accounts(
 | 
					def load_accounts(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    providers: Optional[list[str]] = None
 | 
					    providers: Optional[list[str]] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> bidict[str, Optional[str]]:
 | 
					) -> bidict[str, Optional[str]]:
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -37,13 +37,8 @@ from docker.models.containers import Container as DockerContainer
 | 
				
			||||||
from docker.errors import (
 | 
					from docker.errors import (
 | 
				
			||||||
    DockerException,
 | 
					    DockerException,
 | 
				
			||||||
    APIError,
 | 
					    APIError,
 | 
				
			||||||
    # ContainerError,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
import requests
 | 
					 | 
				
			||||||
from requests.exceptions import (
 | 
					 | 
				
			||||||
    ConnectionError,
 | 
					 | 
				
			||||||
    ReadTimeout,
 | 
					 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 | 
					from requests.exceptions import ConnectionError, ReadTimeout
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from ..log import get_logger, get_console_log
 | 
					from ..log import get_logger, get_console_log
 | 
				
			||||||
from .. import config
 | 
					from .. import config
 | 
				
			||||||
| 
						 | 
					@ -55,8 +50,8 @@ class DockerNotStarted(Exception):
 | 
				
			||||||
    'Prolly you dint start da daemon bruh'
 | 
					    'Prolly you dint start da daemon bruh'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class ApplicationLogError(Exception):
 | 
					class ContainerError(RuntimeError):
 | 
				
			||||||
    'App in container reported an error in logs'
 | 
					    'Error reported via app-container logging level'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@acm
 | 
					@acm
 | 
				
			||||||
| 
						 | 
					@ -101,9 +96,9 @@ async def open_docker(
 | 
				
			||||||
        # not perms?
 | 
					        # not perms?
 | 
				
			||||||
        raise
 | 
					        raise
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # finally:
 | 
					    finally:
 | 
				
			||||||
    #     if client:
 | 
					        if client:
 | 
				
			||||||
    #         client.close()
 | 
					            client.close()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class Container:
 | 
					class Container:
 | 
				
			||||||
| 
						 | 
					@ -161,7 +156,7 @@ class Container:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # print(f'level: {level}')
 | 
					                    # print(f'level: {level}')
 | 
				
			||||||
                    if level in ('error', 'fatal'):
 | 
					                    if level in ('error', 'fatal'):
 | 
				
			||||||
                        raise ApplicationLogError(msg)
 | 
					                        raise ContainerError(msg)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                if patt in msg:
 | 
					                if patt in msg:
 | 
				
			||||||
                    return True
 | 
					                    return True
 | 
				
			||||||
| 
						 | 
					@ -190,29 +185,12 @@ class Container:
 | 
				
			||||||
            if 'is not running' in err.explanation:
 | 
					            if 'is not running' in err.explanation:
 | 
				
			||||||
                return False
 | 
					                return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def hard_kill(self, start: float) -> None:
 | 
					 | 
				
			||||||
        delay = time.time() - start
 | 
					 | 
				
			||||||
        # get out the big guns, bc apparently marketstore
 | 
					 | 
				
			||||||
        # doesn't actually know how to terminate gracefully
 | 
					 | 
				
			||||||
        # :eyeroll:...
 | 
					 | 
				
			||||||
        log.error(
 | 
					 | 
				
			||||||
            f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
        self.try_signal('SIGKILL')
 | 
					 | 
				
			||||||
        self.cntr.wait(
 | 
					 | 
				
			||||||
            timeout=3,
 | 
					 | 
				
			||||||
            condition='not-running',
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async def cancel(
 | 
					    async def cancel(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        stop_msg: str,
 | 
					        stop_msg: str,
 | 
				
			||||||
        hard_kill: bool = False,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        cid = self.cntr.id
 | 
					        cid = self.cntr.id
 | 
				
			||||||
 | 
					 | 
				
			||||||
        # first try a graceful cancel
 | 
					        # first try a graceful cancel
 | 
				
			||||||
        log.cancel(
 | 
					        log.cancel(
 | 
				
			||||||
            f'SIGINT cancelling container: {cid}\n'
 | 
					            f'SIGINT cancelling container: {cid}\n'
 | 
				
			||||||
| 
						 | 
					@ -221,25 +199,15 @@ class Container:
 | 
				
			||||||
        self.try_signal('SIGINT')
 | 
					        self.try_signal('SIGINT')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        start = time.time()
 | 
					        start = time.time()
 | 
				
			||||||
        for _ in range(6):
 | 
					        for _ in range(30):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            with trio.move_on_after(0.5) as cs:
 | 
					            with trio.move_on_after(0.5) as cs:
 | 
				
			||||||
                log.cancel('polling for CNTR logs...')
 | 
					                cs.shield = True
 | 
				
			||||||
 | 
					                await self.process_logs_until(stop_msg)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                try:
 | 
					                # if we aren't cancelled on above checkpoint then we
 | 
				
			||||||
                    await self.process_logs_until(stop_msg)
 | 
					                # assume we read the expected stop msg and terminated.
 | 
				
			||||||
                except ApplicationLogError:
 | 
					                break
 | 
				
			||||||
                    hard_kill = True
 | 
					 | 
				
			||||||
                else:
 | 
					 | 
				
			||||||
                    # if we aren't cancelled on above checkpoint then we
 | 
					 | 
				
			||||||
                    # assume we read the expected stop msg and
 | 
					 | 
				
			||||||
                    # terminated.
 | 
					 | 
				
			||||||
                    break
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            if cs.cancelled_caught:
 | 
					 | 
				
			||||||
                # on timeout just try a hard kill after
 | 
					 | 
				
			||||||
                # a quick container sync-wait.
 | 
					 | 
				
			||||||
                hard_kill = True
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            try:
 | 
					            try:
 | 
				
			||||||
                log.info(f'Polling for container shutdown:\n{cid}')
 | 
					                log.info(f'Polling for container shutdown:\n{cid}')
 | 
				
			||||||
| 
						 | 
					@ -250,7 +218,6 @@ class Container:
 | 
				
			||||||
                        condition='not-running',
 | 
					                        condition='not-running',
 | 
				
			||||||
                    )
 | 
					                    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # graceful exit if we didn't time out
 | 
					 | 
				
			||||||
                break
 | 
					                break
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            except (
 | 
					            except (
 | 
				
			||||||
| 
						 | 
					@ -262,23 +229,25 @@ class Container:
 | 
				
			||||||
            except (
 | 
					            except (
 | 
				
			||||||
                docker.errors.APIError,
 | 
					                docker.errors.APIError,
 | 
				
			||||||
                ConnectionError,
 | 
					                ConnectionError,
 | 
				
			||||||
                requests.exceptions.ConnectionError,
 | 
					 | 
				
			||||||
                trio.Cancelled,
 | 
					 | 
				
			||||||
            ):
 | 
					            ):
 | 
				
			||||||
                log.exception('Docker connection failure')
 | 
					                log.exception('Docker connection failure')
 | 
				
			||||||
                self.hard_kill(start)
 | 
					                break
 | 
				
			||||||
                raise
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            except trio.Cancelled:
 | 
					 | 
				
			||||||
                log.exception('trio cancelled...')
 | 
					 | 
				
			||||||
                self.hard_kill(start)
 | 
					 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            hard_kill = True
 | 
					            delay = time.time() - start
 | 
				
			||||||
 | 
					            log.error(
 | 
				
			||||||
 | 
					                f'Failed to kill container {cid} after {delay}s\n'
 | 
				
			||||||
 | 
					                'sending SIGKILL..'
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					            # get out the big guns, bc apparently marketstore
 | 
				
			||||||
 | 
					            # doesn't actually know how to terminate gracefully
 | 
				
			||||||
 | 
					            # :eyeroll:...
 | 
				
			||||||
 | 
					            self.try_signal('SIGKILL')
 | 
				
			||||||
 | 
					            self.cntr.wait(
 | 
				
			||||||
 | 
					                timeout=3,
 | 
				
			||||||
 | 
					                condition='not-running',
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if hard_kill:
 | 
					        log.cancel(f'Container stopped: {cid}')
 | 
				
			||||||
            self.hard_kill(start)
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            log.cancel(f'Container stopped: {cid}')
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@tractor.context
 | 
					@tractor.context
 | 
				
			||||||
| 
						 | 
					@ -320,13 +289,15 @@ async def open_ahabd(
 | 
				
			||||||
        ))
 | 
					        ))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        try:
 | 
					        try:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # TODO: we might eventually want a proxy-style msg-prot here
 | 
					            # TODO: we might eventually want a proxy-style msg-prot here
 | 
				
			||||||
            # to allow remote control of containers without needing
 | 
					            # to allow remote control of containers without needing
 | 
				
			||||||
            # callers to have root perms?
 | 
					            # callers to have root perms?
 | 
				
			||||||
            await trio.sleep_forever()
 | 
					            await trio.sleep_forever()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        finally:
 | 
					        finally:
 | 
				
			||||||
            await cntr.cancel(stop_msg)
 | 
					            with trio.CancelScope(shield=True):
 | 
				
			||||||
 | 
					                await cntr.cancel(stop_msg)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
async def start_ahab(
 | 
					async def start_ahab(
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -56,7 +56,7 @@ def iterticks(
 | 
				
			||||||
                    sig = (
 | 
					                    sig = (
 | 
				
			||||||
                        time,
 | 
					                        time,
 | 
				
			||||||
                        tick['price'],
 | 
					                        tick['price'],
 | 
				
			||||||
                        tick.get('size')
 | 
					                        tick['size']
 | 
				
			||||||
                    )
 | 
					                    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    if ttype == 'dark_trade':
 | 
					                    if ttype == 'dark_trade':
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -37,9 +37,6 @@ if TYPE_CHECKING:
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
_default_delay_s: float = 1.0
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class sampler:
 | 
					class sampler:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    Global sampling engine registry.
 | 
					    Global sampling engine registry.
 | 
				
			||||||
| 
						 | 
					@ -107,18 +104,14 @@ async def increment_ohlc_buffer(
 | 
				
			||||||
            # TODO: do we want to support dynamically
 | 
					            # TODO: do we want to support dynamically
 | 
				
			||||||
            # adding a "lower" lowest increment period?
 | 
					            # adding a "lower" lowest increment period?
 | 
				
			||||||
            await trio.sleep(ad)
 | 
					            await trio.sleep(ad)
 | 
				
			||||||
            total_s += delay_s
 | 
					            total_s += lowest
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # increment all subscribed shm arrays
 | 
					            # increment all subscribed shm arrays
 | 
				
			||||||
            # TODO:
 | 
					            # TODO:
 | 
				
			||||||
            # - this in ``numba``
 | 
					            # - this in ``numba``
 | 
				
			||||||
            # - just lookup shms for this step instead of iterating?
 | 
					            # - just lookup shms for this step instead of iterating?
 | 
				
			||||||
            for this_delay_s, shms in sampler.ohlcv_shms.items():
 | 
					            for delay_s, shms in sampler.ohlcv_shms.items():
 | 
				
			||||||
 | 
					                if total_s % delay_s != 0:
 | 
				
			||||||
                # short-circuit on any not-ready because slower sample
 | 
					 | 
				
			||||||
                # rate consuming shm buffers.
 | 
					 | 
				
			||||||
                if total_s % this_delay_s != 0:
 | 
					 | 
				
			||||||
                    # print(f'skipping `{this_delay_s}s` sample update')
 | 
					 | 
				
			||||||
                    continue
 | 
					                    continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # TODO: ``numba`` this!
 | 
					                # TODO: ``numba`` this!
 | 
				
			||||||
| 
						 | 
					@ -137,7 +130,7 @@ async def increment_ohlc_buffer(
 | 
				
			||||||
                    # this copies non-std fields (eg. vwap) from the last datum
 | 
					                    # this copies non-std fields (eg. vwap) from the last datum
 | 
				
			||||||
                    last[
 | 
					                    last[
 | 
				
			||||||
                        ['time', 'volume', 'open', 'high', 'low', 'close']
 | 
					                        ['time', 'volume', 'open', 'high', 'low', 'close']
 | 
				
			||||||
                    ][0] = (t + this_delay_s, 0, close, close, close, close)
 | 
					                    ][0] = (t + delay_s, 0, close, close, close, close)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # write to the buffer
 | 
					                    # write to the buffer
 | 
				
			||||||
                    shm.push(last)
 | 
					                    shm.push(last)
 | 
				
			||||||
| 
						 | 
					@ -159,6 +152,7 @@ async def broadcast(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    subs = sampler.subscribers.get(delay_s, ())
 | 
					    subs = sampler.subscribers.get(delay_s, ())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    first = last = -1
 | 
					    first = last = -1
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    if shm is None:
 | 
					    if shm is None:
 | 
				
			||||||
| 
						 | 
					@ -227,8 +221,7 @@ async def iter_ohlc_periods(
 | 
				
			||||||
async def sample_and_broadcast(
 | 
					async def sample_and_broadcast(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    bus: _FeedsBus,  # noqa
 | 
					    bus: _FeedsBus,  # noqa
 | 
				
			||||||
    rt_shm: ShmArray,
 | 
					    shm: ShmArray,
 | 
				
			||||||
    hist_shm: ShmArray,
 | 
					 | 
				
			||||||
    quote_stream: trio.abc.ReceiveChannel,
 | 
					    quote_stream: trio.abc.ReceiveChannel,
 | 
				
			||||||
    brokername: str,
 | 
					    brokername: str,
 | 
				
			||||||
    sum_tick_vlm: bool = True,
 | 
					    sum_tick_vlm: bool = True,
 | 
				
			||||||
| 
						 | 
					@ -264,45 +257,41 @@ async def sample_and_broadcast(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    last = tick['price']
 | 
					                    last = tick['price']
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # more compact inline-way to do this assignment
 | 
					                    # update last entry
 | 
				
			||||||
                    # to both buffers?
 | 
					                    # benchmarked in the 4-5 us range
 | 
				
			||||||
                    for shm in [rt_shm, hist_shm]:
 | 
					                    o, high, low, v = shm.array[-1][
 | 
				
			||||||
                        # update last entry
 | 
					                        ['open', 'high', 'low', 'volume']
 | 
				
			||||||
                        # benchmarked in the 4-5 us range
 | 
					                    ]
 | 
				
			||||||
                        # for shm in [rt_shm, hist_shm]:
 | 
					 | 
				
			||||||
                        o, high, low, v = shm.array[-1][
 | 
					 | 
				
			||||||
                            ['open', 'high', 'low', 'volume']
 | 
					 | 
				
			||||||
                        ]
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        new_v = tick.get('size', 0)
 | 
					                    new_v = tick.get('size', 0)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        if v == 0 and new_v:
 | 
					                    if v == 0 and new_v:
 | 
				
			||||||
                            # no trades for this bar yet so the open
 | 
					                        # no trades for this bar yet so the open
 | 
				
			||||||
                            # is also the close/last trade price
 | 
					                        # is also the close/last trade price
 | 
				
			||||||
                            o = last
 | 
					                        o = last
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        if sum_tick_vlm:
 | 
					                    if sum_tick_vlm:
 | 
				
			||||||
                            volume = v + new_v
 | 
					                        volume = v + new_v
 | 
				
			||||||
                        else:
 | 
					                    else:
 | 
				
			||||||
                            # presume backend takes care of summing
 | 
					                        # presume backend takes care of summing
 | 
				
			||||||
                            # it's own vlm
 | 
					                        # it's own vlm
 | 
				
			||||||
                            volume = quote['volume']
 | 
					                        volume = quote['volume']
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        shm.array[[
 | 
					                    shm.array[[
 | 
				
			||||||
                            'open',
 | 
					                        'open',
 | 
				
			||||||
                            'high',
 | 
					                        'high',
 | 
				
			||||||
                            'low',
 | 
					                        'low',
 | 
				
			||||||
                            'close',
 | 
					                        'close',
 | 
				
			||||||
                            'bar_wap',  # can be optionally provided
 | 
					                        'bar_wap',  # can be optionally provided
 | 
				
			||||||
                            'volume',
 | 
					                        'volume',
 | 
				
			||||||
                        ]][-1] = (
 | 
					                    ]][-1] = (
 | 
				
			||||||
                            o,
 | 
					                        o,
 | 
				
			||||||
                            max(high, last),
 | 
					                        max(high, last),
 | 
				
			||||||
                            min(low, last),
 | 
					                        min(low, last),
 | 
				
			||||||
                            last,
 | 
					                        last,
 | 
				
			||||||
                            quote.get('bar_wap', 0),
 | 
					                        quote.get('bar_wap', 0),
 | 
				
			||||||
                            volume,
 | 
					                        volume,
 | 
				
			||||||
                        )
 | 
					                    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # XXX: we need to be very cautious here that no
 | 
					            # XXX: we need to be very cautious here that no
 | 
				
			||||||
            # context-channel is left lingering which doesn't have
 | 
					            # context-channel is left lingering which doesn't have
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -1,5 +1,5 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					# piker: trading gear for hackers
 | 
				
			||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 | 
					# Copyright (C) Tyler Goodlet (in stewardship for piker0)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					# This program is free software: you can redistribute it and/or modify
 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					# it under the terms of the GNU Affero General Public License as published by
 | 
				
			||||||
| 
						 | 
					@ -27,14 +27,13 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
 | 
				
			||||||
if _USE_POSIX:
 | 
					if _USE_POSIX:
 | 
				
			||||||
    from _posixshmem import shm_unlink
 | 
					    from _posixshmem import shm_unlink
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# import msgspec
 | 
					 | 
				
			||||||
import numpy as np
 | 
					 | 
				
			||||||
from numpy.lib import recfunctions as rfn
 | 
					 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
 | 
					import numpy as np
 | 
				
			||||||
 | 
					from pydantic import BaseModel
 | 
				
			||||||
 | 
					from numpy.lib import recfunctions as rfn
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from ._source import base_iohlc_dtype
 | 
					from ._source import base_iohlc_dtype
 | 
				
			||||||
from .types import Struct
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
| 
						 | 
					@ -50,11 +49,7 @@ _rt_buffer_start = int((_days_worth - 1) * _secs_in_day)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def cuckoff_mantracker():
 | 
					def cuckoff_mantracker():
 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Disable all ``multiprocessing``` "resource tracking" machinery since
 | 
					 | 
				
			||||||
    it's an absolute multi-threaded mess of non-SC madness.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    from multiprocessing import resource_tracker as mantracker
 | 
					    from multiprocessing import resource_tracker as mantracker
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Tell the "resource tracker" thing to fuck off.
 | 
					    # Tell the "resource tracker" thing to fuck off.
 | 
				
			||||||
| 
						 | 
					@ -112,39 +107,36 @@ class SharedInt:
 | 
				
			||||||
                log.warning(f'Shm for {name} already unlinked?')
 | 
					                log.warning(f'Shm for {name} already unlinked?')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class _Token(Struct, frozen=True):
 | 
					class _Token(BaseModel):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    Internal represenation of a shared memory "token"
 | 
					    Internal represenation of a shared memory "token"
 | 
				
			||||||
    which can be used to key a system wide post shm entry.
 | 
					    which can be used to key a system wide post shm entry.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
 | 
					    class Config:
 | 
				
			||||||
 | 
					        frozen = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    shm_name: str  # this servers as a "key" value
 | 
					    shm_name: str  # this servers as a "key" value
 | 
				
			||||||
    shm_first_index_name: str
 | 
					    shm_first_index_name: str
 | 
				
			||||||
    shm_last_index_name: str
 | 
					    shm_last_index_name: str
 | 
				
			||||||
    dtype_descr: tuple
 | 
					    dtype_descr: tuple
 | 
				
			||||||
    size: int  # in struct-array index / row terms
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @property
 | 
					    @property
 | 
				
			||||||
    def dtype(self) -> np.dtype:
 | 
					    def dtype(self) -> np.dtype:
 | 
				
			||||||
        return np.dtype(list(map(tuple, self.dtype_descr))).descr
 | 
					        return np.dtype(list(map(tuple, self.dtype_descr))).descr
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def as_msg(self):
 | 
					    def as_msg(self):
 | 
				
			||||||
        return self.to_dict()
 | 
					        return self.dict()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @classmethod
 | 
					    @classmethod
 | 
				
			||||||
    def from_msg(cls, msg: dict) -> _Token:
 | 
					    def from_msg(cls, msg: dict) -> _Token:
 | 
				
			||||||
        if isinstance(msg, _Token):
 | 
					        if isinstance(msg, _Token):
 | 
				
			||||||
            return msg
 | 
					            return msg
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: native struct decoding
 | 
					 | 
				
			||||||
        # return _token_dec.decode(msg)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
 | 
					        msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
 | 
				
			||||||
        return _Token(**msg)
 | 
					        return _Token(**msg)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# _token_dec = msgspec.msgpack.Decoder(_Token)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# TODO: this api?
 | 
					# TODO: this api?
 | 
				
			||||||
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
 | 
					# _known_tokens = tractor.ActorVar('_shm_tokens', {})
 | 
				
			||||||
# _known_tokens = tractor.ContextStack('_known_tokens', )
 | 
					# _known_tokens = tractor.ContextStack('_known_tokens', )
 | 
				
			||||||
| 
						 | 
					@ -163,7 +155,6 @@ def get_shm_token(key: str) -> _Token:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def _make_token(
 | 
					def _make_token(
 | 
				
			||||||
    key: str,
 | 
					    key: str,
 | 
				
			||||||
    size: int,
 | 
					 | 
				
			||||||
    dtype: Optional[np.dtype] = None,
 | 
					    dtype: Optional[np.dtype] = None,
 | 
				
			||||||
) -> _Token:
 | 
					) -> _Token:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -176,8 +167,7 @@ def _make_token(
 | 
				
			||||||
        shm_name=key,
 | 
					        shm_name=key,
 | 
				
			||||||
        shm_first_index_name=key + "_first",
 | 
					        shm_first_index_name=key + "_first",
 | 
				
			||||||
        shm_last_index_name=key + "_last",
 | 
					        shm_last_index_name=key + "_last",
 | 
				
			||||||
        dtype_descr=tuple(np.dtype(dtype).descr),
 | 
					        dtype_descr=np.dtype(dtype).descr
 | 
				
			||||||
        size=size,
 | 
					 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -229,7 +219,6 @@ class ShmArray:
 | 
				
			||||||
            shm_first_index_name=self._first._shm.name,
 | 
					            shm_first_index_name=self._first._shm.name,
 | 
				
			||||||
            shm_last_index_name=self._last._shm.name,
 | 
					            shm_last_index_name=self._last._shm.name,
 | 
				
			||||||
            dtype_descr=tuple(self._array.dtype.descr),
 | 
					            dtype_descr=tuple(self._array.dtype.descr),
 | 
				
			||||||
            size=self._len,
 | 
					 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @property
 | 
					    @property
 | 
				
			||||||
| 
						 | 
					@ -444,7 +433,7 @@ class ShmArray:
 | 
				
			||||||
def open_shm_array(
 | 
					def open_shm_array(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    key: Optional[str] = None,
 | 
					    key: Optional[str] = None,
 | 
				
			||||||
    size: int = _default_size,  # see above
 | 
					    size: int = _default_size,
 | 
				
			||||||
    dtype: Optional[np.dtype] = None,
 | 
					    dtype: Optional[np.dtype] = None,
 | 
				
			||||||
    readonly: bool = False,
 | 
					    readonly: bool = False,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -475,8 +464,7 @@ def open_shm_array(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    token = _make_token(
 | 
					    token = _make_token(
 | 
				
			||||||
        key=key,
 | 
					        key=key,
 | 
				
			||||||
        size=size,
 | 
					        dtype=dtype
 | 
				
			||||||
        dtype=dtype,
 | 
					 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # create single entry arrays for storing an first and last indices
 | 
					    # create single entry arrays for storing an first and last indices
 | 
				
			||||||
| 
						 | 
					@ -528,15 +516,15 @@ def open_shm_array(
 | 
				
			||||||
    # "unlink" created shm on process teardown by
 | 
					    # "unlink" created shm on process teardown by
 | 
				
			||||||
    # pushing teardown calls onto actor context stack
 | 
					    # pushing teardown calls onto actor context stack
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    stack = tractor.current_actor().lifetime_stack
 | 
					    tractor._actor._lifetime_stack.callback(shmarr.close)
 | 
				
			||||||
    stack.callback(shmarr.close)
 | 
					    tractor._actor._lifetime_stack.callback(shmarr.destroy)
 | 
				
			||||||
    stack.callback(shmarr.destroy)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    return shmarr
 | 
					    return shmarr
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def attach_shm_array(
 | 
					def attach_shm_array(
 | 
				
			||||||
    token: tuple[str, str, tuple[str, str]],
 | 
					    token: tuple[str, str, tuple[str, str]],
 | 
				
			||||||
 | 
					    size: int = _default_size,
 | 
				
			||||||
    readonly: bool = True,
 | 
					    readonly: bool = True,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> ShmArray:
 | 
					) -> ShmArray:
 | 
				
			||||||
| 
						 | 
					@ -575,7 +563,7 @@ def attach_shm_array(
 | 
				
			||||||
            raise _err
 | 
					            raise _err
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    shmarr = np.ndarray(
 | 
					    shmarr = np.ndarray(
 | 
				
			||||||
        (token.size,),
 | 
					        (size,),
 | 
				
			||||||
        dtype=token.dtype,
 | 
					        dtype=token.dtype,
 | 
				
			||||||
        buffer=shm.buf
 | 
					        buffer=shm.buf
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
| 
						 | 
					@ -614,8 +602,8 @@ def attach_shm_array(
 | 
				
			||||||
    if key not in _known_tokens:
 | 
					    if key not in _known_tokens:
 | 
				
			||||||
        _known_tokens[key] = token
 | 
					        _known_tokens[key] = token
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # "close" attached shm on actor teardown
 | 
					    # "close" attached shm on process teardown
 | 
				
			||||||
    tractor.current_actor().lifetime_stack.callback(sha.close)
 | 
					    tractor._actor._lifetime_stack.callback(sha.close)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    return sha
 | 
					    return sha
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -643,7 +631,6 @@ def maybe_open_shm_array(
 | 
				
			||||||
    use ``attach_shm_array``.
 | 
					    use ``attach_shm_array``.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    size = kwargs.pop('size', _default_size)
 | 
					 | 
				
			||||||
    try:
 | 
					    try:
 | 
				
			||||||
        # see if we already know this key
 | 
					        # see if we already know this key
 | 
				
			||||||
        token = _known_tokens[key]
 | 
					        token = _known_tokens[key]
 | 
				
			||||||
| 
						 | 
					@ -651,11 +638,7 @@ def maybe_open_shm_array(
 | 
				
			||||||
    except KeyError:
 | 
					    except KeyError:
 | 
				
			||||||
        log.warning(f"Could not find {key} in shms cache")
 | 
					        log.warning(f"Could not find {key} in shms cache")
 | 
				
			||||||
        if dtype:
 | 
					        if dtype:
 | 
				
			||||||
            token = _make_token(
 | 
					            token = _make_token(key, dtype)
 | 
				
			||||||
                key,
 | 
					 | 
				
			||||||
                size=size,
 | 
					 | 
				
			||||||
                dtype=dtype,
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
            try:
 | 
					            try:
 | 
				
			||||||
                return attach_shm_array(token=token, **kwargs), False
 | 
					                return attach_shm_array(token=token, **kwargs), False
 | 
				
			||||||
            except FileNotFoundError:
 | 
					            except FileNotFoundError:
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -23,7 +23,7 @@ import decimal
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from bidict import bidict
 | 
					from bidict import bidict
 | 
				
			||||||
import numpy as np
 | 
					import numpy as np
 | 
				
			||||||
from msgspec import Struct
 | 
					from pydantic import BaseModel
 | 
				
			||||||
# from numba import from_dtype
 | 
					# from numba import from_dtype
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -126,7 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class Symbol(Struct):
 | 
					class Symbol(BaseModel):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    I guess this is some kinda container thing for dealing with
 | 
					    I guess this is some kinda container thing for dealing with
 | 
				
			||||||
    all the different meta-data formats from brokers?
 | 
					    all the different meta-data formats from brokers?
 | 
				
			||||||
| 
						 | 
					@ -152,7 +152,9 @@ class Symbol(Struct):
 | 
				
			||||||
        info: dict[str, Any],
 | 
					        info: dict[str, Any],
 | 
				
			||||||
        suffix: str = '',
 | 
					        suffix: str = '',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> Symbol:
 | 
					    # XXX: like wtf..
 | 
				
			||||||
 | 
					    # ) -> 'Symbol':
 | 
				
			||||||
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        tick_size = info.get('price_tick_size', 0.01)
 | 
					        tick_size = info.get('price_tick_size', 0.01)
 | 
				
			||||||
        lot_tick_size = info.get('lot_tick_size', 0.0)
 | 
					        lot_tick_size = info.get('lot_tick_size', 0.0)
 | 
				
			||||||
| 
						 | 
					@ -173,7 +175,9 @@ class Symbol(Struct):
 | 
				
			||||||
        fqsn: str,
 | 
					        fqsn: str,
 | 
				
			||||||
        info: dict[str, Any],
 | 
					        info: dict[str, Any],
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> Symbol:
 | 
					    # XXX: like wtf..
 | 
				
			||||||
 | 
					    # ) -> 'Symbol':
 | 
				
			||||||
 | 
					    ) -> None:
 | 
				
			||||||
        broker, key, suffix = unpack_fqsn(fqsn)
 | 
					        broker, key, suffix = unpack_fqsn(fqsn)
 | 
				
			||||||
        return cls.from_broker_info(
 | 
					        return cls.from_broker_info(
 | 
				
			||||||
            broker,
 | 
					            broker,
 | 
				
			||||||
| 
						 | 
					@ -236,7 +240,7 @@ class Symbol(Struct):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        tokens = self.tokens()
 | 
					        tokens = self.tokens()
 | 
				
			||||||
        fqsn = '.'.join(map(str.lower, tokens))
 | 
					        fqsn = '.'.join(tokens)
 | 
				
			||||||
        return fqsn
 | 
					        return fqsn
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def iterfqsns(self) -> list[str]:
 | 
					    def iterfqsns(self) -> list[str]:
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -19,9 +19,8 @@ ToOlS fOr CoPInG wITh "tHE wEB" protocols.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from contextlib import asynccontextmanager, AsyncExitStack
 | 
					from contextlib import asynccontextmanager, AsyncExitStack
 | 
				
			||||||
from itertools import count
 | 
					 | 
				
			||||||
from types import ModuleType
 | 
					from types import ModuleType
 | 
				
			||||||
from typing import Any, Optional, Callable, AsyncGenerator
 | 
					from typing import Any, Callable, AsyncGenerator
 | 
				
			||||||
import json
 | 
					import json
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
| 
						 | 
					@ -36,8 +35,6 @@ from trio_websocket._impl import (
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from .types import Struct
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -56,11 +53,13 @@ class NoBsWs:
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        url: str,
 | 
					        url: str,
 | 
				
			||||||
 | 
					        token: str,
 | 
				
			||||||
        stack: AsyncExitStack,
 | 
					        stack: AsyncExitStack,
 | 
				
			||||||
        fixture: Optional[Callable] = None,
 | 
					        fixture: Callable,
 | 
				
			||||||
        serializer: ModuleType = json
 | 
					        serializer: ModuleType = json,
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        self.url = url
 | 
					        self.url = url
 | 
				
			||||||
 | 
					        self.token = token
 | 
				
			||||||
        self.fixture = fixture
 | 
					        self.fixture = fixture
 | 
				
			||||||
        self._stack = stack
 | 
					        self._stack = stack
 | 
				
			||||||
        self._ws: 'WebSocketConnection' = None  # noqa
 | 
					        self._ws: 'WebSocketConnection' = None  # noqa
 | 
				
			||||||
| 
						 | 
					@ -83,14 +82,17 @@ class NoBsWs:
 | 
				
			||||||
                self._ws = await self._stack.enter_async_context(
 | 
					                self._ws = await self._stack.enter_async_context(
 | 
				
			||||||
                    trio_websocket.open_websocket_url(self.url)
 | 
					                    trio_websocket.open_websocket_url(self.url)
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
 | 
					                # rerun user code fixture
 | 
				
			||||||
                if self.fixture is not None:
 | 
					                if self.token == '':
 | 
				
			||||||
                    # rerun user code fixture
 | 
					 | 
				
			||||||
                    ret = await self._stack.enter_async_context(
 | 
					                    ret = await self._stack.enter_async_context(
 | 
				
			||||||
                        self.fixture(self)
 | 
					                        self.fixture(self)
 | 
				
			||||||
                    )
 | 
					                    )
 | 
				
			||||||
 | 
					                else:
 | 
				
			||||||
 | 
					                    ret = await self._stack.enter_async_context(
 | 
				
			||||||
 | 
					                        self.fixture(self, self.token)
 | 
				
			||||||
 | 
					                    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    assert ret is None
 | 
					                assert ret is None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                log.info(f'Connection success: {self.url}')
 | 
					                log.info(f'Connection success: {self.url}')
 | 
				
			||||||
                return self._ws
 | 
					                return self._ws
 | 
				
			||||||
| 
						 | 
					@ -126,26 +128,21 @@ class NoBsWs:
 | 
				
			||||||
            except self.recon_errors:
 | 
					            except self.recon_errors:
 | 
				
			||||||
                await self._connect()
 | 
					                await self._connect()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def __aiter__(self):
 | 
					 | 
				
			||||||
        return self
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async def __anext__(self):
 | 
					 | 
				
			||||||
        return await self.recv_msg()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
@asynccontextmanager
 | 
					@asynccontextmanager
 | 
				
			||||||
async def open_autorecon_ws(
 | 
					async def open_autorecon_ws(
 | 
				
			||||||
    url: str,
 | 
					    url: str,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO: proper type annot smh
 | 
					    # TODO: proper type annot smh
 | 
				
			||||||
    fixture: Optional[Callable] = None,
 | 
					    fixture: Callable,
 | 
				
			||||||
 | 
					    # used for authenticated websockets
 | 
				
			||||||
 | 
					    token: str = '',
 | 
				
			||||||
) -> AsyncGenerator[tuple[...],  NoBsWs]:
 | 
					) -> AsyncGenerator[tuple[...],  NoBsWs]:
 | 
				
			||||||
    """Apparently we can QoS for all sorts of reasons..so catch em.
 | 
					    """Apparently we can QoS for all sorts of reasons..so catch em.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    """
 | 
					    """
 | 
				
			||||||
    async with AsyncExitStack() as stack:
 | 
					    async with AsyncExitStack() as stack:
 | 
				
			||||||
        ws = NoBsWs(url, stack, fixture=fixture)
 | 
					        ws = NoBsWs(url, token, stack, fixture=fixture)
 | 
				
			||||||
        await ws._connect()
 | 
					        await ws._connect()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        try:
 | 
					        try:
 | 
				
			||||||
| 
						 | 
					@ -153,86 +150,3 @@ async def open_autorecon_ws(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        finally:
 | 
					        finally:
 | 
				
			||||||
            await stack.aclose()
 | 
					            await stack.aclose()
 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
JSONRPC response-request style machinery for transparent multiplexing of msgs
 | 
					 | 
				
			||||||
over a NoBsWs.
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class JSONRPCResult(Struct):
 | 
					 | 
				
			||||||
    jsonrpc: str = '2.0'
 | 
					 | 
				
			||||||
    id: int
 | 
					 | 
				
			||||||
    result: Optional[dict] = None
 | 
					 | 
				
			||||||
    error: Optional[dict] = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@asynccontextmanager
 | 
					 | 
				
			||||||
async def open_jsonrpc_session(
 | 
					 | 
				
			||||||
    url: str,
 | 
					 | 
				
			||||||
    start_id: int = 0,
 | 
					 | 
				
			||||||
    dtype: type = JSONRPCResult
 | 
					 | 
				
			||||||
) -> Callable[[str, dict], dict]:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    async with (
 | 
					 | 
				
			||||||
        trio.open_nursery() as n,
 | 
					 | 
				
			||||||
        open_autorecon_ws(url) as ws
 | 
					 | 
				
			||||||
    ):
 | 
					 | 
				
			||||||
        rpc_id: Iterable = count(start_id)
 | 
					 | 
				
			||||||
        rpc_results: dict[int, dict] = {}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        async def json_rpc(method: str, params: dict) -> dict:
 | 
					 | 
				
			||||||
            '''
 | 
					 | 
				
			||||||
            perform a json rpc call and wait for the result, raise exception in
 | 
					 | 
				
			||||||
            case of error field present on response
 | 
					 | 
				
			||||||
            '''
 | 
					 | 
				
			||||||
            msg = {
 | 
					 | 
				
			||||||
                'jsonrpc': '2.0',
 | 
					 | 
				
			||||||
                'id': next(rpc_id),
 | 
					 | 
				
			||||||
                'method': method,
 | 
					 | 
				
			||||||
                'params': params
 | 
					 | 
				
			||||||
            }
 | 
					 | 
				
			||||||
            _id = msg['id']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            rpc_results[_id] = {
 | 
					 | 
				
			||||||
                'result': None,
 | 
					 | 
				
			||||||
                'event': trio.Event()
 | 
					 | 
				
			||||||
            }
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            await ws.send_msg(msg)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            await rpc_results[_id]['event'].wait()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            ret = rpc_results[_id]['result']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            del rpc_results[_id]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            if ret.error is not None:
 | 
					 | 
				
			||||||
                raise Exception(json.dumps(ret.error, indent=4))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            return ret
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        async def recv_task():
 | 
					 | 
				
			||||||
            '''
 | 
					 | 
				
			||||||
            receives every ws message and stores it in its corresponding result
 | 
					 | 
				
			||||||
            field, then sets the event to wakeup original sender tasks.
 | 
					 | 
				
			||||||
            '''
 | 
					 | 
				
			||||||
            async for msg in ws:
 | 
					 | 
				
			||||||
                msg = dtype(**msg)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                if msg.id not in rpc_results:
 | 
					 | 
				
			||||||
                    log.warning(f'Wasn\'t expecting ws msg: {json.dumps(msg, indent=4)}')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                res = rpc_results.setdefault(
 | 
					 | 
				
			||||||
                    msg.id,
 | 
					 | 
				
			||||||
                    {'result': None, 'event': trio.Event()}
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                res['result'] = msg
 | 
					 | 
				
			||||||
                res['event'].set()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        n.start_soon(recv_task)
 | 
					 | 
				
			||||||
        yield json_rpc
 | 
					 | 
				
			||||||
        n.cancel_scope.cancel()
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
							
								
								
									
										1029
									
								
								piker/data/feed.py
								
								
								
								
							
							
						
						
									
										1029
									
								
								piker/data/feed.py
								
								
								
								
							
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| 
						 | 
					@ -37,7 +37,7 @@ import time
 | 
				
			||||||
from math import isnan
 | 
					from math import isnan
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from bidict import bidict
 | 
					from bidict import bidict
 | 
				
			||||||
from msgspec.msgpack import encode, decode
 | 
					import msgpack
 | 
				
			||||||
import pyqtgraph as pg
 | 
					import pyqtgraph as pg
 | 
				
			||||||
import numpy as np
 | 
					import numpy as np
 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
| 
						 | 
					@ -56,7 +56,6 @@ if TYPE_CHECKING:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from .feed import maybe_open_feed
 | 
					from .feed import maybe_open_feed
 | 
				
			||||||
from ..log import get_logger, get_console_log
 | 
					from ..log import get_logger, get_console_log
 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
| 
						 | 
					@ -388,57 +387,50 @@ class Storage:
 | 
				
			||||||
    async def load(
 | 
					    async def load(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        fqsn: str,
 | 
					        fqsn: str,
 | 
				
			||||||
        timeframe: int,
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> tuple[
 | 
					    ) -> tuple[
 | 
				
			||||||
        np.ndarray,  # timeframe sampled array-series
 | 
					        dict[int, np.ndarray],  # timeframe (in secs) to series
 | 
				
			||||||
        Optional[datetime],  # first dt
 | 
					        Optional[datetime],  # first dt
 | 
				
			||||||
        Optional[datetime],  # last dt
 | 
					        Optional[datetime],  # last dt
 | 
				
			||||||
    ]:
 | 
					    ]:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        first_tsdb_dt, last_tsdb_dt = None, None
 | 
					        first_tsdb_dt, last_tsdb_dt = None, None
 | 
				
			||||||
        hist = await self.read_ohlcv(
 | 
					        tsdb_arrays = await self.read_ohlcv(
 | 
				
			||||||
            fqsn,
 | 
					            fqsn,
 | 
				
			||||||
            # on first load we don't need to pull the max
 | 
					            # on first load we don't need to pull the max
 | 
				
			||||||
            # history per request size worth.
 | 
					            # history per request size worth.
 | 
				
			||||||
            limit=3000,
 | 
					            limit=3000,
 | 
				
			||||||
            timeframe=timeframe,
 | 
					 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        log.info(f'Loaded tsdb history {hist}')
 | 
					        log.info(f'Loaded tsdb history {tsdb_arrays}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if len(hist):
 | 
					        if tsdb_arrays:
 | 
				
			||||||
            times = hist['Epoch']
 | 
					            fastest = list(tsdb_arrays.values())[0]
 | 
				
			||||||
 | 
					            times = fastest['Epoch']
 | 
				
			||||||
            first, last = times[0], times[-1]
 | 
					            first, last = times[0], times[-1]
 | 
				
			||||||
            first_tsdb_dt, last_tsdb_dt = map(
 | 
					            first_tsdb_dt, last_tsdb_dt = map(
 | 
				
			||||||
                pendulum.from_timestamp, [first, last]
 | 
					                pendulum.from_timestamp, [first, last]
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return (
 | 
					        return tsdb_arrays, first_tsdb_dt, last_tsdb_dt
 | 
				
			||||||
            hist,  # array-data
 | 
					 | 
				
			||||||
            first_tsdb_dt,  # start of query-frame
 | 
					 | 
				
			||||||
            last_tsdb_dt,  # most recent
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def read_ohlcv(
 | 
					    async def read_ohlcv(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        fqsn: str,
 | 
					        fqsn: str,
 | 
				
			||||||
        timeframe: int | str,
 | 
					        timeframe: Optional[Union[int, str]] = None,
 | 
				
			||||||
        end: Optional[int] = None,
 | 
					        end: Optional[int] = None,
 | 
				
			||||||
        limit: int = int(800e3),
 | 
					        limit: int = int(800e3),
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> dict[
 | 
					    ) -> tuple[
 | 
				
			||||||
        int,
 | 
					        MarketstoreClient,
 | 
				
			||||||
        Union[dict, np.ndarray],
 | 
					        Union[dict, np.ndarray]
 | 
				
			||||||
    ]:
 | 
					    ]:
 | 
				
			||||||
 | 
					 | 
				
			||||||
        client = self.client
 | 
					        client = self.client
 | 
				
			||||||
        syms = await client.list_symbols()
 | 
					        syms = await client.list_symbols()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if fqsn not in syms:
 | 
					        if fqsn not in syms:
 | 
				
			||||||
            return {}
 | 
					            return {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # use the provided timeframe or 1s by default
 | 
					        tfstr = tf_in_1s[1]
 | 
				
			||||||
        tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        params = Params(
 | 
					        params = Params(
 | 
				
			||||||
            symbols=fqsn,
 | 
					            symbols=fqsn,
 | 
				
			||||||
| 
						 | 
					@ -452,68 +444,58 @@ class Storage:
 | 
				
			||||||
            limit=limit,
 | 
					            limit=limit,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        try:
 | 
					        if timeframe is None:
 | 
				
			||||||
 | 
					            log.info(f'starting {fqsn} tsdb granularity scan..')
 | 
				
			||||||
 | 
					            # loop through and try to find highest granularity
 | 
				
			||||||
 | 
					            for tfstr in tf_in_1s.values():
 | 
				
			||||||
 | 
					                try:
 | 
				
			||||||
 | 
					                    log.info(f'querying for {tfstr}@{fqsn}')
 | 
				
			||||||
 | 
					                    params.set('timeframe', tfstr)
 | 
				
			||||||
 | 
					                    result = await client.query(params)
 | 
				
			||||||
 | 
					                    break
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                except purerpc.grpclib.exceptions.UnknownError:
 | 
				
			||||||
 | 
					                    # XXX: this is already logged by the container and
 | 
				
			||||||
 | 
					                    # thus shows up through `marketstored` logs relay.
 | 
				
			||||||
 | 
					                    # log.warning(f'{tfstr}@{fqsn} not found')
 | 
				
			||||||
 | 
					                    continue
 | 
				
			||||||
 | 
					            else:
 | 
				
			||||||
 | 
					                return {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        else:
 | 
				
			||||||
            result = await client.query(params)
 | 
					            result = await client.query(params)
 | 
				
			||||||
        except purerpc.grpclib.exceptions.UnknownError:
 | 
					 | 
				
			||||||
            # indicate there is no history for this timeframe
 | 
					 | 
				
			||||||
            return {}
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: it turns out column access on recarrays is actually slower:
 | 
					        # TODO: it turns out column access on recarrays is actually slower:
 | 
				
			||||||
        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
 | 
					        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
 | 
				
			||||||
        # it might make sense to make these structured arrays?
 | 
					        # it might make sense to make these structured arrays?
 | 
				
			||||||
        data_set = result.by_symbols()[fqsn]
 | 
					        # Fill out a `numpy` array-results map
 | 
				
			||||||
        array = data_set.array
 | 
					        arrays = {}
 | 
				
			||||||
 | 
					        for fqsn, data_set in result.by_symbols().items():
 | 
				
			||||||
 | 
					            arrays.setdefault(fqsn, {})[
 | 
				
			||||||
 | 
					                tf_in_1s.inverse[data_set.timeframe]
 | 
				
			||||||
 | 
					            ] = data_set.array
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # XXX: ensure sample rate is as expected
 | 
					        return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]
 | 
				
			||||||
        time = data_set.array['Epoch']
 | 
					 | 
				
			||||||
        if len(time) > 1:
 | 
					 | 
				
			||||||
            time_step = time[-1] - time[-2]
 | 
					 | 
				
			||||||
            ts = tf_in_1s.inverse[data_set.timeframe]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            if time_step != ts:
 | 
					 | 
				
			||||||
                log.warning(
 | 
					 | 
				
			||||||
                    f'MKTS BUG: wrong timeframe loaded: {time_step}'
 | 
					 | 
				
			||||||
                    'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG'
 | 
					 | 
				
			||||||
                    f'WIPING HISTORY FOR {ts}s'
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
                await self.delete_ts(fqsn, timeframe)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # try reading again..
 | 
					 | 
				
			||||||
                return await self.read_ohlcv(
 | 
					 | 
				
			||||||
                    fqsn,
 | 
					 | 
				
			||||||
                    timeframe,
 | 
					 | 
				
			||||||
                    end,
 | 
					 | 
				
			||||||
                    limit,
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        return array
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def delete_ts(
 | 
					    async def delete_ts(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        key: str,
 | 
					        key: str,
 | 
				
			||||||
        timeframe: Optional[Union[int, str]] = None,
 | 
					        timeframe: Optional[Union[int, str]] = None,
 | 
				
			||||||
        fmt: str = 'OHLCV',
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> bool:
 | 
					    ) -> bool:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        client = self.client
 | 
					        client = self.client
 | 
				
			||||||
        syms = await client.list_symbols()
 | 
					        syms = await client.list_symbols()
 | 
				
			||||||
        print(syms)
 | 
					        print(syms)
 | 
				
			||||||
        if key not in syms:
 | 
					        # if key not in syms:
 | 
				
			||||||
            raise KeyError(f'`{key}` table key not found in\n{syms}?')
 | 
					        #     raise KeyError(f'`{fqsn}` table key not found?')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        tbk = mk_tbk((
 | 
					        return await client.destroy(tbk=key)
 | 
				
			||||||
            key,
 | 
					 | 
				
			||||||
            tf_in_1s.get(timeframe, tf_in_1s[60]),
 | 
					 | 
				
			||||||
            fmt,
 | 
					 | 
				
			||||||
        ))
 | 
					 | 
				
			||||||
        return await client.destroy(tbk=tbk)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def write_ohlcv(
 | 
					    async def write_ohlcv(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        fqsn: str,
 | 
					        fqsn: str,
 | 
				
			||||||
        ohlcv: np.ndarray,
 | 
					        ohlcv: np.ndarray,
 | 
				
			||||||
        timeframe: int,
 | 
					 | 
				
			||||||
        append_and_duplicate: bool = True,
 | 
					        append_and_duplicate: bool = True,
 | 
				
			||||||
        limit: int = int(800e3),
 | 
					        limit: int = int(800e3),
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -537,18 +519,17 @@ class Storage:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        m, r = divmod(len(mkts_array), limit)
 | 
					        m, r = divmod(len(mkts_array), limit)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        tfkey = tf_in_1s[timeframe]
 | 
					 | 
				
			||||||
        for i in range(m, 1):
 | 
					        for i in range(m, 1):
 | 
				
			||||||
            to_push = mkts_array[i-1:i*limit]
 | 
					            to_push = mkts_array[i-1:i*limit]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # write to db
 | 
					            # write to db
 | 
				
			||||||
            resp = await self.client.write(
 | 
					            resp = await self.client.write(
 | 
				
			||||||
                to_push,
 | 
					                to_push,
 | 
				
			||||||
                tbk=f'{fqsn}/{tfkey}/OHLCV',
 | 
					                tbk=f'{fqsn}/1Sec/OHLCV',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # NOTE: will will append duplicates
 | 
					                # NOTE: will will append duplicates
 | 
				
			||||||
                # for the same timestamp-index.
 | 
					                # for the same timestamp-index.
 | 
				
			||||||
                # TODO: pre-deduplicate?
 | 
					                # TODO: pre deduplicate?
 | 
				
			||||||
                isvariablelength=append_and_duplicate,
 | 
					                isvariablelength=append_and_duplicate,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -567,7 +548,7 @@ class Storage:
 | 
				
			||||||
            # write to db
 | 
					            # write to db
 | 
				
			||||||
            resp = await self.client.write(
 | 
					            resp = await self.client.write(
 | 
				
			||||||
                to_push,
 | 
					                to_push,
 | 
				
			||||||
                tbk=f'{fqsn}/{tfkey}/OHLCV',
 | 
					                tbk=f'{fqsn}/1Sec/OHLCV',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # NOTE: will will append duplicates
 | 
					                # NOTE: will will append duplicates
 | 
				
			||||||
                # for the same timestamp-index.
 | 
					                # for the same timestamp-index.
 | 
				
			||||||
| 
						 | 
					@ -596,7 +577,6 @@ class Storage:
 | 
				
			||||||
    # def delete_range(self, start_dt, end_dt) -> None:
 | 
					    # def delete_range(self, start_dt, end_dt) -> None:
 | 
				
			||||||
    #     ...
 | 
					    #     ...
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					 | 
				
			||||||
@acm
 | 
					@acm
 | 
				
			||||||
async def open_storage_client(
 | 
					async def open_storage_client(
 | 
				
			||||||
    fqsn: str,
 | 
					    fqsn: str,
 | 
				
			||||||
| 
						 | 
					@ -646,7 +626,7 @@ async def tsdb_history_update(
 | 
				
			||||||
    #   * the original data feed arch blurb:
 | 
					    #   * the original data feed arch blurb:
 | 
				
			||||||
    #     - https://github.com/pikers/piker/issues/98
 | 
					    #     - https://github.com/pikers/piker/issues/98
 | 
				
			||||||
    #
 | 
					    #
 | 
				
			||||||
    profiler = Profiler(
 | 
					    profiler = pg.debug.Profiler(
 | 
				
			||||||
        disabled=False,  # not pg_profile_enabled(),
 | 
					        disabled=False,  # not pg_profile_enabled(),
 | 
				
			||||||
        delayed=False,
 | 
					        delayed=False,
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
| 
						 | 
					@ -662,8 +642,8 @@ async def tsdb_history_update(
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        profiler(f'opened feed for {fqsn}')
 | 
					        profiler(f'opened feed for {fqsn}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # to_append = feed.hist_shm.array
 | 
					        to_append = feed.shm.array
 | 
				
			||||||
        # to_prepend = None
 | 
					        to_prepend = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if fqsn:
 | 
					        if fqsn:
 | 
				
			||||||
            symbol = feed.symbols.get(fqsn)
 | 
					            symbol = feed.symbols.get(fqsn)
 | 
				
			||||||
| 
						 | 
					@ -671,21 +651,21 @@ async def tsdb_history_update(
 | 
				
			||||||
                fqsn = symbol.front_fqsn()
 | 
					                fqsn = symbol.front_fqsn()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # diff db history with shm and only write the missing portions
 | 
					            # diff db history with shm and only write the missing portions
 | 
				
			||||||
            # ohlcv = feed.hist_shm.array
 | 
					            ohlcv = feed.shm.array
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # TODO: use pg profiler
 | 
					            # TODO: use pg profiler
 | 
				
			||||||
            # for secs in (1, 60):
 | 
					            tsdb_arrays = await storage.read_ohlcv(fqsn)
 | 
				
			||||||
            #     tsdb_array = await storage.read_ohlcv(
 | 
					            # hist diffing
 | 
				
			||||||
            #         fqsn,
 | 
					            if tsdb_arrays:
 | 
				
			||||||
            #         timeframe=timeframe,
 | 
					                for secs in (1, 60):
 | 
				
			||||||
            #     )
 | 
					                    ts = tsdb_arrays.get(secs)
 | 
				
			||||||
            #     # hist diffing:
 | 
					                    if ts is not None and len(ts):
 | 
				
			||||||
            #     # these aren't currently used but can be referenced from
 | 
					                        # these aren't currently used but can be referenced from
 | 
				
			||||||
            #     # within the embedded ipython shell below.
 | 
					                        # within the embedded ipython shell below.
 | 
				
			||||||
            #     to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
 | 
					                        to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
 | 
				
			||||||
            #     to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
 | 
					                        to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # profiler('Finished db arrays diffs')
 | 
					            profiler('Finished db arrays diffs')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        syms = await storage.client.list_symbols()
 | 
					        syms = await storage.client.list_symbols()
 | 
				
			||||||
        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
 | 
					        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
 | 
				
			||||||
| 
						 | 
					@ -794,13 +774,12 @@ async def stream_quotes(
 | 
				
			||||||
    async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
 | 
					    async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
 | 
				
			||||||
        # send subs topics to server
 | 
					        # send subs topics to server
 | 
				
			||||||
        resp = await ws.send_message(
 | 
					        resp = await ws.send_message(
 | 
				
			||||||
 | 
					            msgpack.dumps({'streams': list(tbks.values())})
 | 
				
			||||||
            encode({'streams': list(tbks.values())})
 | 
					 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        log.info(resp)
 | 
					        log.info(resp)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        async def recv() -> dict[str, Any]:
 | 
					        async def recv() -> dict[str, Any]:
 | 
				
			||||||
            return decode((await ws.get_message()), encoding='utf-8')
 | 
					            return msgpack.loads((await ws.get_message()), encoding='utf-8')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        streams = (await recv())['streams']
 | 
					        streams = (await recv())['streams']
 | 
				
			||||||
        log.info(f"Subscribed to {streams}")
 | 
					        log.info(f"Subscribed to {streams}")
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -1,87 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
"""
 | 
					 | 
				
			||||||
Built-in (extension) types.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
"""
 | 
					 | 
				
			||||||
import sys
 | 
					 | 
				
			||||||
from typing import Optional
 | 
					 | 
				
			||||||
from pprint import pformat
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import msgspec
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class Struct(
 | 
					 | 
				
			||||||
    msgspec.Struct,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # https://jcristharif.com/msgspec/structs.html#tagged-unions
 | 
					 | 
				
			||||||
    # tag='pikerstruct',
 | 
					 | 
				
			||||||
    # tag=True,
 | 
					 | 
				
			||||||
):
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    A "human friendlier" (aka repl buddy) struct subtype.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    def to_dict(self) -> dict:
 | 
					 | 
				
			||||||
        return {
 | 
					 | 
				
			||||||
            f: getattr(self, f)
 | 
					 | 
				
			||||||
            for f in self.__struct_fields__
 | 
					 | 
				
			||||||
        }
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def __repr__(self):
 | 
					 | 
				
			||||||
        # only turn on pprint when we detect a python REPL
 | 
					 | 
				
			||||||
        # at runtime B)
 | 
					 | 
				
			||||||
        if (
 | 
					 | 
				
			||||||
            hasattr(sys, 'ps1')
 | 
					 | 
				
			||||||
            # TODO: check if we're in pdb
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            return self.pformat()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        return super().__repr__()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def pformat(self) -> str:
 | 
					 | 
				
			||||||
        return f'Struct({pformat(self.to_dict())})'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def copy(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        update: Optional[dict] = None,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> msgspec.Struct:
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        Validate-typecast all self defined fields, return a copy of us
 | 
					 | 
				
			||||||
        with all such fields.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        This is kinda like the default behaviour in `pydantic.BaseModel`.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        if update:
 | 
					 | 
				
			||||||
            for k, v in update.items():
 | 
					 | 
				
			||||||
                setattr(self, k, v)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # roundtrip serialize to validate
 | 
					 | 
				
			||||||
        return msgspec.msgpack.Decoder(
 | 
					 | 
				
			||||||
            type=type(self)
 | 
					 | 
				
			||||||
        ).decode(
 | 
					 | 
				
			||||||
            msgspec.msgpack.Encoder().encode(self)
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def typecast(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        # fields: Optional[list[str]] = None,
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					 | 
				
			||||||
        for fname, ftype in self.__annotations__.items():
 | 
					 | 
				
			||||||
            setattr(self, fname, ftype(getattr(self, fname)))
 | 
					 | 
				
			||||||
| 
						 | 
					@ -78,8 +78,7 @@ class Fsp:
 | 
				
			||||||
    # + the consuming fsp *to* the consumers output
 | 
					    # + the consuming fsp *to* the consumers output
 | 
				
			||||||
    # shm flow.
 | 
					    # shm flow.
 | 
				
			||||||
    _flow_registry: dict[
 | 
					    _flow_registry: dict[
 | 
				
			||||||
        tuple[_Token, str],
 | 
					        tuple[_Token, str], _Token,
 | 
				
			||||||
        tuple[_Token, Optional[ShmArray]],
 | 
					 | 
				
			||||||
    ] = {}
 | 
					    ] = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
| 
						 | 
					@ -121,6 +120,7 @@ class Fsp:
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        return self.func(*args, **kwargs)
 | 
					        return self.func(*args, **kwargs)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # TODO: lru_cache this? prettty sure it'll work?
 | 
				
			||||||
    def get_shm(
 | 
					    def get_shm(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        src_shm: ShmArray,
 | 
					        src_shm: ShmArray,
 | 
				
			||||||
| 
						 | 
					@ -131,27 +131,12 @@ class Fsp:
 | 
				
			||||||
        for this "instance" of a signal processor for
 | 
					        for this "instance" of a signal processor for
 | 
				
			||||||
        the given ``key``.
 | 
					        the given ``key``.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        The destination shm "token" and array are cached if possible to
 | 
					 | 
				
			||||||
        minimize multiple stdlib/system calls.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        dst_token, maybe_array = self._flow_registry[
 | 
					        dst_token = self._flow_registry[
 | 
				
			||||||
            (src_shm._token, self.name)
 | 
					            (src_shm._token, self.name)
 | 
				
			||||||
        ]
 | 
					        ]
 | 
				
			||||||
        if maybe_array is None:
 | 
					        shm = attach_shm_array(dst_token)
 | 
				
			||||||
            self._flow_registry[
 | 
					        return shm
 | 
				
			||||||
                (src_shm._token, self.name)
 | 
					 | 
				
			||||||
            ] = (
 | 
					 | 
				
			||||||
                dst_token,
 | 
					 | 
				
			||||||
                # "cache" the ``ShmArray`` such that
 | 
					 | 
				
			||||||
                # we call the underlying "attach" code as few
 | 
					 | 
				
			||||||
                # times as possible as per:
 | 
					 | 
				
			||||||
                # - https://github.com/pikers/piker/issues/359
 | 
					 | 
				
			||||||
                # - https://github.com/pikers/piker/issues/332
 | 
					 | 
				
			||||||
                maybe_array := attach_shm_array(dst_token)
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        return maybe_array
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def fsp(
 | 
					def fsp(
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -37,14 +37,12 @@ from .. import data
 | 
				
			||||||
from ..data import attach_shm_array
 | 
					from ..data import attach_shm_array
 | 
				
			||||||
from ..data.feed import Feed
 | 
					from ..data.feed import Feed
 | 
				
			||||||
from ..data._sharedmem import ShmArray
 | 
					from ..data._sharedmem import ShmArray
 | 
				
			||||||
from ..data._sampling import _default_delay_s
 | 
					 | 
				
			||||||
from ..data._source import Symbol
 | 
					from ..data._source import Symbol
 | 
				
			||||||
from ._api import (
 | 
					from ._api import (
 | 
				
			||||||
    Fsp,
 | 
					    Fsp,
 | 
				
			||||||
    _load_builtins,
 | 
					    _load_builtins,
 | 
				
			||||||
    _Token,
 | 
					    _Token,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -92,7 +90,7 @@ async def fsp_compute(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    profiler = Profiler(
 | 
					    profiler = pg.debug.Profiler(
 | 
				
			||||||
        delayed=False,
 | 
					        delayed=False,
 | 
				
			||||||
        disabled=True
 | 
					        disabled=True
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
| 
						 | 
					@ -107,7 +105,7 @@ async def fsp_compute(
 | 
				
			||||||
        filter_quotes_by_sym(fqsn, quote_stream),
 | 
					        filter_quotes_by_sym(fqsn, quote_stream),
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # XXX: currently the ``ohlcv`` arg
 | 
					        # XXX: currently the ``ohlcv`` arg
 | 
				
			||||||
        feed.rt_shm,
 | 
					        feed.shm,
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # Conduct a single iteration of fsp with historical bars input
 | 
					    # Conduct a single iteration of fsp with historical bars input
 | 
				
			||||||
| 
						 | 
					@ -116,7 +114,7 @@ async def fsp_compute(
 | 
				
			||||||
        dict[str, np.ndarray],  # multi-output case
 | 
					        dict[str, np.ndarray],  # multi-output case
 | 
				
			||||||
        np.ndarray,  # single output case
 | 
					        np.ndarray,  # single output case
 | 
				
			||||||
    ]
 | 
					    ]
 | 
				
			||||||
    history_output = await anext(out_stream)
 | 
					    history_output = await out_stream.__anext__()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    func_name = func.__name__
 | 
					    func_name = func.__name__
 | 
				
			||||||
    profiler(f'{func_name} generated history')
 | 
					    profiler(f'{func_name} generated history')
 | 
				
			||||||
| 
						 | 
					@ -263,7 +261,7 @@ async def cascade(
 | 
				
			||||||
    destination shm array buffer.
 | 
					    destination shm array buffer.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    profiler = Profiler(
 | 
					    profiler = pg.debug.Profiler(
 | 
				
			||||||
        delayed=False,
 | 
					        delayed=False,
 | 
				
			||||||
        disabled=False
 | 
					        disabled=False
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
| 
						 | 
					@ -286,10 +284,9 @@ async def cascade(
 | 
				
			||||||
    # TODO: ugh i hate this wind/unwind to list over the wire
 | 
					    # TODO: ugh i hate this wind/unwind to list over the wire
 | 
				
			||||||
    # but not sure how else to do it.
 | 
					    # but not sure how else to do it.
 | 
				
			||||||
    for (token, fsp_name, dst_token) in shm_registry:
 | 
					    for (token, fsp_name, dst_token) in shm_registry:
 | 
				
			||||||
        Fsp._flow_registry[(
 | 
					        Fsp._flow_registry[
 | 
				
			||||||
            _Token.from_msg(token),
 | 
					            (_Token.from_msg(token), fsp_name)
 | 
				
			||||||
            fsp_name,
 | 
					        ] = _Token.from_msg(dst_token)
 | 
				
			||||||
        )] = _Token.from_msg(dst_token), None
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    fsp: Fsp = reg.get(
 | 
					    fsp: Fsp = reg.get(
 | 
				
			||||||
        NamespacePath(ns_path)
 | 
					        NamespacePath(ns_path)
 | 
				
			||||||
| 
						 | 
					@ -315,7 +312,7 @@ async def cascade(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        profiler(f'{func}: feed up')
 | 
					        profiler(f'{func}: feed up')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        assert src.token == feed.rt_shm.token
 | 
					        assert src.token == feed.shm.token
 | 
				
			||||||
        # last_len = new_len = len(src.array)
 | 
					        # last_len = new_len = len(src.array)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        func_name = func.__name__
 | 
					        func_name = func.__name__
 | 
				
			||||||
| 
						 | 
					@ -377,8 +374,7 @@ async def cascade(
 | 
				
			||||||
                            'key': dst_shm_token,
 | 
					                            'key': dst_shm_token,
 | 
				
			||||||
                            'first': dst._first.value,
 | 
					                            'first': dst._first.value,
 | 
				
			||||||
                            'last': dst._last.value,
 | 
					                            'last': dst._last.value,
 | 
				
			||||||
                        }
 | 
					                    }})
 | 
				
			||||||
                    })
 | 
					 | 
				
			||||||
                    return tracker, index
 | 
					                    return tracker, index
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                def is_synced(
 | 
					                def is_synced(
 | 
				
			||||||
| 
						 | 
					@ -422,11 +418,7 @@ async def cascade(
 | 
				
			||||||
                # detect sample period step for subscription to increment
 | 
					                # detect sample period step for subscription to increment
 | 
				
			||||||
                # signal
 | 
					                # signal
 | 
				
			||||||
                times = src.array['time']
 | 
					                times = src.array['time']
 | 
				
			||||||
                if len(times) > 1:
 | 
					                delay_s = times[-1] - times[times != times[-1]][-1]
 | 
				
			||||||
                    delay_s = times[-1] - times[times != times[-1]][-1]
 | 
					 | 
				
			||||||
                else:
 | 
					 | 
				
			||||||
                    # our default "HFT" sample rate.
 | 
					 | 
				
			||||||
                    delay_s = _default_delay_s
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # Increment the underlying shared memory buffer on every
 | 
					                # Increment the underlying shared memory buffer on every
 | 
				
			||||||
                # "increment" msg received from the underlying data feed.
 | 
					                # "increment" msg received from the underlying data feed.
 | 
				
			||||||
| 
						 | 
					@ -437,8 +429,7 @@ async def cascade(
 | 
				
			||||||
                    profiler(f'{func_name}: sample stream up')
 | 
					                    profiler(f'{func_name}: sample stream up')
 | 
				
			||||||
                    profiler.finish()
 | 
					                    profiler.finish()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    async for i in istream:
 | 
					                    async for _ in istream:
 | 
				
			||||||
                        # log.runtime(f'FSP incrementing {i}')
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        # respawn the compute task if the source
 | 
					                        # respawn the compute task if the source
 | 
				
			||||||
                        # array has been updated such that we compute
 | 
					                        # array has been updated such that we compute
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
							
								
								
									
										975
									
								
								piker/pp.py
								
								
								
								
							
							
						
						
									
										975
									
								
								piker/pp.py
								
								
								
								
							| 
						 | 
					@ -1,975 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
Personal/Private position parsing, calculating, summarizing in a way
 | 
					 | 
				
			||||||
that doesn't try to cuk most humans who prefer to not lose their moneys..
 | 
					 | 
				
			||||||
(looking at you `ib` and dirt-bird friends)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
'''
 | 
					 | 
				
			||||||
from contextlib import contextmanager as cm
 | 
					 | 
				
			||||||
from pprint import pformat
 | 
					 | 
				
			||||||
import os
 | 
					 | 
				
			||||||
from os import path
 | 
					 | 
				
			||||||
from math import copysign
 | 
					 | 
				
			||||||
import re
 | 
					 | 
				
			||||||
import time
 | 
					 | 
				
			||||||
from typing import (
 | 
					 | 
				
			||||||
    Any,
 | 
					 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Union,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import pendulum
 | 
					 | 
				
			||||||
from pendulum import datetime, now
 | 
					 | 
				
			||||||
import tomli
 | 
					 | 
				
			||||||
import toml
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from . import config
 | 
					 | 
				
			||||||
from .brokers import get_brokermod
 | 
					 | 
				
			||||||
from .clearing._messages import BrokerdPosition, Status
 | 
					 | 
				
			||||||
from .data._source import Symbol
 | 
					 | 
				
			||||||
from .log import get_logger
 | 
					 | 
				
			||||||
from .data.types import Struct
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
log = get_logger(__name__)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@cm
 | 
					 | 
				
			||||||
def open_trade_ledger(
 | 
					 | 
				
			||||||
    broker: str,
 | 
					 | 
				
			||||||
    account: str,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> str:
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Indempotently create and read in a trade log file from the
 | 
					 | 
				
			||||||
    ``<configuration_dir>/ledgers/`` directory.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    Files are named per broker account of the form
 | 
					 | 
				
			||||||
    ``<brokername>_<accountname>.toml``. The ``accountname`` here is the
 | 
					 | 
				
			||||||
    name as defined in the user's ``brokers.toml`` config.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    ldir = path.join(config._config_dir, 'ledgers')
 | 
					 | 
				
			||||||
    if not path.isdir(ldir):
 | 
					 | 
				
			||||||
        os.makedirs(ldir)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    fname = f'trades_{broker}_{account}.toml'
 | 
					 | 
				
			||||||
    tradesfile = path.join(ldir, fname)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if not path.isfile(tradesfile):
 | 
					 | 
				
			||||||
        log.info(
 | 
					 | 
				
			||||||
            f'Creating new local trades ledger: {tradesfile}'
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
        with open(tradesfile, 'w') as cf:
 | 
					 | 
				
			||||||
            pass  # touch
 | 
					 | 
				
			||||||
    with open(tradesfile, 'rb') as cf:
 | 
					 | 
				
			||||||
        start = time.time()
 | 
					 | 
				
			||||||
        ledger = tomli.load(cf)
 | 
					 | 
				
			||||||
        print(f'Ledger load took {time.time() - start}s')
 | 
					 | 
				
			||||||
        cpy = ledger.copy()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    try:
 | 
					 | 
				
			||||||
        yield cpy
 | 
					 | 
				
			||||||
    finally:
 | 
					 | 
				
			||||||
        if cpy != ledger:
 | 
					 | 
				
			||||||
            # TODO: show diff output?
 | 
					 | 
				
			||||||
            # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
 | 
					 | 
				
			||||||
            print(f'Updating ledger for {tradesfile}:\n')
 | 
					 | 
				
			||||||
            ledger.update(cpy)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # we write on close the mutated ledger data
 | 
					 | 
				
			||||||
            with open(tradesfile, 'w') as cf:
 | 
					 | 
				
			||||||
                toml.dump(ledger, cf)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class Transaction(Struct, frozen=True):
 | 
					 | 
				
			||||||
    # TODO: should this be ``.to`` (see below)?
 | 
					 | 
				
			||||||
    fqsn: str
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    tid: Union[str, int]  # unique transaction id
 | 
					 | 
				
			||||||
    size: float
 | 
					 | 
				
			||||||
    price: float
 | 
					 | 
				
			||||||
    cost: float  # commisions or other additional costs
 | 
					 | 
				
			||||||
    dt: datetime
 | 
					 | 
				
			||||||
    expiry: Optional[datetime] = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # optional key normally derived from the broker
 | 
					 | 
				
			||||||
    # backend which ensures the instrument-symbol this record
 | 
					 | 
				
			||||||
    # is for is truly unique.
 | 
					 | 
				
			||||||
    bsuid: Optional[Union[str, int]] = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # optional fqsn for the source "asset"/money symbol?
 | 
					 | 
				
			||||||
    # from: Optional[str] = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class Position(Struct):
    '''
    Basic pp (personal/piker position) model with attached clearing
    transaction history.

    Tracks the accumulated state of all clears (fills) for a single
    instrument: the current signed size, the rolling "price-per-unit"
    breakeven, and the per-trade clearing table used to (re)derive both.

    '''
    symbol: Symbol

    # can be +ve or -ve for long/short
    size: float

    # "breakeven price" above or below which pnl moves above and below
    # zero for the entirety of the current "trade state".
    ppu: float

    # unique backend symbol id
    bsuid: str

    # manually entered stock-split ratio; ``None`` means no split
    # adjustment is applied in size/ppu calcs.
    split_ratio: Optional[int] = None

    # ordered record of known constituent trade messages
    clears: dict[
        Union[str, int, Status],  # trade id
        dict[str, Any],  # transaction history summaries
    ] = {}

    # timestamp of the earliest entry in ``.clears``
    first_clear_dt: Optional[datetime] = None

    # contract expiry; ``None`` for non-expiring instruments
    expiry: Optional[datetime] = None

    def to_dict(self) -> dict:
        '''
        Return all struct fields as a flat ``dict``.

        '''
        return {
            f: getattr(self, f)
            for f in self.__struct_fields__
        }

    def to_pretoml(self) -> tuple[str, dict]:
        '''
        Prep this position's data contents for export to toml including
        re-structuring of the ``.clears`` table to an array of
        inline-subtables for better ``pps.toml`` compactness.

        Returns the fqsn key for the toml section and the serializable
        ``dict`` body.

        '''
        d = self.to_dict()
        clears = d.pop('clears')
        expiry = d.pop('expiry')

        # omit optional fields that carry no information
        if self.split_ratio is None:
            d.pop('split_ratio')

        # should be obvious from clears/event table
        d.pop('first_clear_dt')

        # TODO: we need to figure out how to have one top level
        # listing venue here even when the backend isn't providing
        # it via the trades ledger..
        # drop symbol obj in serialized form
        s = d.pop('symbol')
        fqsn = s.front_fqsn()

        if self.expiry is None:
            d.pop('expiry', None)
        elif expiry:
            d['expiry'] = str(expiry)

        toml_clears_list = []

        # reverse sort so latest clears are at top of section?
        for tid, data in sorted(
            list(clears.items()),

            # sort by datetime
            key=lambda item: item[1]['dt'],
        ):
            inline_table = toml.TomlDecoder().get_empty_inline_table()

            # serialize datetime to parsable `str`
            inline_table['dt'] = str(data['dt'])

            # insert optional clear fields in column order
            for k in ['ppu', 'accum_size']:
                val = data.get(k)
                if val:
                    inline_table[k] = val

            # insert required fields
            for k in ['price', 'size', 'cost']:
                inline_table[k] = data[k]

            inline_table['tid'] = tid
            toml_clears_list.append(inline_table)

        d['clears'] = toml_clears_list

        return fqsn, d

    def ensure_state(self) -> None:
        '''
        Audit either the `.size` and `.ppu` local instance vars against
        the clears table calculations and return the calc-ed values if
        they differ and log warnings to console.

        Raises ``ValueError`` when the recomputed size/ppu cannot be
        reconciled with the last clear's stored summaries.

        '''
        clears = list(self.clears.values())
        self.first_clear_dt = min(entry['dt'] for entry in clears)
        last_clear = clears[-1]

        csize = self.calc_size()
        accum = last_clear['accum_size']
        if not self.expired():
            if (
                csize != accum
                # XXX: fix precedence bug; previously this read
                # ``accum * self.split_ratio or 1`` which raises
                # ``TypeError`` whenever ``split_ratio`` is ``None``.
                and csize != round(accum * (self.split_ratio or 1))
            ):
                raise ValueError(f'Size mismatch: {csize}')
        else:
            assert csize == 0, 'Contract is expired but non-zero size?'

        if self.size != csize:
            log.warning(
                'Position state mismatch:\n'
                f'{self.size} => {csize}'
            )
            self.size = csize

        cppu = self.calc_ppu()
        ppu = last_clear['ppu']
        if (
            cppu != ppu
            and self.split_ratio is not None

            # handle any split info entered (for now) manually by user
            and cppu != (ppu / self.split_ratio)
        ):
            raise ValueError(f'PPU mismatch: {cppu}')

        if self.ppu != cppu:
            log.warning(
                'Position state mismatch:\n'
                f'{self.ppu} => {cppu}'
            )
            self.ppu = cppu

    def update_from_msg(
        self,
        msg: BrokerdPosition,

    ) -> None:
        '''
        Overwrite local ``.ppu``/``.size`` from a broker position msg,
        rounding to the symbol's tick/lot digit precision.

        NOTE(review): assumes ``msg`` is mapping-like with
        ``'avg_price'`` and ``'size'`` keys — confirm against
        ``BrokerdPosition``'s definition.

        '''
        # XXX: better place to do this?
        symbol = self.symbol

        lot_size_digits = symbol.lot_size_digits
        ppu, size = (
            round(
                msg['avg_price'],
                ndigits=symbol.tick_size_digits
            ),
            round(
                msg['size'],
                ndigits=lot_size_digits
            ),
        )

        self.ppu = ppu
        self.size = size

    @property
    def dsize(self) -> float:
        '''
        The "dollar" size of the pp, normally in trading (fiat) unit
        terms.

        '''
        return self.ppu * self.size

    # TODO: idea: "real LIFO" dynamic positioning.
    # - when a trade takes place where the pnl for
    # the (set of) trade(s) is below the breakeven price
    # it may be that the trader took a +ve pnl on a short(er)
    # term trade in the same account.
    # - in this case we could recalc the be price to
    # be reverted back to it's prior value before the nearest term
    # trade was opened.?
    # def lifo_price() -> float:
    #     ...

    def calc_ppu(
        self,

        # include transaction cost in breakeven price
        # and presume the worst case of the same cost
        # to exit this transaction (even though in reality
        # it will be dynamic based on exit strategy).
        cost_scalar: float = 2,

    ) -> float:
        '''
        Compute the "price-per-unit" price for the given non-zero sized
        rolling position.

        The recurrence relation which computes this (exponential) mean
        per new clear which **increases** the accumulative position size
        is:

        ppu[-1] = (
            ppu[-2] * accum_size[-2]
            +
            ppu[-1] * size
        ) / accum_size[-1]

        where `cost_basis` for the current step is simply the price
        * size of the most recent clearing transaction.

        '''
        asize_h: list[float] = []  # historical accumulative size
        ppu_h: list[float] = []  # historical price-per-unit

        # iterate clears in insertion (ledger) order; the trade id is
        # not needed for the calc.
        for entry in self.clears.values():

            clear_size = entry['size']
            clear_price = entry['price']

            last_accum_size = asize_h[-1] if asize_h else 0
            accum_size = last_accum_size + clear_size
            accum_sign = copysign(1, accum_size)

            if accum_size == 0:
                ppu_h.append(0)
                asize_h.append(0)
                continue

            # test if the pp somehow went "passed" a net zero size state
            # resulting in a change of the "sign" of the size (+ve for
            # long, -ve for short).
            sign_change = (
                copysign(1, last_accum_size) + accum_sign == 0
                and last_accum_size != 0
            )

            # since we passed the net-zero-size state the new size
            # after sum should be the remaining size the new
            # "direction" (aka, long vs. short) for this clear.
            if sign_change:
                clear_size = accum_size
                abs_diff = abs(accum_size)
                asize_h.append(0)
                ppu_h.append(0)

            else:
                # old size minus the new size gives us size diff with
                # +ve -> increase in pp size
                # -ve -> decrease in pp size
                abs_diff = abs(accum_size) - abs(last_accum_size)

            # XXX: LIFO breakeven price update. only an increase in size
            # of the position contributes the breakeven price,
            # a decrease does not (i.e. the position is being made
            # smaller).
            # abs_clear_size = abs(clear_size)
            abs_new_size = abs(accum_size)

            if abs_diff > 0:

                cost_basis = (
                    # cost basis for this clear
                    clear_price * abs(clear_size)
                    +
                    # transaction cost
                    accum_sign * cost_scalar * entry['cost']
                )

                if asize_h:
                    size_last = abs(asize_h[-1])
                    cb_last = ppu_h[-1] * size_last
                    ppu = (cost_basis + cb_last) / abs_new_size

                else:
                    ppu = cost_basis / abs_new_size

                ppu_h.append(ppu)
                asize_h.append(accum_size)

            else:
                # on "exit" clears from a given direction,
                # only the size changes not the price-per-unit
                # need to be updated since the ppu remains constant
                # and gets weighted by the new size.
                asize_h.append(accum_size)
                ppu_h.append(ppu_h[-1])

        final_ppu = ppu_h[-1] if ppu_h else 0

        # handle any split info entered (for now) manually by user
        if self.split_ratio is not None:
            final_ppu /= self.split_ratio

        return final_ppu

    def expired(self) -> bool:
        '''
        Predicate which checks if the contract/instrument is past its expiry.

        '''
        return bool(self.expiry) and self.expiry < now()

    def calc_size(self) -> float:
        '''
        Calculate the unit size of this position in the destination
        asset using the clears/trade event table; zero if expired.

        '''
        # time-expired pps (normally derivatives) are "closed"
        # and have a zero size.
        if self.expired():
            return 0

        size: float = sum(
            entry['size'] for entry in self.clears.values()
        )

        if self.split_ratio is not None:
            size = round(size * self.split_ratio)

        return size

    def minimize_clears(
        self,

    ) -> dict[str, dict]:
        '''
        Minimize the position's clears entries by removing
        all transactions before the last net zero size to avoid
        unecessary history irrelevant to the current pp state.

        '''
        size: float = 0

        # XXX: fix annotation typo; was ``list[tuple(str, dict)]``
        # which is a (broken) call, not a subscript.
        clears_since_zero: list[tuple[str, dict]] = []

        # TODO: we might just want to always do this when iterating
        # a ledger? keep a state of the last net-zero and only do the
        # full iterate when no state was stashed?

        # scan for the last "net zero" position by iterating
        # transactions until the next net-zero size, rinse, repeat.
        for tid, clear in self.clears.items():
            size += clear['size']
            clears_since_zero.append((tid, clear))

            if size == 0:
                clears_since_zero.clear()

        self.clears = dict(clears_since_zero)
        return self.clears

    def add_clear(
        self,
        t: Transaction,
    ) -> dict:
        '''
        Update clearing table and populate rolling ppu and accumulative
        size in both the clears entry and local attrs state.

        '''
        clear = self.clears[t.tid] = {
            'cost': t.cost,
            'price': t.price,
            'size': t.size,
            'dt': t.dt,
        }

        # TODO: compute these incrementally instead
        # of re-looping through each time resulting in O(n**2)
        # behaviour..?

        # NOTE: we compute these **after** adding the entry in order to
        # make the recurrence relation math work inside
        # ``.calc_size()``.
        self.size = clear['accum_size'] = self.calc_size()
        self.ppu = clear['ppu'] = self.calc_ppu()

        return clear

    # TODO(review): name is a typo of "suggest_split" but kept since
    # renaming would break any external callers; stub, unimplemented.
    def sugest_split(self) -> float:
        ...
class PpTable(Struct):
    '''
    In-memory table of broker-account positions keyed by backend
    symbol id (``bsuid``), with helpers to fold in new transactions
    and (de)serialize to the user's ``pps.toml``.

    '''
    brokername: str
    acctid: str
    pps: dict[str, Position]

    # loaded ``pps.toml`` contents; written back by ``.write_config()``
    conf: Optional[dict] = {}

    def update_from_trans(
        self,
        trans: dict[str, Transaction],
        cost_scalar: float = 2,

    ) -> dict[str, Position]:
        '''
        Fold the given transactions into the table's positions,
        allocating fresh ``Position`` entries as needed, and return the
        mapping of only those positions which were actually updated.

        '''
        pps = self.pps
        updated: dict[str, Position] = {}

        # lifo update all pps from records; keys are tids which are
        # not needed here since each ``Transaction`` carries its own.
        for t in trans.values():

            pp = pps.setdefault(
                t.bsuid,

                # if no existing pp, allocate fresh one.
                Position(
                    Symbol.from_fqsn(
                        t.fqsn,
                        info={},
                    ),
                    size=0.0,
                    ppu=0.0,
                    bsuid=t.bsuid,
                    expiry=t.expiry,
                )
            )
            clears = pp.clears
            if clears:
                first_clear_dt = pp.first_clear_dt

                # don't do updates for ledger records we already have
                # included in the current pps state.
                if (
                    t.tid in clears
                    or first_clear_dt and t.dt < first_clear_dt
                ):
                    # NOTE: likely you'll see repeats of the same
                    # ``Transaction`` passed in here if/when you are restarting
                    # a ``brokerd.ib`` where the API will re-report trades from
                    # the current session, so we need to make sure we don't
                    # "double count" these in pp calculations.
                    continue

            # update clearing table
            pp.add_clear(t)
            updated[t.bsuid] = pp

        # audit (and sync) sizing/ppu state for each touched pp.
        for pp in updated.values():
            pp.ensure_state()

        return updated

    def dump_active(
        self,
    ) -> tuple[
        dict[str, Position],
        dict[str, Position]
    ]:
        '''
        Iterate all tabulated positions, render active positions to
        a ``dict`` format amenable to serialization (via TOML) and drop
        from state (``.pps``) as well as return in a ``dict`` all
        ``Position``s which have recently closed.

        '''
        # NOTE: newly closed position are also important to report/return
        # since a consumer, like an order mode UI ;), might want to react
        # based on the closure (for example removing the breakeven line
        # and clearing the entry from any lists/monitors).
        closed_pp_objs: dict[str, Position] = {}
        open_pp_objs: dict[str, Position] = {}

        pp_objs = self.pps
        for bsuid in list(pp_objs):
            pp = pp_objs[bsuid]

            # XXX: debug hook for size mismatches
            # qqqbsuid = 320227571
            # if bsuid == qqqbsuid:
            #     breakpoint()

            pp.ensure_state()

            if (
                # "net-zero" is a "closed" position
                pp.size == 0

                # time-expired pps (normally derivatives) are "closed"
                or (pp.expiry and pp.expiry < now())
            ):
                # for expired cases
                pp.size = 0

                # NOTE: we DO NOT pop the pp here since it can still be
                # used to check for duplicate clears that may come in as
                # new transaction from some backend API and need to be
                # ignored; the closed positions won't be written to the
                # ``pps.toml`` since ``pp_active_entries`` above is what's
                # written.
                closed_pp_objs[bsuid] = pp

            else:
                open_pp_objs[bsuid] = pp

        return open_pp_objs, closed_pp_objs

    def to_toml(
        self,
    ) -> dict[str, Any]:
        '''
        Render all **active** positions to a toml-serializable ``dict``
        keyed by fqsn (with the broker-name prefix stripped).

        '''
        active, closed = self.dump_active()

        # ONLY dict-serialize all active positions; those that are closed
        # we don't store in the ``pps.toml``.
        to_toml_dict = {}

        for bsuid, pos in active.items():

            # keep the minimal amount of clears that make up this
            # position since the last net-zero state.
            pos.minimize_clears()
            pos.ensure_state()

            # serialize to pre-toml form
            fqsn, asdict = pos.to_pretoml()
            log.info(f'Updating active pp: {fqsn}')

            # XXX: ugh, it's cuz we push the section under
            # the broker name.. maybe we need to rethink this?
            brokerless_key = fqsn.removeprefix(f'{self.brokername}.')
            to_toml_dict[brokerless_key] = asdict

        return to_toml_dict

    def write_config(self) -> None:
        '''
        Write the current position table to the user's ``pps.toml``.

        '''
        # TODO: show diff output?
        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
        # XXX: fix ``NameError``; previously referenced an undefined
        # ``path`` var, report the broker/account section instead.
        print(f'Updating ``pps.toml`` for {self.brokername}:{self.acctid}:\n')

        # active, closed_pp_objs = table.dump_active()
        pp_entries = self.to_toml()
        self.conf[self.brokername][self.acctid] = pp_entries

        # TODO: why tf haven't they already done this for inline
        # tables smh..
        enc = PpsEncoder(preserve=True)
        # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
        enc.dump_funcs[
            toml.decoder.InlineTableDict
        ] = enc.dump_inline_table

        config.write(
            self.conf,
            'pps',
            encoder=enc,
        )
def load_pps_from_ledger(
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    brokername: str,
 | 
					 | 
				
			||||||
    acctname: str,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # post normalization filter on ledger entries to be processed
 | 
					 | 
				
			||||||
    filter_by: Optional[list[dict]] = None,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> tuple[
 | 
					 | 
				
			||||||
    dict[str, Transaction],
 | 
					 | 
				
			||||||
    dict[str, Position],
 | 
					 | 
				
			||||||
]:
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Open a ledger file by broker name and account and read in and
 | 
					 | 
				
			||||||
    process any trade records into our normalized ``Transaction`` form
 | 
					 | 
				
			||||||
    and then update the equivalent ``Pptable`` and deliver the two
 | 
					 | 
				
			||||||
    bsuid-mapped dict-sets of the transactions and pps.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    with (
 | 
					 | 
				
			||||||
        open_trade_ledger(brokername, acctname) as ledger,
 | 
					 | 
				
			||||||
        open_pps(brokername, acctname) as table,
 | 
					 | 
				
			||||||
    ):
 | 
					 | 
				
			||||||
        if not ledger:
 | 
					 | 
				
			||||||
            # null case, no ledger file with content
 | 
					 | 
				
			||||||
            return {}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        mod = get_brokermod(brokername)
 | 
					 | 
				
			||||||
        src_records: dict[str, Transaction] = mod.norm_trade_records(ledger)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if filter_by:
 | 
					 | 
				
			||||||
            records = {}
 | 
					 | 
				
			||||||
            bsuids = set(filter_by)
 | 
					 | 
				
			||||||
            for tid, r in src_records.items():
 | 
					 | 
				
			||||||
                if r.bsuid in bsuids:
 | 
					 | 
				
			||||||
                    records[tid] = r
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            records = src_records
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        updated = table.update_from_trans(records)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    return records, updated
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# TODO: instead see if we can hack tomli and tomli-w to do the same:
 | 
					 | 
				
			||||||
# - https://github.com/hukkin/tomli
 | 
					 | 
				
			||||||
# - https://github.com/hukkin/tomli-w
 | 
					 | 
				
			||||||
class PpsEncoder(toml.TomlEncoder):
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Special "styled" encoder that makes a ``pps.toml`` redable and
 | 
					 | 
				
			||||||
    compact by putting `.clears` tables inline and everything else
 | 
					 | 
				
			||||||
    flat-ish.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    separator = ','
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def dump_list(self, v):
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        Dump an inline list with a newline after every element and
 | 
					 | 
				
			||||||
        with consideration for denoted inline table types.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        retval = "[\n"
 | 
					 | 
				
			||||||
        for u in v:
 | 
					 | 
				
			||||||
            if isinstance(u, toml.decoder.InlineTableDict):
 | 
					 | 
				
			||||||
                out = self.dump_inline_table(u)
 | 
					 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                out = str(self.dump_value(u))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            retval += " " + out + "," + "\n"
 | 
					 | 
				
			||||||
        retval += "]"
 | 
					 | 
				
			||||||
        return retval
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def dump_inline_table(self, section):
 | 
					 | 
				
			||||||
        """Preserve inline table in its compact syntax instead of expanding
 | 
					 | 
				
			||||||
        into subsection.
 | 
					 | 
				
			||||||
        https://github.com/toml-lang/toml#user-content-inline-table
 | 
					 | 
				
			||||||
        """
 | 
					 | 
				
			||||||
        val_list = []
 | 
					 | 
				
			||||||
        for k, v in section.items():
 | 
					 | 
				
			||||||
            # if isinstance(v, toml.decoder.InlineTableDict):
 | 
					 | 
				
			||||||
            if isinstance(v, dict):
 | 
					 | 
				
			||||||
                val = self.dump_inline_table(v)
 | 
					 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                val = str(self.dump_value(v))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            val_list.append(k + " = " + val)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        retval = "{ " + ", ".join(val_list) + " }"
 | 
					 | 
				
			||||||
        return retval
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def dump_sections(self, o, sup):
 | 
					 | 
				
			||||||
        retstr = ""
 | 
					 | 
				
			||||||
        if sup != "" and sup[-1] != ".":
 | 
					 | 
				
			||||||
            sup += '.'
 | 
					 | 
				
			||||||
        retdict = self._dict()
 | 
					 | 
				
			||||||
        arraystr = ""
 | 
					 | 
				
			||||||
        for section in o:
 | 
					 | 
				
			||||||
            qsection = str(section)
 | 
					 | 
				
			||||||
            value = o[section]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            if not re.match(r'^[A-Za-z0-9_-]+$', section):
 | 
					 | 
				
			||||||
                qsection = toml.encoder._dump_str(section)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # arrayoftables = False
 | 
					 | 
				
			||||||
            if (
 | 
					 | 
				
			||||||
                self.preserve
 | 
					 | 
				
			||||||
                and isinstance(value, toml.decoder.InlineTableDict)
 | 
					 | 
				
			||||||
            ):
 | 
					 | 
				
			||||||
                retstr += (
 | 
					 | 
				
			||||||
                    qsection
 | 
					 | 
				
			||||||
                    +
 | 
					 | 
				
			||||||
                    " = "
 | 
					 | 
				
			||||||
                    +
 | 
					 | 
				
			||||||
                    self.dump_inline_table(o[section])
 | 
					 | 
				
			||||||
                    +
 | 
					 | 
				
			||||||
                    '\n'  # only on the final terminating left brace
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # XXX: this code i'm pretty sure is just blatantly bad
 | 
					 | 
				
			||||||
            # and/or wrong..
 | 
					 | 
				
			||||||
            # if isinstance(o[section], list):
 | 
					 | 
				
			||||||
            #     for a in o[section]:
 | 
					 | 
				
			||||||
            #         if isinstance(a, dict):
 | 
					 | 
				
			||||||
            #             arrayoftables = True
 | 
					 | 
				
			||||||
            # if arrayoftables:
 | 
					 | 
				
			||||||
            #     for a in o[section]:
 | 
					 | 
				
			||||||
            #         arraytabstr = "\n"
 | 
					 | 
				
			||||||
            #         arraystr += "[[" + sup + qsection + "]]\n"
 | 
					 | 
				
			||||||
            #         s, d = self.dump_sections(a, sup + qsection)
 | 
					 | 
				
			||||||
            #         if s:
 | 
					 | 
				
			||||||
            #             if s[0] == "[":
 | 
					 | 
				
			||||||
            #                 arraytabstr += s
 | 
					 | 
				
			||||||
            #             else:
 | 
					 | 
				
			||||||
            #                 arraystr += s
 | 
					 | 
				
			||||||
            #         while d:
 | 
					 | 
				
			||||||
            #             newd = self._dict()
 | 
					 | 
				
			||||||
            #             for dsec in d:
 | 
					 | 
				
			||||||
            #                 s1, d1 = self.dump_sections(d[dsec], sup +
 | 
					 | 
				
			||||||
            #                                             qsection + "." +
 | 
					 | 
				
			||||||
            #                                             dsec)
 | 
					 | 
				
			||||||
            #                 if s1:
 | 
					 | 
				
			||||||
            #                     arraytabstr += ("[" + sup + qsection +
 | 
					 | 
				
			||||||
            #                                     "." + dsec + "]\n")
 | 
					 | 
				
			||||||
            #                     arraytabstr += s1
 | 
					 | 
				
			||||||
            #                 for s1 in d1:
 | 
					 | 
				
			||||||
            #                     newd[dsec + "." + s1] = d1[s1]
 | 
					 | 
				
			||||||
            #             d = newd
 | 
					 | 
				
			||||||
            #         arraystr += arraytabstr
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            elif isinstance(value, dict):
 | 
					 | 
				
			||||||
                retdict[qsection] = o[section]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            elif o[section] is not None:
 | 
					 | 
				
			||||||
                retstr += (
 | 
					 | 
				
			||||||
                    qsection
 | 
					 | 
				
			||||||
                    +
 | 
					 | 
				
			||||||
                    " = "
 | 
					 | 
				
			||||||
                    +
 | 
					 | 
				
			||||||
                    str(self.dump_value(o[section]))
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # if not isinstance(value, dict):
 | 
					 | 
				
			||||||
                if not isinstance(value, toml.decoder.InlineTableDict):
 | 
					 | 
				
			||||||
                    # inline tables should not contain newlines:
 | 
					 | 
				
			||||||
                    # https://toml.io/en/v1.0.0#inline-table
 | 
					 | 
				
			||||||
                    retstr += '\n'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                raise ValueError(value)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        retstr += arraystr
 | 
					 | 
				
			||||||
        return (retstr, retdict)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
@cm
 | 
					 | 
				
			||||||
def open_pps(
 | 
					 | 
				
			||||||
    brokername: str,
 | 
					 | 
				
			||||||
    acctid: str,
 | 
					 | 
				
			||||||
    write_on_exit: bool = True,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> PpTable:
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Read out broker-specific position entries from
 | 
					 | 
				
			||||||
    incremental update file: ``pps.toml``.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    conf, path = config.load('pps')
 | 
					 | 
				
			||||||
    brokersection = conf.setdefault(brokername, {})
 | 
					 | 
				
			||||||
    pps = brokersection.setdefault(acctid, {})
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO: ideally we can pass in an existing
 | 
					 | 
				
			||||||
    # pps state to this right? such that we
 | 
					 | 
				
			||||||
    # don't have to do a ledger reload all the
 | 
					 | 
				
			||||||
    # time.. a couple ideas I can think of,
 | 
					 | 
				
			||||||
    # - mirror this in some client side actor which
 | 
					 | 
				
			||||||
    #   does the actual ledger updates (say the paper
 | 
					 | 
				
			||||||
    #   engine proc if we decide to always spawn it?),
 | 
					 | 
				
			||||||
    # - do diffs against updates from the ledger writer
 | 
					 | 
				
			||||||
    #   actor and the in-mem state here?
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    pp_objs = {}
 | 
					 | 
				
			||||||
    table = PpTable(
 | 
					 | 
				
			||||||
        brokername,
 | 
					 | 
				
			||||||
        acctid,
 | 
					 | 
				
			||||||
        pp_objs,
 | 
					 | 
				
			||||||
        conf=conf,
 | 
					 | 
				
			||||||
    )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # unmarshal/load ``pps.toml`` config entries into object form
 | 
					 | 
				
			||||||
    # and update `PpTable` obj entries.
 | 
					 | 
				
			||||||
    for fqsn, entry in pps.items():
 | 
					 | 
				
			||||||
        bsuid = entry['bsuid']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # convert clears sub-tables (only in this form
 | 
					 | 
				
			||||||
        # for toml re-presentation) back into a master table.
 | 
					 | 
				
			||||||
        clears_list = entry['clears']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # index clears entries in "object" form by tid in a top
 | 
					 | 
				
			||||||
        # level dict instead of a list (as is presented in our
 | 
					 | 
				
			||||||
        # ``pps.toml``).
 | 
					 | 
				
			||||||
        clears = pp_objs.setdefault(bsuid, {})
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # TODO: should be make a ``Struct`` for clear/event entries?
 | 
					 | 
				
			||||||
        # convert "clear events table" from the toml config (list of
 | 
					 | 
				
			||||||
        # a dicts) and load it into object form for use in position
 | 
					 | 
				
			||||||
        # processing of new clear events.
 | 
					 | 
				
			||||||
        trans: list[Transaction] = []
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        for clears_table in clears_list:
 | 
					 | 
				
			||||||
            tid = clears_table.pop('tid')
 | 
					 | 
				
			||||||
            dtstr = clears_table['dt']
 | 
					 | 
				
			||||||
            dt = pendulum.parse(dtstr)
 | 
					 | 
				
			||||||
            clears_table['dt'] = dt
 | 
					 | 
				
			||||||
            trans.append(Transaction(
 | 
					 | 
				
			||||||
                fqsn=bsuid,
 | 
					 | 
				
			||||||
                bsuid=bsuid,
 | 
					 | 
				
			||||||
                tid=tid,
 | 
					 | 
				
			||||||
                size=clears_table['size'],
 | 
					 | 
				
			||||||
                price=clears_table['price'],
 | 
					 | 
				
			||||||
                cost=clears_table['cost'],
 | 
					 | 
				
			||||||
                dt=dt,
 | 
					 | 
				
			||||||
            ))
 | 
					 | 
				
			||||||
            clears[tid] = clears_table
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        size = entry['size']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # TODO: remove but, handle old field name for now
 | 
					 | 
				
			||||||
        ppu = entry.get('ppu', entry.get('be_price', 0))
 | 
					 | 
				
			||||||
        split_ratio = entry.get('split_ratio')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        expiry = entry.get('expiry')
 | 
					 | 
				
			||||||
        if expiry:
 | 
					 | 
				
			||||||
            expiry = pendulum.parse(expiry)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        pp = pp_objs[bsuid] = Position(
 | 
					 | 
				
			||||||
            Symbol.from_fqsn(fqsn, info={}),
 | 
					 | 
				
			||||||
            size=size,
 | 
					 | 
				
			||||||
            ppu=ppu,
 | 
					 | 
				
			||||||
            split_ratio=split_ratio,
 | 
					 | 
				
			||||||
            expiry=expiry,
 | 
					 | 
				
			||||||
            bsuid=entry['bsuid'],
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # XXX: super critical, we need to be sure to include
 | 
					 | 
				
			||||||
        # all pps.toml clears to avoid reusing clears that were
 | 
					 | 
				
			||||||
        # already included in the current incremental update
 | 
					 | 
				
			||||||
        # state, since today's records may have already been
 | 
					 | 
				
			||||||
        # processed!
 | 
					 | 
				
			||||||
        for t in trans:
 | 
					 | 
				
			||||||
            pp.add_clear(t)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # audit entries loaded from toml
 | 
					 | 
				
			||||||
        pp.ensure_state()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    try:
 | 
					 | 
				
			||||||
        yield table
 | 
					 | 
				
			||||||
    finally:
 | 
					 | 
				
			||||||
        if write_on_exit:
 | 
					 | 
				
			||||||
            table.write_config()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
if __name__ == '__main__':
 | 
					 | 
				
			||||||
    import sys
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    args = sys.argv
 | 
					 | 
				
			||||||
    assert len(args) > 1, 'Specifiy account(s) from `brokers.toml`'
 | 
					 | 
				
			||||||
    args = args[1:]
 | 
					 | 
				
			||||||
    for acctid in args:
 | 
					 | 
				
			||||||
        broker, name = acctid.split('.')
 | 
					 | 
				
			||||||
        trans, updated_pps = load_pps_from_ledger(broker, name)
 | 
					 | 
				
			||||||
        print(
 | 
					 | 
				
			||||||
            f'Processing transactions into pps for {broker}:{acctid}\n'
 | 
					 | 
				
			||||||
            f'{pformat(trans)}\n\n'
 | 
					 | 
				
			||||||
            f'{pformat(updated_pps)}'
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
| 
						 | 
					@ -32,22 +32,16 @@ def mk_marker_path(
 | 
				
			||||||
    style: str,
 | 
					    style: str,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> QGraphicsPathItem:
 | 
					) -> QGraphicsPathItem:
 | 
				
			||||||
    '''
 | 
					    """Add a marker to be displayed on the line wrapped in a ``QGraphicsPathItem``
 | 
				
			||||||
    Add a marker to be displayed on the line wrapped in
 | 
					    ready to be placed using scene coordinates (not view).
 | 
				
			||||||
    a ``QGraphicsPathItem`` ready to be placed using scene coordinates
 | 
					 | 
				
			||||||
    (not view).
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    **Arguments**
 | 
					    **Arguments**
 | 
				
			||||||
    style        String indicating the style of marker to add:
 | 
					    style        String indicating the style of marker to add:
 | 
				
			||||||
                  ``'<|'``, ``'|>'``, ``'>|'``, ``'|<'``, ``'<|>'``,
 | 
					                  ``'<|'``, ``'|>'``, ``'>|'``, ``'|<'``, ``'<|>'``,
 | 
				
			||||||
                  ``'>|<'``, ``'^'``, ``'v'``, ``'o'``
 | 
					                  ``'>|<'``, ``'^'``, ``'v'``, ``'o'``
 | 
				
			||||||
 | 
					    size          Size of the marker in pixels.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    This code is taken nearly verbatim from the
 | 
					    """
 | 
				
			||||||
    `InfiniteLine.addMarker()` method but does not attempt do be aware
 | 
					 | 
				
			||||||
    of low(er) level graphics controls and expects for the output
 | 
					 | 
				
			||||||
    polygon to be applied to a ``QGraphicsPathItem``.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    path = QtGui.QPainterPath()
 | 
					    path = QtGui.QPainterPath()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    if style == 'o':
 | 
					    if style == 'o':
 | 
				
			||||||
| 
						 | 
					@ -93,8 +87,7 @@ def mk_marker_path(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class LevelMarker(QGraphicsPathItem):
 | 
					class LevelMarker(QGraphicsPathItem):
 | 
				
			||||||
    '''
 | 
					    '''An arrow marker path graphich which redraws itself
 | 
				
			||||||
    An arrow marker path graphich which redraws itself
 | 
					 | 
				
			||||||
    to the specified view coordinate level on each paint cycle.
 | 
					    to the specified view coordinate level on each paint cycle.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -111,8 +104,7 @@ class LevelMarker(QGraphicsPathItem):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # get polygon and scale
 | 
					        # get polygon and scale
 | 
				
			||||||
        super().__init__()
 | 
					        super().__init__()
 | 
				
			||||||
        # self.setScale(size, size)
 | 
					        self.scale(size, size)
 | 
				
			||||||
        self.setScale(size)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # interally generates path
 | 
					        # interally generates path
 | 
				
			||||||
        self._style = None
 | 
					        self._style = None
 | 
				
			||||||
| 
						 | 
					@ -122,7 +114,6 @@ class LevelMarker(QGraphicsPathItem):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.get_level = get_level
 | 
					        self.get_level = get_level
 | 
				
			||||||
        self._on_paint = on_paint
 | 
					        self._on_paint = on_paint
 | 
				
			||||||
 | 
					 | 
				
			||||||
        self.scene_x = lambda: chart.marker_right_points()[1]
 | 
					        self.scene_x = lambda: chart.marker_right_points()[1]
 | 
				
			||||||
        self.level: float = 0
 | 
					        self.level: float = 0
 | 
				
			||||||
        self.keep_in_view = keep_in_view
 | 
					        self.keep_in_view = keep_in_view
 | 
				
			||||||
| 
						 | 
					@ -158,9 +149,12 @@ class LevelMarker(QGraphicsPathItem):
 | 
				
			||||||
    def w(self) -> float:
 | 
					    def w(self) -> float:
 | 
				
			||||||
        return self.path_br().width()
 | 
					        return self.path_br().width()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def position_in_view(self) -> None:
 | 
					    def position_in_view(
 | 
				
			||||||
        '''
 | 
					        self,
 | 
				
			||||||
        Show a pp off-screen indicator for a level label.
 | 
					        # level: float,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					        '''Show a pp off-screen indicator for a level label.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        This is like in fps games where you have a gps "nav" indicator
 | 
					        This is like in fps games where you have a gps "nav" indicator
 | 
				
			||||||
        but your teammate is outside the range of view, except in 2D, on
 | 
					        but your teammate is outside the range of view, except in 2D, on
 | 
				
			||||||
| 
						 | 
					@ -168,6 +162,7 @@ class LevelMarker(QGraphicsPathItem):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        level = self.get_level()
 | 
					        level = self.get_level()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        view = self.chart.getViewBox()
 | 
					        view = self.chart.getViewBox()
 | 
				
			||||||
        vr = view.state['viewRange']
 | 
					        vr = view.state['viewRange']
 | 
				
			||||||
        ymn, ymx = vr[1]
 | 
					        ymn, ymx = vr[1]
 | 
				
			||||||
| 
						 | 
					@ -191,6 +186,7 @@ class LevelMarker(QGraphicsPathItem):
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        elif level < ymn:  # pin to bottom of view
 | 
					        elif level < ymn:  # pin to bottom of view
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            self.setPos(
 | 
					            self.setPos(
 | 
				
			||||||
                QPointF(
 | 
					                QPointF(
 | 
				
			||||||
                    x,
 | 
					                    x,
 | 
				
			||||||
| 
						 | 
					@ -215,8 +211,7 @@ class LevelMarker(QGraphicsPathItem):
 | 
				
			||||||
        w: QtWidgets.QWidget
 | 
					        w: QtWidgets.QWidget
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''
 | 
					        '''Core paint which we override to always update
 | 
				
			||||||
        Core paint which we override to always update
 | 
					 | 
				
			||||||
        our marker position in scene coordinates from a
 | 
					        our marker position in scene coordinates from a
 | 
				
			||||||
        view cooridnate "level".
 | 
					        view cooridnate "level".
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -240,12 +235,11 @@ def qgo_draw_markers(
 | 
				
			||||||
    right_offset: float,
 | 
					    right_offset: float,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> float:
 | 
					) -> float:
 | 
				
			||||||
    '''
 | 
					    """Paint markers in ``pg.GraphicsItem`` style by first
 | 
				
			||||||
    Paint markers in ``pg.GraphicsItem`` style by first
 | 
					 | 
				
			||||||
    removing the view transform for the painter, drawing the markers
 | 
					    removing the view transform for the painter, drawing the markers
 | 
				
			||||||
    in scene coords, then restoring the view coords.
 | 
					    in scene coords, then restoring the view coords.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    """
 | 
				
			||||||
    # paint markers in native coordinate system
 | 
					    # paint markers in native coordinate system
 | 
				
			||||||
    orig_tr = p.transform()
 | 
					    orig_tr = p.transform()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -78,8 +78,6 @@ async def _async_main(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    """
 | 
					    """
 | 
				
			||||||
    from . import _display
 | 
					    from . import _display
 | 
				
			||||||
    from ._pg_overrides import _do_overrides
 | 
					 | 
				
			||||||
    _do_overrides()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    godwidget = main_widget
 | 
					    godwidget = main_widget
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -109,8 +107,9 @@ async def _async_main(
 | 
				
			||||||
        # setup search widget and focus main chart view at startup
 | 
					        # setup search widget and focus main chart view at startup
 | 
				
			||||||
        # search widget is a singleton alongside the godwidget
 | 
					        # search widget is a singleton alongside the godwidget
 | 
				
			||||||
        search = _search.SearchWidget(godwidget=godwidget)
 | 
					        search = _search.SearchWidget(godwidget=godwidget)
 | 
				
			||||||
        # search.bar.unfocus()
 | 
					        search.bar.unfocus()
 | 
				
			||||||
        # godwidget.hbox.addWidget(search)
 | 
					
 | 
				
			||||||
 | 
					        godwidget.hbox.addWidget(search)
 | 
				
			||||||
        godwidget.search = search
 | 
					        godwidget.search = search
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        symbol, _, provider = sym.rpartition('.')
 | 
					        symbol, _, provider = sym.rpartition('.')
 | 
				
			||||||
| 
						 | 
					@ -179,6 +178,6 @@ def _main(
 | 
				
			||||||
    run_qtractor(
 | 
					    run_qtractor(
 | 
				
			||||||
        func=_async_main,
 | 
					        func=_async_main,
 | 
				
			||||||
        args=(sym, brokernames, piker_loglevel),
 | 
					        args=(sym, brokernames, piker_loglevel),
 | 
				
			||||||
        main_widget_type=GodWidget,
 | 
					        main_widget=GodWidget,
 | 
				
			||||||
        tractor_kwargs=tractor_kwargs,
 | 
					        tractor_kwargs=tractor_kwargs,
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -39,17 +39,12 @@ class Axis(pg.AxisItem):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    A better axis that sizes tick contents considering font size.
 | 
					    A better axis that sizes tick contents considering font size.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    Also includes tick values lru caching originally proposed in but never
 | 
					 | 
				
			||||||
    accepted upstream:
 | 
					 | 
				
			||||||
    https://github.com/pyqtgraph/pyqtgraph/pull/2160
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        linkedsplits,
 | 
					        linkedsplits,
 | 
				
			||||||
        typical_max_str: str = '100 000.000',
 | 
					        typical_max_str: str = '100 000.000',
 | 
				
			||||||
        text_color: str = 'bracket',
 | 
					        text_color: str = 'bracket',
 | 
				
			||||||
        lru_cache_tick_strings: bool = True,
 | 
					 | 
				
			||||||
        **kwargs
 | 
					        **kwargs
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
| 
						 | 
					@ -96,34 +91,6 @@ class Axis(pg.AxisItem):
 | 
				
			||||||
        # size the pertinent axis dimension to a "typical value"
 | 
					        # size the pertinent axis dimension to a "typical value"
 | 
				
			||||||
        self.size_to_values()
 | 
					        self.size_to_values()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # NOTE: requires override ``.tickValues()`` method seen below.
 | 
					 | 
				
			||||||
        if lru_cache_tick_strings:
 | 
					 | 
				
			||||||
            self.tickStrings = lru_cache(
 | 
					 | 
				
			||||||
                maxsize=2**20
 | 
					 | 
				
			||||||
            )(self.tickStrings)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # NOTE: only overriden to cast tick values entries into tuples
 | 
					 | 
				
			||||||
    # for use with the lru caching.
 | 
					 | 
				
			||||||
    def tickValues(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        minVal: float,
 | 
					 | 
				
			||||||
        maxVal: float,
 | 
					 | 
				
			||||||
        size: int,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> list[tuple[float, tuple[str]]]:
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        Repack tick values into tuples for lru caching.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        ticks = []
 | 
					 | 
				
			||||||
        for scalar, values in super().tickValues(minVal, maxVal, size):
 | 
					 | 
				
			||||||
            ticks.append((
 | 
					 | 
				
			||||||
                scalar,
 | 
					 | 
				
			||||||
                tuple(values),  # this
 | 
					 | 
				
			||||||
            ))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        return ticks
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    @property
 | 
					    @property
 | 
				
			||||||
    def text_color(self) -> str:
 | 
					    def text_color(self) -> str:
 | 
				
			||||||
        return self._text_color
 | 
					        return self._text_color
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -19,11 +19,7 @@ High level chart-widget apis.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
'''
 | 
					'''
 | 
				
			||||||
from __future__ import annotations
 | 
					from __future__ import annotations
 | 
				
			||||||
from typing import (
 | 
					from typing import Optional, TYPE_CHECKING
 | 
				
			||||||
    Iterator,
 | 
					 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    TYPE_CHECKING,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
from PyQt5 import QtCore, QtWidgets
 | 
					from PyQt5 import QtCore, QtWidgets
 | 
				
			||||||
from PyQt5.QtCore import (
 | 
					from PyQt5.QtCore import (
 | 
				
			||||||
| 
						 | 
					@ -72,9 +68,6 @@ from ._forms import FieldsForm
 | 
				
			||||||
from .._profile import pg_profile_enabled, ms_slower_then
 | 
					from .._profile import pg_profile_enabled, ms_slower_then
 | 
				
			||||||
from ._overlay import PlotItemOverlay
 | 
					from ._overlay import PlotItemOverlay
 | 
				
			||||||
from ._flows import Flow
 | 
					from ._flows import Flow
 | 
				
			||||||
from ._search import SearchWidget
 | 
					 | 
				
			||||||
from . import _pg_overrides as pgo
 | 
					 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
if TYPE_CHECKING:
 | 
					if TYPE_CHECKING:
 | 
				
			||||||
    from ._display import DisplayState
 | 
					    from ._display import DisplayState
 | 
				
			||||||
| 
						 | 
					@ -92,9 +85,6 @@ class GodWidget(QWidget):
 | 
				
			||||||
    modify them.
 | 
					    modify them.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    search: SearchWidget
 | 
					 | 
				
			||||||
    mode_name: str = 'god'
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
| 
						 | 
					@ -104,8 +94,6 @@ class GodWidget(QWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        super().__init__(parent)
 | 
					        super().__init__(parent)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.search: Optional[SearchWidget] = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        self.hbox = QHBoxLayout(self)
 | 
					        self.hbox = QHBoxLayout(self)
 | 
				
			||||||
        self.hbox.setContentsMargins(0, 0, 0, 0)
 | 
					        self.hbox.setContentsMargins(0, 0, 0, 0)
 | 
				
			||||||
        self.hbox.setSpacing(6)
 | 
					        self.hbox.setSpacing(6)
 | 
				
			||||||
| 
						 | 
					@ -127,10 +115,7 @@ class GodWidget(QWidget):
 | 
				
			||||||
        # self.vbox.addLayout(self.hbox)
 | 
					        # self.vbox.addLayout(self.hbox)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self._chart_cache: dict[str, LinkedSplits] = {}
 | 
					        self._chart_cache: dict[str, LinkedSplits] = {}
 | 
				
			||||||
 | 
					        self.linkedsplits: Optional[LinkedSplits] = None
 | 
				
			||||||
        self.hist_linked: Optional[LinkedSplits] = None
 | 
					 | 
				
			||||||
        self.rt_linked: Optional[LinkedSplits] = None
 | 
					 | 
				
			||||||
        self._active_cursor: Optional[Cursor] = None
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # assigned in the startup func `_async_main()`
 | 
					        # assigned in the startup func `_async_main()`
 | 
				
			||||||
        self._root_n: trio.Nursery = None
 | 
					        self._root_n: trio.Nursery = None
 | 
				
			||||||
| 
						 | 
					@ -138,14 +123,6 @@ class GodWidget(QWidget):
 | 
				
			||||||
        self._widgets: dict[str, QWidget] = {}
 | 
					        self._widgets: dict[str, QWidget] = {}
 | 
				
			||||||
        self._resizing: bool = False
 | 
					        self._resizing: bool = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: do we need this, when would god get resized
 | 
					 | 
				
			||||||
        # and the window does not? Never right?!
 | 
					 | 
				
			||||||
        # self.reg_for_resize(self)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    @property
 | 
					 | 
				
			||||||
    def linkedsplits(self) -> LinkedSplits:
 | 
					 | 
				
			||||||
        return self.rt_linked
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # def init_timeframes_ui(self):
 | 
					    # def init_timeframes_ui(self):
 | 
				
			||||||
    #     self.tf_layout = QHBoxLayout()
 | 
					    #     self.tf_layout = QHBoxLayout()
 | 
				
			||||||
    #     self.tf_layout.setSpacing(0)
 | 
					    #     self.tf_layout.setSpacing(0)
 | 
				
			||||||
| 
						 | 
					@ -171,19 +148,19 @@ class GodWidget(QWidget):
 | 
				
			||||||
    def set_chart_symbol(
 | 
					    def set_chart_symbol(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        symbol_key: str,  # of form <fqsn>.<providername>
 | 
					        symbol_key: str,  # of form <fqsn>.<providername>
 | 
				
			||||||
        all_linked: tuple[LinkedSplits, LinkedSplits],  # type: ignore
 | 
					        linkedsplits: LinkedSplits,  # type: ignore
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        # re-sort org cache symbol list in LIFO order
 | 
					        # re-sort org cache symbol list in LIFO order
 | 
				
			||||||
        cache = self._chart_cache
 | 
					        cache = self._chart_cache
 | 
				
			||||||
        cache.pop(symbol_key, None)
 | 
					        cache.pop(symbol_key, None)
 | 
				
			||||||
        cache[symbol_key] = all_linked
 | 
					        cache[symbol_key] = linkedsplits
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def get_chart_symbol(
 | 
					    def get_chart_symbol(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        symbol_key: str,
 | 
					        symbol_key: str,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> tuple[LinkedSplits, LinkedSplits]:  # type: ignore
 | 
					    ) -> LinkedSplits:  # type: ignore
 | 
				
			||||||
        return self._chart_cache.get(symbol_key)
 | 
					        return self._chart_cache.get(symbol_key)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def load_symbol(
 | 
					    async def load_symbol(
 | 
				
			||||||
| 
						 | 
					@ -205,33 +182,28 @@ class GodWidget(QWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # fully qualified symbol name (SNS i guess is what we're making?)
 | 
					        # fully qualified symbol name (SNS i guess is what we're making?)
 | 
				
			||||||
        fqsn = '.'.join([symbol_key, providername])
 | 
					        fqsn = '.'.join([symbol_key, providername])
 | 
				
			||||||
        all_linked = self.get_chart_symbol(fqsn)
 | 
					
 | 
				
			||||||
 | 
					        linkedsplits = self.get_chart_symbol(fqsn)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        order_mode_started = trio.Event()
 | 
					        order_mode_started = trio.Event()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if not self.vbox.isEmpty():
 | 
					        if not self.vbox.isEmpty():
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # XXX: seems to make switching slower?
 | 
					            # XXX: this is CRITICAL especially with pixel buffer caching
 | 
				
			||||||
            # qframe = self.hist_linked.chart.qframe
 | 
					            self.linkedsplits.hide()
 | 
				
			||||||
            # if qframe.sidepane is self.search:
 | 
					            self.linkedsplits.unfocus()
 | 
				
			||||||
            #     qframe.hbox.removeWidget(self.search)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            for linked in [self.rt_linked, self.hist_linked]:
 | 
					            # XXX: pretty sure we don't need this
 | 
				
			||||||
                # XXX: this is CRITICAL especially with pixel buffer caching
 | 
					            # remove any existing plots?
 | 
				
			||||||
                linked.hide()
 | 
					            # XXX: ahh we might want to support cache unloading..
 | 
				
			||||||
                linked.unfocus()
 | 
					            # self.vbox.removeWidget(self.linkedsplits)
 | 
				
			||||||
 | 
					 | 
				
			||||||
                # XXX: pretty sure we don't need this
 | 
					 | 
				
			||||||
                # remove any existing plots?
 | 
					 | 
				
			||||||
                # XXX: ahh we might want to support cache unloading..
 | 
					 | 
				
			||||||
                # self.vbox.removeWidget(linked)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # switching to a new viewable chart
 | 
					        # switching to a new viewable chart
 | 
				
			||||||
        if all_linked is None or reset:
 | 
					        if linkedsplits is None or reset:
 | 
				
			||||||
            from ._display import display_symbol_data
 | 
					            from ._display import display_symbol_data
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # we must load a fresh linked charts set
 | 
					            # we must load a fresh linked charts set
 | 
				
			||||||
            self.rt_linked = rt_charts = LinkedSplits(self)
 | 
					            linkedsplits = LinkedSplits(self)
 | 
				
			||||||
            self.hist_linked = hist_charts = LinkedSplits(self)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # spawn new task to start up and update new sub-chart instances
 | 
					            # spawn new task to start up and update new sub-chart instances
 | 
				
			||||||
            self._root_n.start_soon(
 | 
					            self._root_n.start_soon(
 | 
				
			||||||
| 
						 | 
					@ -243,70 +215,43 @@ class GodWidget(QWidget):
 | 
				
			||||||
                order_mode_started,
 | 
					                order_mode_started,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # self.vbox.addWidget(hist_charts)
 | 
					            self.set_chart_symbol(fqsn, linkedsplits)
 | 
				
			||||||
            self.vbox.addWidget(rt_charts)
 | 
					            self.vbox.addWidget(linkedsplits)
 | 
				
			||||||
            self.set_chart_symbol(
 | 
					 | 
				
			||||||
                fqsn,
 | 
					 | 
				
			||||||
                (hist_charts, rt_charts),
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            for linked in [hist_charts, rt_charts]:
 | 
					 | 
				
			||||||
                linked.show()
 | 
					 | 
				
			||||||
                linked.focus()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            linkedsplits.show()
 | 
				
			||||||
 | 
					            linkedsplits.focus()
 | 
				
			||||||
            await trio.sleep(0)
 | 
					            await trio.sleep(0)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            # symbol is already loaded and ems ready
 | 
					            # symbol is already loaded and ems ready
 | 
				
			||||||
            order_mode_started.set()
 | 
					            order_mode_started.set()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            self.hist_linked, self.rt_linked = all_linked
 | 
					            # TODO:
 | 
				
			||||||
 | 
					            # - we'll probably want per-instrument/provider state here?
 | 
				
			||||||
 | 
					            #   change the order config form over to the new chart
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            for linked in all_linked:
 | 
					            # XXX: since the pp config is a singleton widget we have to
 | 
				
			||||||
                # TODO:
 | 
					            # also switch it over to the new chart's interal-layout
 | 
				
			||||||
                # - we'll probably want per-instrument/provider state here?
 | 
					            # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
 | 
				
			||||||
                #   change the order config form over to the new chart
 | 
					            chart = linkedsplits.chart
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # chart is already in memory so just focus it
 | 
					            # chart is already in memory so just focus it
 | 
				
			||||||
                linked.show()
 | 
					            linkedsplits.show()
 | 
				
			||||||
                linked.focus()
 | 
					            linkedsplits.focus()
 | 
				
			||||||
                linked.graphics_cycle()
 | 
					            linkedsplits.graphics_cycle()
 | 
				
			||||||
                await trio.sleep(0)
 | 
					            await trio.sleep(0)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # resume feeds *after* rendering chart view asap
 | 
					            # resume feeds *after* rendering chart view asap
 | 
				
			||||||
                chart = linked.chart
 | 
					            chart.resume_all_feeds()
 | 
				
			||||||
                if chart:
 | 
					 | 
				
			||||||
                    chart.resume_all_feeds()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # TODO: we need a check to see if the chart
 | 
					            # TODO: we need a check to see if the chart
 | 
				
			||||||
            # last had the xlast in view, if so then shift so it's
 | 
					            # last had the xlast in view, if so then shift so it's
 | 
				
			||||||
            # still in view, if the user was viewing history then
 | 
					            # still in view, if the user was viewing history then
 | 
				
			||||||
            # do nothing yah?
 | 
					            # do nothing yah?
 | 
				
			||||||
            self.rt_linked.chart.default_view()
 | 
					            chart.default_view()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # if a history chart instance is already up then
 | 
					        self.linkedsplits = linkedsplits
 | 
				
			||||||
        # set the search widget as its sidepane.
 | 
					        symbol = linkedsplits.symbol
 | 
				
			||||||
        hist_chart = self.hist_linked.chart
 | 
					 | 
				
			||||||
        if hist_chart:
 | 
					 | 
				
			||||||
            hist_chart.qframe.set_sidepane(self.search)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # NOTE: this is really stupid/hard to follow.
 | 
					 | 
				
			||||||
            # we have to reposition the active position nav
 | 
					 | 
				
			||||||
            # **AFTER** applying the search bar as a sidepane
 | 
					 | 
				
			||||||
            # to the newly switched to symbol.
 | 
					 | 
				
			||||||
            await trio.sleep(0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # TODO: probably stick this in some kinda `LooknFeel` API?
 | 
					 | 
				
			||||||
            for tracker in self.rt_linked.mode.trackers.values():
 | 
					 | 
				
			||||||
                pp_nav = tracker.nav
 | 
					 | 
				
			||||||
                if tracker.live_pp.size:
 | 
					 | 
				
			||||||
                    pp_nav.show()
 | 
					 | 
				
			||||||
                    pp_nav.hide_info()
 | 
					 | 
				
			||||||
                else:
 | 
					 | 
				
			||||||
                    pp_nav.hide()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # set window titlebar info
 | 
					 | 
				
			||||||
        symbol = self.rt_linked.symbol
 | 
					 | 
				
			||||||
        if symbol is not None:
 | 
					        if symbol is not None:
 | 
				
			||||||
            self.window.setWindowTitle(
 | 
					            self.window.setWindowTitle(
 | 
				
			||||||
                f'{symbol.front_fqsn()} '
 | 
					                f'{symbol.front_fqsn()} '
 | 
				
			||||||
| 
						 | 
					@ -323,23 +268,11 @@ class GodWidget(QWidget):
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        # go back to view-mode focus (aka chart focus)
 | 
					        # go back to view-mode focus (aka chart focus)
 | 
				
			||||||
        self.clearFocus()
 | 
					        self.clearFocus()
 | 
				
			||||||
        chart = self.rt_linked.chart
 | 
					        self.linkedsplits.chart.setFocus()
 | 
				
			||||||
        if chart:
 | 
					 | 
				
			||||||
            chart.setFocus()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def reg_for_resize(
 | 
					    def resizeEvent(self, event: QtCore.QEvent) -> None:
 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        widget: QWidget,
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					 | 
				
			||||||
        getattr(widget, 'on_resize')
 | 
					 | 
				
			||||||
        self._widgets[widget.mode_name] = widget
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def on_win_resize(self, event: QtCore.QEvent) -> None:
 | 
					 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        Top level god widget handler from window (the real yaweh) resize
 | 
					        Top level god widget resize handler.
 | 
				
			||||||
        events such that any registered widgets which wish to be
 | 
					 | 
				
			||||||
        notified are invoked using our pythonic `.on_resize()` method
 | 
					 | 
				
			||||||
        api.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        Where we do UX magic to make things not suck B)
 | 
					        Where we do UX magic to make things not suck B)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -355,28 +288,6 @@ class GodWidget(QWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self._resizing = False
 | 
					        self._resizing = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # on_resize = on_win_resize
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def get_cursor(self) -> Cursor:
 | 
					 | 
				
			||||||
        return self._active_cursor
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def iter_linked(self) -> Iterator[LinkedSplits]:
 | 
					 | 
				
			||||||
        for linked in [self.hist_linked, self.rt_linked]:
 | 
					 | 
				
			||||||
            yield linked
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def resize_all(self) -> None:
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        Dynamic resize sequence: adjusts all sub-widgets/charts to
 | 
					 | 
				
			||||||
        sensible default ratios of what space is detected as available
 | 
					 | 
				
			||||||
        on the display / window.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        rt_linked = self.rt_linked
 | 
					 | 
				
			||||||
        rt_linked.set_split_sizes()
 | 
					 | 
				
			||||||
        self.rt_linked.resize_sidepanes()
 | 
					 | 
				
			||||||
        self.hist_linked.resize_sidepanes(from_linked=rt_linked)
 | 
					 | 
				
			||||||
        self.search.on_resize()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
class ChartnPane(QFrame):
 | 
					class ChartnPane(QFrame):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -389,9 +300,9 @@ class ChartnPane(QFrame):
 | 
				
			||||||
    https://doc.qt.io/qt-5/qwidget.html#composite-widgets
 | 
					    https://doc.qt.io/qt-5/qwidget.html#composite-widgets
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    sidepane: FieldsForm | SearchWidget
 | 
					    sidepane: FieldsForm
 | 
				
			||||||
    hbox: QHBoxLayout
 | 
					    hbox: QHBoxLayout
 | 
				
			||||||
    chart: Optional[ChartPlotWidget] = None
 | 
					    chart: Optional['ChartPlotWidget'] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
| 
						 | 
					@ -403,7 +314,7 @@ class ChartnPane(QFrame):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        super().__init__(parent)
 | 
					        super().__init__(parent)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self._sidepane = sidepane
 | 
					        self.sidepane = sidepane
 | 
				
			||||||
        self.chart = None
 | 
					        self.chart = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        hbox = self.hbox = QHBoxLayout(self)
 | 
					        hbox = self.hbox = QHBoxLayout(self)
 | 
				
			||||||
| 
						 | 
					@ -411,21 +322,6 @@ class ChartnPane(QFrame):
 | 
				
			||||||
        hbox.setContentsMargins(0, 0, 0, 0)
 | 
					        hbox.setContentsMargins(0, 0, 0, 0)
 | 
				
			||||||
        hbox.setSpacing(3)
 | 
					        hbox.setSpacing(3)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def set_sidepane(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        sidepane: FieldsForm | SearchWidget,
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # add sidepane **after** chart; place it on axis side
 | 
					 | 
				
			||||||
        self.hbox.addWidget(
 | 
					 | 
				
			||||||
            sidepane,
 | 
					 | 
				
			||||||
            alignment=Qt.AlignTop
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
        self._sidepane = sidepane
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def sidepane(self) -> FieldsForm | SearchWidget:
 | 
					 | 
				
			||||||
        return self._sidepane
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
class LinkedSplits(QWidget):
 | 
					class LinkedSplits(QWidget):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -460,7 +356,6 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
        self.splitter = QSplitter(QtCore.Qt.Vertical)
 | 
					        self.splitter = QSplitter(QtCore.Qt.Vertical)
 | 
				
			||||||
        self.splitter.setMidLineWidth(0)
 | 
					        self.splitter.setMidLineWidth(0)
 | 
				
			||||||
        self.splitter.setHandleWidth(2)
 | 
					        self.splitter.setHandleWidth(2)
 | 
				
			||||||
        self.splitter.splitterMoved.connect(self.on_splitter_adjust)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.layout = QVBoxLayout(self)
 | 
					        self.layout = QVBoxLayout(self)
 | 
				
			||||||
        self.layout.setContentsMargins(0, 0, 0, 0)
 | 
					        self.layout.setContentsMargins(0, 0, 0, 0)
 | 
				
			||||||
| 
						 | 
					@ -473,16 +368,6 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self._symbol: Symbol = None
 | 
					        self._symbol: Symbol = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def on_splitter_adjust(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        pos: int,
 | 
					 | 
				
			||||||
        index: int,
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					 | 
				
			||||||
        # print(f'splitter moved pos:{pos}, index:{index}')
 | 
					 | 
				
			||||||
        godw = self.godwidget
 | 
					 | 
				
			||||||
        if self is godw.rt_linked:
 | 
					 | 
				
			||||||
            godw.search.on_resize()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def graphics_cycle(self, **kwargs) -> None:
 | 
					    def graphics_cycle(self, **kwargs) -> None:
 | 
				
			||||||
        from . import _display
 | 
					        from . import _display
 | 
				
			||||||
        ds = self.display_state
 | 
					        ds = self.display_state
 | 
				
			||||||
| 
						 | 
					@ -498,32 +383,28 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
        prop: Optional[float] = None,
 | 
					        prop: Optional[float] = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''
 | 
					        '''Set the proportion of space allocated for linked subcharts.
 | 
				
			||||||
        Set the proportion of space allocated for linked subcharts.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        ln = len(self.subplots) or 1
 | 
					        ln = len(self.subplots)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # proportion allocated to consumer subcharts
 | 
					        # proportion allocated to consumer subcharts
 | 
				
			||||||
        if not prop:
 | 
					        if not prop:
 | 
				
			||||||
            prop = 3/8
 | 
					            prop = 3/8*5/8
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        h = self.height()
 | 
					            # if ln < 2:
 | 
				
			||||||
        histview_h = h * (6/16)
 | 
					            #     prop = 3/8*5/8
 | 
				
			||||||
        h = h - histview_h
 | 
					
 | 
				
			||||||
 | 
					            # elif ln >= 2:
 | 
				
			||||||
 | 
					            #     prop = 3/8
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        major = 1 - prop
 | 
					        major = 1 - prop
 | 
				
			||||||
        min_h_ind = int((h * prop) / ln)
 | 
					        min_h_ind = int((self.height() * prop) / ln)
 | 
				
			||||||
        sizes = [
 | 
					 | 
				
			||||||
            int(histview_h),
 | 
					 | 
				
			||||||
            int(h * major),
 | 
					 | 
				
			||||||
        ]
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # give all subcharts the same remaining proportional height
 | 
					        sizes = [int(self.height() * major)]
 | 
				
			||||||
        sizes.extend([min_h_ind] * ln)
 | 
					        sizes.extend([min_h_ind] * ln)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if self.godwidget.rt_linked is self:
 | 
					        self.splitter.setSizes(sizes)
 | 
				
			||||||
            self.splitter.setSizes(sizes)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def focus(self) -> None:
 | 
					    def focus(self) -> None:
 | 
				
			||||||
        if self.chart is not None:
 | 
					        if self.chart is not None:
 | 
				
			||||||
| 
						 | 
					@ -571,6 +452,13 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
        # add crosshair graphic
 | 
					        # add crosshair graphic
 | 
				
			||||||
        self.chart.addItem(self.cursor)
 | 
					        self.chart.addItem(self.cursor)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # axis placement
 | 
				
			||||||
 | 
					        if (
 | 
				
			||||||
 | 
					            _xaxis_at == 'bottom' and
 | 
				
			||||||
 | 
					            'bottom' in self.chart.plotItem.axes
 | 
				
			||||||
 | 
					        ):
 | 
				
			||||||
 | 
					            self.chart.hideAxis('bottom')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # style?
 | 
					        # style?
 | 
				
			||||||
        self.chart.setFrameStyle(
 | 
					        self.chart.setFrameStyle(
 | 
				
			||||||
            QFrame.StyledPanel |
 | 
					            QFrame.StyledPanel |
 | 
				
			||||||
| 
						 | 
					@ -616,15 +504,10 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
            'bottom': xaxis,
 | 
					            'bottom': xaxis,
 | 
				
			||||||
        }
 | 
					        }
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if sidepane is not False:
 | 
					        qframe = ChartnPane(
 | 
				
			||||||
            parent = qframe = ChartnPane(
 | 
					            sidepane=sidepane,
 | 
				
			||||||
                sidepane=sidepane,
 | 
					            parent=self.splitter,
 | 
				
			||||||
                parent=self.splitter,
 | 
					        )
 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            parent = self.splitter
 | 
					 | 
				
			||||||
            qframe = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        cpw = ChartPlotWidget(
 | 
					        cpw = ChartPlotWidget(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # this name will be used to register the primary
 | 
					            # this name will be used to register the primary
 | 
				
			||||||
| 
						 | 
					@ -632,7 +515,7 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
            name=name,
 | 
					            name=name,
 | 
				
			||||||
            data_key=array_key or name,
 | 
					            data_key=array_key or name,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            parent=parent,
 | 
					            parent=qframe,
 | 
				
			||||||
            linkedsplits=self,
 | 
					            linkedsplits=self,
 | 
				
			||||||
            axisItems=axes,
 | 
					            axisItems=axes,
 | 
				
			||||||
            **cpw_kwargs,
 | 
					            **cpw_kwargs,
 | 
				
			||||||
| 
						 | 
					@ -640,45 +523,37 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
        cpw.hideAxis('left')
 | 
					        cpw.hideAxis('left')
 | 
				
			||||||
        cpw.hideAxis('bottom')
 | 
					        cpw.hideAxis('bottom')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if (
 | 
					        if self.xaxis_chart:
 | 
				
			||||||
            _xaxis_at == 'bottom' and (
 | 
					            self.xaxis_chart.hideAxis('bottom')
 | 
				
			||||||
                self.xaxis_chart
 | 
					 | 
				
			||||||
                or (
 | 
					 | 
				
			||||||
                    not self.subplots
 | 
					 | 
				
			||||||
                    and self.xaxis_chart is None
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            if self.xaxis_chart:
 | 
					 | 
				
			||||||
                self.xaxis_chart.hideAxis('bottom')
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # presuming we only want it at the true bottom of all charts.
 | 
					            # presuming we only want it at the true bottom of all charts.
 | 
				
			||||||
            # XXX: uses new api from our ``pyqtgraph`` fork.
 | 
					            # XXX: uses new api from our ``pyqtgraph`` fork.
 | 
				
			||||||
            # https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master
 | 
					            # https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master
 | 
				
			||||||
            # _ = self.xaxis_chart.removeAxis('bottom', unlink=False)
 | 
					            # _ = self.xaxis_chart.removeAxis('bottom', unlink=False)
 | 
				
			||||||
            # assert 'bottom' not in self.xaxis_chart.plotItem.axes
 | 
					            # assert 'bottom' not in self.xaxis_chart.plotItem.axes
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            self.xaxis_chart = cpw
 | 
					            self.xaxis_chart = cpw
 | 
				
			||||||
            cpw.showAxis('bottom')
 | 
					            cpw.showAxis('bottom')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if qframe is not None:
 | 
					        if self.xaxis_chart is None:
 | 
				
			||||||
            qframe.chart = cpw
 | 
					            self.xaxis_chart = cpw
 | 
				
			||||||
            qframe.hbox.addWidget(cpw)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # so we can look this up and add back to the splitter
 | 
					        qframe.chart = cpw
 | 
				
			||||||
            # on a symbol switch
 | 
					        qframe.hbox.addWidget(cpw)
 | 
				
			||||||
            cpw.qframe = qframe
 | 
					 | 
				
			||||||
            assert cpw.parent() == qframe
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # add sidepane **after** chart; place it on axis side
 | 
					        # so we can look this up and add back to the splitter
 | 
				
			||||||
            qframe.set_sidepane(sidepane)
 | 
					        # on a symbol switch
 | 
				
			||||||
            # qframe.hbox.addWidget(
 | 
					        cpw.qframe = qframe
 | 
				
			||||||
            #     sidepane,
 | 
					        assert cpw.parent() == qframe
 | 
				
			||||||
            #     alignment=Qt.AlignTop
 | 
					 | 
				
			||||||
            # )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            cpw.sidepane = sidepane
 | 
					        # add sidepane **after** chart; place it on axis side
 | 
				
			||||||
 | 
					        qframe.hbox.addWidget(
 | 
				
			||||||
 | 
					            sidepane,
 | 
				
			||||||
 | 
					            alignment=Qt.AlignTop
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
 | 
					        cpw.sidepane = sidepane
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        cpw.plotItem.vb.linked = self
 | 
					        cpw.plotItem.vb.linkedsplits = self
 | 
				
			||||||
        cpw.setFrameStyle(
 | 
					        cpw.setFrameStyle(
 | 
				
			||||||
            QtWidgets.QFrame.StyledPanel
 | 
					            QtWidgets.QFrame.StyledPanel
 | 
				
			||||||
            # | QtWidgets.QFrame.Plain
 | 
					            # | QtWidgets.QFrame.Plain
 | 
				
			||||||
| 
						 | 
					@ -739,8 +614,9 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
        if not _is_main:
 | 
					        if not _is_main:
 | 
				
			||||||
            # track by name
 | 
					            # track by name
 | 
				
			||||||
            self.subplots[name] = cpw
 | 
					            self.subplots[name] = cpw
 | 
				
			||||||
            if qframe is not None:
 | 
					            self.splitter.addWidget(qframe)
 | 
				
			||||||
                self.splitter.addWidget(qframe)
 | 
					            # scale split regions
 | 
				
			||||||
 | 
					            self.set_split_sizes()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            assert style == 'bar', 'main chart must be OHLC'
 | 
					            assert style == 'bar', 'main chart must be OHLC'
 | 
				
			||||||
| 
						 | 
					@ -766,28 +642,19 @@ class LinkedSplits(QWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def resize_sidepanes(
 | 
					    def resize_sidepanes(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        from_linked: Optional[LinkedSplits] = None,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        Size all sidepanes based on the OHLC "main" plot and its
 | 
					        Size all sidepanes based on the OHLC "main" plot and its
 | 
				
			||||||
        sidepane width.
 | 
					        sidepane width.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        if from_linked:
 | 
					        main_chart = self.chart
 | 
				
			||||||
            main_chart = from_linked.chart
 | 
					        if main_chart:
 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            main_chart = self.chart
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if main_chart and main_chart.sidepane:
 | 
					 | 
				
			||||||
            sp_w = main_chart.sidepane.width()
 | 
					            sp_w = main_chart.sidepane.width()
 | 
				
			||||||
            for name, cpw in self.subplots.items():
 | 
					            for name, cpw in self.subplots.items():
 | 
				
			||||||
                cpw.sidepane.setMinimumWidth(sp_w)
 | 
					                cpw.sidepane.setMinimumWidth(sp_w)
 | 
				
			||||||
                cpw.sidepane.setMaximumWidth(sp_w)
 | 
					                cpw.sidepane.setMaximumWidth(sp_w)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if from_linked:
 | 
					 | 
				
			||||||
                self.chart.sidepane.setMinimumWidth(sp_w)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
class ChartPlotWidget(pg.PlotWidget):
 | 
					class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
| 
						 | 
					@ -814,8 +681,7 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
    # a better one?
 | 
					    # a better one?
 | 
				
			||||||
    def mk_vb(self, name: str) -> ChartView:
 | 
					    def mk_vb(self, name: str) -> ChartView:
 | 
				
			||||||
        cv = ChartView(name)
 | 
					        cv = ChartView(name)
 | 
				
			||||||
        # link new view to chart's view set
 | 
					        cv.linkedsplits = self.linked
 | 
				
			||||||
        cv.linked = self.linked
 | 
					 | 
				
			||||||
        return cv
 | 
					        return cv
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
| 
						 | 
					@ -834,7 +700,6 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        static_yrange: Optional[tuple[float, float]] = None,
 | 
					        static_yrange: Optional[tuple[float, float]] = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        parent=None,
 | 
					 | 
				
			||||||
        **kwargs,
 | 
					        **kwargs,
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
| 
						 | 
					@ -847,20 +712,16 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # NOTE: must be set bfore calling ``.mk_vb()``
 | 
					        # NOTE: must be set bfore calling ``.mk_vb()``
 | 
				
			||||||
        self.linked = linkedsplits
 | 
					        self.linked = linkedsplits
 | 
				
			||||||
        self.sidepane: Optional[FieldsForm] = None
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # source of our custom interactions
 | 
					        # source of our custom interactions
 | 
				
			||||||
        self.cv = cv = self.mk_vb(name)
 | 
					        self.cv = cv = self.mk_vb(name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        pi = pgo.PlotItem(viewBox=cv, **kwargs)
 | 
					 | 
				
			||||||
        super().__init__(
 | 
					        super().__init__(
 | 
				
			||||||
            background=hcolor(view_color),
 | 
					            background=hcolor(view_color),
 | 
				
			||||||
            viewBox=cv,
 | 
					            viewBox=cv,
 | 
				
			||||||
            # parent=None,
 | 
					            # parent=None,
 | 
				
			||||||
            # plotItem=None,
 | 
					            # plotItem=None,
 | 
				
			||||||
            # antialias=True,
 | 
					            # antialias=True,
 | 
				
			||||||
            parent=parent,
 | 
					 | 
				
			||||||
            plotItem=pi,
 | 
					 | 
				
			||||||
            **kwargs
 | 
					            **kwargs
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        # give viewbox as reference to chart
 | 
					        # give viewbox as reference to chart
 | 
				
			||||||
| 
						 | 
					@ -899,18 +760,9 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
 | 
					        self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # indempotent startup flag for auto-yrange subsys
 | 
					 | 
				
			||||||
        # to detect the "first time" y-domain graphics begin
 | 
					 | 
				
			||||||
        # to be shown in the (main) graphics view.
 | 
					 | 
				
			||||||
        self._on_screen: bool = False
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def resume_all_feeds(self):
 | 
					    def resume_all_feeds(self):
 | 
				
			||||||
        try:
 | 
					        for feed in self._feeds.values():
 | 
				
			||||||
            for feed in self._feeds.values():
 | 
					            self.linked.godwidget._root_n.start_soon(feed.resume)
 | 
				
			||||||
                self.linked.godwidget._root_n.start_soon(feed.resume)
 | 
					 | 
				
			||||||
        except RuntimeError:
 | 
					 | 
				
			||||||
            # TODO: cancel the qtractor runtime here?
 | 
					 | 
				
			||||||
            raise
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def pause_all_feeds(self):
 | 
					    def pause_all_feeds(self):
 | 
				
			||||||
        for feed in self._feeds.values():
 | 
					        for feed in self._feeds.values():
 | 
				
			||||||
| 
						 | 
					@ -1007,9 +859,7 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def default_view(
 | 
					    def default_view(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        bars_from_y: int = int(616 * 3/8),
 | 
					        bars_from_y: int = 3000,
 | 
				
			||||||
        y_offset: int = 0,
 | 
					 | 
				
			||||||
        do_ds: bool = True,
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
| 
						 | 
					@ -1047,12 +897,8 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
        # terms now that we've scaled either by user control
 | 
					        # terms now that we've scaled either by user control
 | 
				
			||||||
        # or to the default set of bars as per the immediate block
 | 
					        # or to the default set of bars as per the immediate block
 | 
				
			||||||
        # above.
 | 
					        # above.
 | 
				
			||||||
        if not y_offset:
 | 
					        marker_pos, l1_len = self.pre_l1_xs()
 | 
				
			||||||
            marker_pos, l1_len = self.pre_l1_xs()
 | 
					        end = xlast + l1_len + 1
 | 
				
			||||||
            end = xlast + l1_len + 1
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            end = xlast + y_offset + 1
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        begin = end - (r - l)
 | 
					        begin = end - (r - l)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # for debugging
 | 
					        # for debugging
 | 
				
			||||||
| 
						 | 
					@ -1074,11 +920,8 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
            max=end,
 | 
					            max=end,
 | 
				
			||||||
            padding=0,
 | 
					            padding=0,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					        self.view.maybe_downsample_graphics()
 | 
				
			||||||
        if do_ds:
 | 
					        view._set_yrange()
 | 
				
			||||||
            self.view.maybe_downsample_graphics()
 | 
					 | 
				
			||||||
            view._set_yrange()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        try:
 | 
					        try:
 | 
				
			||||||
            self.linked.graphics_cycle()
 | 
					            self.linked.graphics_cycle()
 | 
				
			||||||
        except IndexError:
 | 
					        except IndexError:
 | 
				
			||||||
| 
						 | 
					@ -1151,7 +994,7 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
        axis_side: str = 'right',
 | 
					        axis_side: str = 'right',
 | 
				
			||||||
        axis_kwargs: dict = {},
 | 
					        axis_kwargs: dict = {},
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> pgo.PlotItem:
 | 
					    ) -> pg.PlotItem:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # Custom viewbox impl
 | 
					        # Custom viewbox impl
 | 
				
			||||||
        cv = self.mk_vb(name)
 | 
					        cv = self.mk_vb(name)
 | 
				
			||||||
| 
						 | 
					@ -1160,14 +1003,13 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
        allowed_sides = {'left', 'right'}
 | 
					        allowed_sides = {'left', 'right'}
 | 
				
			||||||
        if axis_side not in allowed_sides:
 | 
					        if axis_side not in allowed_sides:
 | 
				
			||||||
            raise ValueError(f'``axis_side``` must be in {allowed_sides}')
 | 
					            raise ValueError(f'``axis_side``` must be in {allowed_sides}')
 | 
				
			||||||
 | 
					 | 
				
			||||||
        yaxis = PriceAxis(
 | 
					        yaxis = PriceAxis(
 | 
				
			||||||
            orientation=axis_side,
 | 
					            orientation=axis_side,
 | 
				
			||||||
            linkedsplits=self.linked,
 | 
					            linkedsplits=self.linked,
 | 
				
			||||||
            **axis_kwargs,
 | 
					            **axis_kwargs,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        pi = pgo.PlotItem(
 | 
					        pi = pg.PlotItem(
 | 
				
			||||||
            parent=self.plotItem,
 | 
					            parent=self.plotItem,
 | 
				
			||||||
            name=name,
 | 
					            name=name,
 | 
				
			||||||
            enableMenu=False,
 | 
					            enableMenu=False,
 | 
				
			||||||
| 
						 | 
					@ -1180,27 +1022,19 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        pi.hideButtons()
 | 
					        pi.hideButtons()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # cv.enable_auto_yrange(self.view)
 | 
				
			||||||
 | 
					        cv.enable_auto_yrange()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # compose this new plot's graphics with the current chart's
 | 
					        # compose this new plot's graphics with the current chart's
 | 
				
			||||||
        # existing one but with separate axes as neede and specified.
 | 
					        # existing one but with separate axes as neede and specified.
 | 
				
			||||||
        self.pi_overlay.add_plotitem(
 | 
					        self.pi_overlay.add_plotitem(
 | 
				
			||||||
            pi,
 | 
					            pi,
 | 
				
			||||||
            index=index,
 | 
					            index=index,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # only link x-axes and
 | 
					            # only link x-axes,
 | 
				
			||||||
            # don't relay any ``ViewBox`` derived event
 | 
					 | 
				
			||||||
            # handlers since we only care about keeping charts
 | 
					 | 
				
			||||||
            # x-synced on interaction (at least for now).
 | 
					 | 
				
			||||||
            link_axes=(0,),
 | 
					            link_axes=(0,),
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # connect auto-yrange callbacks *from* this new
 | 
					 | 
				
			||||||
        # view **to** this parent and likewise *from* the
 | 
					 | 
				
			||||||
        # main/parent chart back *to* the created overlay.
 | 
					 | 
				
			||||||
        cv.enable_auto_yrange(src_vb=self.view)
 | 
					 | 
				
			||||||
        # makes it so that interaction on the new overlay will reflect
 | 
					 | 
				
			||||||
        # back on the main chart (which overlay was added to).
 | 
					 | 
				
			||||||
        self.view.enable_auto_yrange(src_vb=cv)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # add axis title
 | 
					        # add axis title
 | 
				
			||||||
        # TODO: do we want this API to still work?
 | 
					        # TODO: do we want this API to still work?
 | 
				
			||||||
        # raxis = pi.getAxis('right')
 | 
					        # raxis = pi.getAxis('right')
 | 
				
			||||||
| 
						 | 
					@ -1262,7 +1096,7 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: this probably needs its own method?
 | 
					        # TODO: this probably needs its own method?
 | 
				
			||||||
        if overlay:
 | 
					        if overlay:
 | 
				
			||||||
            if isinstance(overlay, pgo.PlotItem):
 | 
					            if isinstance(overlay, pg.PlotItem):
 | 
				
			||||||
                if overlay not in self.pi_overlay.overlays:
 | 
					                if overlay not in self.pi_overlay.overlays:
 | 
				
			||||||
                    raise RuntimeError(
 | 
					                    raise RuntimeError(
 | 
				
			||||||
                            f'{overlay} must be from `.plotitem_overlay()`'
 | 
					                            f'{overlay} must be from `.plotitem_overlay()`'
 | 
				
			||||||
| 
						 | 
					@ -1421,7 +1255,8 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
        If ``bars_range`` is provided use that range.
 | 
					        If ``bars_range`` is provided use that range.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        profiler = Profiler(
 | 
					        # print(f'Chart[{self.name}].maxmin()')
 | 
				
			||||||
 | 
					        profiler = pg.debug.Profiler(
 | 
				
			||||||
            msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
 | 
					            msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
 | 
				
			||||||
            disabled=not pg_profile_enabled(),
 | 
					            disabled=not pg_profile_enabled(),
 | 
				
			||||||
            ms_threshold=ms_slower_then,
 | 
					            ms_threshold=ms_slower_then,
 | 
				
			||||||
| 
						 | 
					@ -1452,18 +1287,11 @@ class ChartPlotWidget(pg.PlotWidget):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            key = round(lbar), round(rbar)
 | 
					            key = round(lbar), round(rbar)
 | 
				
			||||||
            res = flow.maxmin(*key)
 | 
					            res = flow.maxmin(*key)
 | 
				
			||||||
 | 
					            if res == (None, None):
 | 
				
			||||||
            if (
 | 
					                log.error(
 | 
				
			||||||
                res is None
 | 
					 | 
				
			||||||
            ):
 | 
					 | 
				
			||||||
                log.warning(
 | 
					 | 
				
			||||||
                    f"{flow_key} no mxmn for bars_range => {key} !?"
 | 
					                    f"{flow_key} no mxmn for bars_range => {key} !?"
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
                res = 0, 0
 | 
					                res = 0, 0
 | 
				
			||||||
                if not self._on_screen:
 | 
					 | 
				
			||||||
                    self.default_view(do_ds=False)
 | 
					 | 
				
			||||||
                    self._on_screen = True
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        profiler(f'yrange mxmn: {key} -> {res}')
 | 
					        profiler(f'yrange mxmn: {key} -> {res}')
 | 
				
			||||||
        # print(f'{flow_key} yrange mxmn: {key} -> {res}')
 | 
					 | 
				
			||||||
        return res
 | 
					        return res
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -223,20 +223,14 @@ def ds_m4(
 | 
				
			||||||
    assert frames >= (xrange / uppx)
 | 
					    assert frames >= (xrange / uppx)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # call into ``numba``
 | 
					    # call into ``numba``
 | 
				
			||||||
    (
 | 
					    nb, i_win, y_out = _m4(
 | 
				
			||||||
        nb,
 | 
					 | 
				
			||||||
        x_out,
 | 
					 | 
				
			||||||
        y_out,
 | 
					 | 
				
			||||||
        ymn,
 | 
					 | 
				
			||||||
        ymx,
 | 
					 | 
				
			||||||
    ) = _m4(
 | 
					 | 
				
			||||||
        x,
 | 
					        x,
 | 
				
			||||||
        y,
 | 
					        y,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        frames,
 | 
					        frames,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: see func below..
 | 
					        # TODO: see func below..
 | 
				
			||||||
        # x_out,
 | 
					        # i_win,
 | 
				
			||||||
        # y_out,
 | 
					        # y_out,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # first index in x data to start at
 | 
					        # first index in x data to start at
 | 
				
			||||||
| 
						 | 
					@ -249,11 +243,10 @@ def ds_m4(
 | 
				
			||||||
    # filter out any overshoot in the input allocation arrays by
 | 
					    # filter out any overshoot in the input allocation arrays by
 | 
				
			||||||
    # removing zero-ed tail entries which should start at a certain
 | 
					    # removing zero-ed tail entries which should start at a certain
 | 
				
			||||||
    # index.
 | 
					    # index.
 | 
				
			||||||
    x_out = x_out[x_out != 0]
 | 
					    i_win = i_win[i_win != 0]
 | 
				
			||||||
    y_out = y_out[:x_out.size]
 | 
					    y_out = y_out[:i_win.size]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # print(f'M4 output ymn, ymx: {ymn},{ymx}')
 | 
					    return nb, i_win, y_out
 | 
				
			||||||
    return nb, x_out, y_out, ymn, ymx
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@jit(
 | 
					@jit(
 | 
				
			||||||
| 
						 | 
					@ -267,8 +260,8 @@ def _m4(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    frames: int,
 | 
					    frames: int,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO: using this approach, having the ``.zeros()`` alloc lines
 | 
					    # TODO: using this approach by having the ``.zeros()`` alloc lines
 | 
				
			||||||
    # below in pure python, there were segs faults and alloc crashes..
 | 
					    # below, in put python was causing segs faults and alloc crashes..
 | 
				
			||||||
    # we might need to see how it behaves with shm arrays and consider
 | 
					    # we might need to see how it behaves with shm arrays and consider
 | 
				
			||||||
    # allocating them once at startup?
 | 
					    # allocating them once at startup?
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -281,22 +274,14 @@ def _m4(
 | 
				
			||||||
    x_start: int,
 | 
					    x_start: int,
 | 
				
			||||||
    step: float,
 | 
					    step: float,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> tuple[
 | 
					) -> int:
 | 
				
			||||||
    int,
 | 
					    # nbins = len(i_win)
 | 
				
			||||||
    np.ndarray,
 | 
					    # count = len(xs)
 | 
				
			||||||
    np.ndarray,
 | 
					 | 
				
			||||||
    float,
 | 
					 | 
				
			||||||
    float,
 | 
					 | 
				
			||||||
]:
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Implementation of the m4 algorithm in ``numba``:
 | 
					 | 
				
			||||||
    http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    # these are pre-allocated and mutated by ``numba``
 | 
					    # these are pre-allocated and mutated by ``numba``
 | 
				
			||||||
    # code in-place.
 | 
					    # code in-place.
 | 
				
			||||||
    y_out = np.zeros((frames, 4), ys.dtype)
 | 
					    y_out = np.zeros((frames, 4), ys.dtype)
 | 
				
			||||||
    x_out = np.zeros(frames, xs.dtype)
 | 
					    i_win = np.zeros(frames, xs.dtype)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    bincount = 0
 | 
					    bincount = 0
 | 
				
			||||||
    x_left = x_start
 | 
					    x_left = x_start
 | 
				
			||||||
| 
						 | 
					@ -310,34 +295,24 @@ def _m4(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # set all bins in the left-most entry to the starting left-most x value
 | 
					    # set all bins in the left-most entry to the starting left-most x value
 | 
				
			||||||
    # (aka a row broadcast).
 | 
					    # (aka a row broadcast).
 | 
				
			||||||
    x_out[bincount] = x_left
 | 
					    i_win[bincount] = x_left
 | 
				
			||||||
    # set all y-values to the first value passed in.
 | 
					    # set all y-values to the first value passed in.
 | 
				
			||||||
    y_out[bincount] = ys[0]
 | 
					    y_out[bincount] = ys[0]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # full input y-data mx and mn
 | 
					 | 
				
			||||||
    mx: float = -np.inf
 | 
					 | 
				
			||||||
    mn: float = np.inf
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # compute OHLC style max / min values per window sized x-frame.
 | 
					 | 
				
			||||||
    for i in range(len(xs)):
 | 
					    for i in range(len(xs)):
 | 
				
			||||||
 | 
					 | 
				
			||||||
        x = xs[i]
 | 
					        x = xs[i]
 | 
				
			||||||
        y = ys[i]
 | 
					        y = ys[i]
 | 
				
			||||||
 | 
					 | 
				
			||||||
        if x < x_left + step:   # the current window "step" is [bin, bin+1)
 | 
					        if x < x_left + step:   # the current window "step" is [bin, bin+1)
 | 
				
			||||||
            ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
 | 
					            y_out[bincount, 1] = min(y, y_out[bincount, 1])
 | 
				
			||||||
            ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
 | 
					            y_out[bincount, 2] = max(y, y_out[bincount, 2])
 | 
				
			||||||
            y_out[bincount, 3] = y
 | 
					            y_out[bincount, 3] = y
 | 
				
			||||||
            mx = max(mx, ymx)
 | 
					 | 
				
			||||||
            mn = min(mn, ymn)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            # Find the next bin
 | 
					            # Find the next bin
 | 
				
			||||||
            while x >= x_left + step:
 | 
					            while x >= x_left + step:
 | 
				
			||||||
                x_left += step
 | 
					                x_left += step
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            bincount += 1
 | 
					            bincount += 1
 | 
				
			||||||
            x_out[bincount] = x_left
 | 
					            i_win[bincount] = x_left
 | 
				
			||||||
            y_out[bincount] = y
 | 
					            y_out[bincount] = y
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    return bincount, x_out, y_out, mn, mx
 | 
					    return bincount, i_win, y_out
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -18,13 +18,8 @@
 | 
				
			||||||
Mouse interaction graphics
 | 
					Mouse interaction graphics
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from __future__ import annotations
 | 
					 | 
				
			||||||
from functools import partial
 | 
					from functools import partial
 | 
				
			||||||
from typing import (
 | 
					from typing import Optional, Callable
 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Callable,
 | 
					 | 
				
			||||||
    TYPE_CHECKING,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
import inspect
 | 
					import inspect
 | 
				
			||||||
import numpy as np
 | 
					import numpy as np
 | 
				
			||||||
| 
						 | 
					@ -41,12 +36,6 @@ from ._style import (
 | 
				
			||||||
from ._axes import YAxisLabel, XAxisLabel
 | 
					from ._axes import YAxisLabel, XAxisLabel
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
 | 
					
 | 
				
			||||||
if TYPE_CHECKING:
 | 
					 | 
				
			||||||
    from ._chart import (
 | 
					 | 
				
			||||||
        ChartPlotWidget,
 | 
					 | 
				
			||||||
        LinkedSplits,
 | 
					 | 
				
			||||||
    )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -69,7 +58,7 @@ class LineDot(pg.CurvePoint):
 | 
				
			||||||
        curve: pg.PlotCurveItem,
 | 
					        curve: pg.PlotCurveItem,
 | 
				
			||||||
        index: int,
 | 
					        index: int,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        plot: ChartPlotWidget,  # type: ingore # noqa
 | 
					        plot: 'ChartPlotWidget',  # type: ingore # noqa
 | 
				
			||||||
        pos=None,
 | 
					        pos=None,
 | 
				
			||||||
        color: str = 'default_light',
 | 
					        color: str = 'default_light',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -162,7 +151,7 @@ class ContentsLabel(pg.LabelItem):
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # chart: ChartPlotWidget,  # noqa
 | 
					        # chart: 'ChartPlotWidget',  # noqa
 | 
				
			||||||
        view: pg.ViewBox,
 | 
					        view: pg.ViewBox,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        anchor_at: str = ('top', 'right'),
 | 
					        anchor_at: str = ('top', 'right'),
 | 
				
			||||||
| 
						 | 
					@ -255,7 +244,7 @@ class ContentsLabels:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        linkedsplits: LinkedSplits,  # type: ignore # noqa
 | 
					        linkedsplits: 'LinkedSplits',  # type: ignore # noqa
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -300,7 +289,7 @@ class ContentsLabels:
 | 
				
			||||||
    def add_label(
 | 
					    def add_label(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        chart: ChartPlotWidget,  # type: ignore # noqa
 | 
					        chart: 'ChartPlotWidget',  # type: ignore # noqa
 | 
				
			||||||
        name: str,
 | 
					        name: str,
 | 
				
			||||||
        anchor_at: tuple[str, str] = ('top', 'left'),
 | 
					        anchor_at: tuple[str, str] = ('top', 'left'),
 | 
				
			||||||
        update_func: Callable = ContentsLabel.update_from_value,
 | 
					        update_func: Callable = ContentsLabel.update_from_value,
 | 
				
			||||||
| 
						 | 
					@ -327,7 +316,7 @@ class Cursor(pg.GraphicsObject):
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        linkedsplits: LinkedSplits,  # noqa
 | 
					        linkedsplits: 'LinkedSplits',  # noqa
 | 
				
			||||||
        digits: int = 0
 | 
					        digits: int = 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
| 
						 | 
					@ -336,8 +325,6 @@ class Cursor(pg.GraphicsObject):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.linked = linkedsplits
 | 
					        self.linked = linkedsplits
 | 
				
			||||||
        self.graphics: dict[str, pg.GraphicsObject] = {}
 | 
					        self.graphics: dict[str, pg.GraphicsObject] = {}
 | 
				
			||||||
        self.xaxis_label: Optional[XAxisLabel] = None
 | 
					 | 
				
			||||||
        self.always_show_xlabel: bool = True
 | 
					 | 
				
			||||||
        self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
 | 
					        self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
 | 
				
			||||||
        self.active_plot = None
 | 
					        self.active_plot = None
 | 
				
			||||||
        self.digits: int = digits
 | 
					        self.digits: int = digits
 | 
				
			||||||
| 
						 | 
					@ -398,7 +385,7 @@ class Cursor(pg.GraphicsObject):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def add_plot(
 | 
					    def add_plot(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        plot: ChartPlotWidget,  # noqa
 | 
					        plot: 'ChartPlotWidget',  # noqa
 | 
				
			||||||
        digits: int = 0,
 | 
					        digits: int = 0,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
| 
						 | 
					@ -482,7 +469,7 @@ class Cursor(pg.GraphicsObject):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def add_curve_cursor(
 | 
					    def add_curve_cursor(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        plot: ChartPlotWidget,  # noqa
 | 
					        plot: 'ChartPlotWidget',  # noqa
 | 
				
			||||||
        curve: 'PlotCurveItem',  # noqa
 | 
					        curve: 'PlotCurveItem',  # noqa
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> LineDot:
 | 
					    ) -> LineDot:
 | 
				
			||||||
| 
						 | 
					@ -504,29 +491,17 @@ class Cursor(pg.GraphicsObject):
 | 
				
			||||||
        log.debug(f"{(action, plot.name)}")
 | 
					        log.debug(f"{(action, plot.name)}")
 | 
				
			||||||
        if action == 'Enter':
 | 
					        if action == 'Enter':
 | 
				
			||||||
            self.active_plot = plot
 | 
					            self.active_plot = plot
 | 
				
			||||||
            plot.linked.godwidget._active_cursor = self
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # show horiz line and y-label
 | 
					            # show horiz line and y-label
 | 
				
			||||||
            self.graphics[plot]['hl'].show()
 | 
					            self.graphics[plot]['hl'].show()
 | 
				
			||||||
            self.graphics[plot]['yl'].show()
 | 
					            self.graphics[plot]['yl'].show()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if (
 | 
					        else:  # Leave
 | 
				
			||||||
                not self.always_show_xlabel
 | 
					 | 
				
			||||||
                and not self.xaxis_label.isVisible()
 | 
					 | 
				
			||||||
            ):
 | 
					 | 
				
			||||||
                self.xaxis_label.show()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # Leave: hide horiz line and y-label
 | 
					            # hide horiz line and y-label
 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            self.graphics[plot]['hl'].hide()
 | 
					            self.graphics[plot]['hl'].hide()
 | 
				
			||||||
            self.graphics[plot]['yl'].hide()
 | 
					            self.graphics[plot]['yl'].hide()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if (
 | 
					 | 
				
			||||||
                not self.always_show_xlabel
 | 
					 | 
				
			||||||
                and self.xaxis_label.isVisible()
 | 
					 | 
				
			||||||
            ):
 | 
					 | 
				
			||||||
                self.xaxis_label.hide()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def mouseMoved(
 | 
					    def mouseMoved(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        coords: tuple[QPointF],  # noqa
 | 
					        coords: tuple[QPointF],  # noqa
 | 
				
			||||||
| 
						 | 
					@ -615,17 +590,13 @@ class Cursor(pg.GraphicsObject):
 | 
				
			||||||
                            left_axis_width += left.width()
 | 
					                            left_axis_width += left.width()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # map back to abs (label-local) coordinates
 | 
					                # map back to abs (label-local) coordinates
 | 
				
			||||||
                if (
 | 
					                self.xaxis_label.update_label(
 | 
				
			||||||
                    self.always_show_xlabel
 | 
					                    abs_pos=(
 | 
				
			||||||
                    or self.xaxis_label.isVisible()
 | 
					                        plot.mapFromView(QPointF(vl_x, iy)) -
 | 
				
			||||||
                ):
 | 
					                        QPointF(left_axis_width, 0)
 | 
				
			||||||
                    self.xaxis_label.update_label(
 | 
					                    ),
 | 
				
			||||||
                        abs_pos=(
 | 
					                    value=ix,
 | 
				
			||||||
                            plot.mapFromView(QPointF(vl_x, iy)) -
 | 
					                )
 | 
				
			||||||
                            QPointF(left_axis_width, 0)
 | 
					 | 
				
			||||||
                        ),
 | 
					 | 
				
			||||||
                        value=ix,
 | 
					 | 
				
			||||||
                    )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self._datum_xy = ix, iy
 | 
					        self._datum_xy = ix, iy
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -44,7 +44,6 @@ from ._style import hcolor
 | 
				
			||||||
#     ds_m4,
 | 
					#     ds_m4,
 | 
				
			||||||
# )
 | 
					# )
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
| 
						 | 
					@ -332,7 +331,7 @@ class Curve(pg.GraphicsObject):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        profiler = Profiler(
 | 
					        profiler = pg.debug.Profiler(
 | 
				
			||||||
            msg=f'Curve.paint(): `{self._name}`',
 | 
					            msg=f'Curve.paint(): `{self._name}`',
 | 
				
			||||||
            disabled=not pg_profile_enabled(),
 | 
					            disabled=not pg_profile_enabled(),
 | 
				
			||||||
            ms_threshold=ms_slower_then,
 | 
					            ms_threshold=ms_slower_then,
 | 
				
			||||||
| 
						 | 
					@ -467,7 +466,7 @@ class StepCurve(Curve):
 | 
				
			||||||
    def sub_paint(
 | 
					    def sub_paint(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        p: QPainter,
 | 
					        p: QPainter,
 | 
				
			||||||
        profiler: Profiler,
 | 
					        profiler: pg.debug.Profiler,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
 | 
					        # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -21,20 +21,19 @@ this module ties together quote and computational (fsp) streams with
 | 
				
			||||||
graphics update methods via our custom ``pyqtgraph`` charting api.
 | 
					graphics update methods via our custom ``pyqtgraph`` charting api.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
'''
 | 
					'''
 | 
				
			||||||
 | 
					from dataclasses import dataclass
 | 
				
			||||||
from functools import partial
 | 
					from functools import partial
 | 
				
			||||||
import time
 | 
					import time
 | 
				
			||||||
from typing import Optional, Any, Callable
 | 
					from typing import Optional, Any, Callable
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					import numpy as np
 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
 | 
					import pendulum
 | 
				
			||||||
import pyqtgraph as pg
 | 
					import pyqtgraph as pg
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# from .. import brokers
 | 
					# from .. import brokers
 | 
				
			||||||
from ..data.feed import (
 | 
					from ..data.feed import open_feed
 | 
				
			||||||
    open_feed,
 | 
					 | 
				
			||||||
    Feed,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from ..data.types import Struct
 | 
					 | 
				
			||||||
from ._axes import YAxisLabel
 | 
					from ._axes import YAxisLabel
 | 
				
			||||||
from ._chart import (
 | 
					from ._chart import (
 | 
				
			||||||
    ChartPlotWidget,
 | 
					    ChartPlotWidget,
 | 
				
			||||||
| 
						 | 
					@ -42,36 +41,29 @@ from ._chart import (
 | 
				
			||||||
    GodWidget,
 | 
					    GodWidget,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from ._l1 import L1Labels
 | 
					from ._l1 import L1Labels
 | 
				
			||||||
from ._style import hcolor
 | 
					 | 
				
			||||||
from ._fsp import (
 | 
					from ._fsp import (
 | 
				
			||||||
    update_fsp_chart,
 | 
					    update_fsp_chart,
 | 
				
			||||||
    start_fsp_displays,
 | 
					    start_fsp_displays,
 | 
				
			||||||
    has_vlm,
 | 
					    has_vlm,
 | 
				
			||||||
    open_vlm_displays,
 | 
					    open_vlm_displays,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from ..data._sharedmem import (
 | 
					from ..data._sharedmem import ShmArray
 | 
				
			||||||
    ShmArray,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from ..data._source import tf_in_1s
 | 
					from ..data._source import tf_in_1s
 | 
				
			||||||
from ._forms import (
 | 
					from ._forms import (
 | 
				
			||||||
    FieldsForm,
 | 
					    FieldsForm,
 | 
				
			||||||
    mk_order_pane_layout,
 | 
					    mk_order_pane_layout,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from .order_mode import (
 | 
					from .order_mode import open_order_mode
 | 
				
			||||||
    open_order_mode,
 | 
					 | 
				
			||||||
    OrderMode,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from .._profile import (
 | 
					from .._profile import (
 | 
				
			||||||
    pg_profile_enabled,
 | 
					    pg_profile_enabled,
 | 
				
			||||||
    ms_slower_then,
 | 
					    ms_slower_then,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# TODO: load this from a config.toml!
 | 
					# TODO: load this from a config.toml!
 | 
				
			||||||
_quote_throttle_rate: int = 16  # Hz
 | 
					_quote_throttle_rate: int = 22  # Hz
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# a working tick-type-classes template
 | 
					# a working tick-type-classes template
 | 
				
			||||||
| 
						 | 
					@ -113,10 +105,6 @@ def chart_maxmin(
 | 
				
			||||||
    mn, mx = out
 | 
					    mn, mx = out
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    mx_vlm_in_view = 0
 | 
					    mx_vlm_in_view = 0
 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO: we need to NOT call this to avoid a manual
 | 
					 | 
				
			||||||
    # np.max/min trigger and especially on the vlm_chart
 | 
					 | 
				
			||||||
    # flows which aren't shown.. like vlm?
 | 
					 | 
				
			||||||
    if vlm_chart:
 | 
					    if vlm_chart:
 | 
				
			||||||
        out = vlm_chart.maxmin()
 | 
					        out = vlm_chart.maxmin()
 | 
				
			||||||
        if out:
 | 
					        if out:
 | 
				
			||||||
| 
						 | 
					@ -130,105 +118,39 @@ def chart_maxmin(
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class DisplayState(Struct):
 | 
					@dataclass
 | 
				
			||||||
 | 
					class DisplayState:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    Chart-local real-time graphics state container.
 | 
					    Chart-local real-time graphics state container.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    godwidget: GodWidget
 | 
					 | 
				
			||||||
    quotes: dict[str, Any]
 | 
					    quotes: dict[str, Any]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    maxmin: Callable
 | 
					    maxmin: Callable
 | 
				
			||||||
    ohlcv: ShmArray
 | 
					    ohlcv: ShmArray
 | 
				
			||||||
    hist_ohlcv: ShmArray
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # high level chart handles
 | 
					    # high level chart handles
 | 
				
			||||||
 | 
					    linked: LinkedSplits
 | 
				
			||||||
    chart: ChartPlotWidget
 | 
					    chart: ChartPlotWidget
 | 
				
			||||||
 | 
					    vlm_chart: ChartPlotWidget
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # axis labels
 | 
					    # axis labels
 | 
				
			||||||
    l1: L1Labels
 | 
					    l1: L1Labels
 | 
				
			||||||
    last_price_sticky: YAxisLabel
 | 
					    last_price_sticky: YAxisLabel
 | 
				
			||||||
    hist_last_price_sticky: YAxisLabel
 | 
					    vlm_sticky: YAxisLabel
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # misc state tracking
 | 
					    # misc state tracking
 | 
				
			||||||
    vars: dict[str, Any] = {
 | 
					    vars: dict[str, Any]
 | 
				
			||||||
        'tick_margin': 0,
 | 
					 | 
				
			||||||
        'i_last': 0,
 | 
					 | 
				
			||||||
        'i_last_append': 0,
 | 
					 | 
				
			||||||
        'last_mx_vlm': 0,
 | 
					 | 
				
			||||||
        'last_mx': 0,
 | 
					 | 
				
			||||||
        'last_mn': 0,
 | 
					 | 
				
			||||||
    }
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    vlm_chart: Optional[ChartPlotWidget] = None
 | 
					 | 
				
			||||||
    vlm_sticky: Optional[YAxisLabel] = None
 | 
					 | 
				
			||||||
    wap_in_history: bool = False
 | 
					    wap_in_history: bool = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def incr_info(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        chart: Optional[ChartPlotWidget] = None,
 | 
					 | 
				
			||||||
        shm: Optional[ShmArray] = None,
 | 
					 | 
				
			||||||
        state: Optional[dict] = None,  # pass in a copy if you don't
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        update_state: bool = True,
 | 
					 | 
				
			||||||
        update_uppx: float = 16,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> tuple:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        shm = shm or self.ohlcv
 | 
					 | 
				
			||||||
        chart = chart or self.chart
 | 
					 | 
				
			||||||
        state = state or self.vars
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if not update_state:
 | 
					 | 
				
			||||||
            state = state.copy()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # compute the first available graphic's x-units-per-pixel
 | 
					 | 
				
			||||||
        uppx = chart.view.x_uppx()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # NOTE: this used to be implemented in a dedicated
 | 
					 | 
				
			||||||
        # "increment task": ``check_for_new_bars()`` but it doesn't
 | 
					 | 
				
			||||||
        # make sense to do a whole task switch when we can just do
 | 
					 | 
				
			||||||
        # this simple index-diff and all the fsp sub-curve graphics
 | 
					 | 
				
			||||||
        # are diffed on each draw cycle anyway; so updates to the
 | 
					 | 
				
			||||||
        # "curve" length is already automatic.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # increment the view position by the sample offset.
 | 
					 | 
				
			||||||
        i_step = shm.index
 | 
					 | 
				
			||||||
        i_diff = i_step - state['i_last']
 | 
					 | 
				
			||||||
        state['i_last'] = i_step
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        append_diff = i_step - state['i_last_append']
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # update the "last datum" (aka extending the flow graphic with
 | 
					 | 
				
			||||||
        # new data) only if the number of unit steps is >= the number of
 | 
					 | 
				
			||||||
        # such unit steps per pixel (aka uppx). Iow, if the zoom level
 | 
					 | 
				
			||||||
        # is such that a datum(s) update to graphics wouldn't span
 | 
					 | 
				
			||||||
        # to a new pixel, we don't update yet.
 | 
					 | 
				
			||||||
        do_append = (append_diff >= uppx)
 | 
					 | 
				
			||||||
        if do_append:
 | 
					 | 
				
			||||||
            state['i_last_append'] = i_step
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        do_rt_update = uppx < update_uppx
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        _, _, _, r = chart.bars_range()
 | 
					 | 
				
			||||||
        liv = r >= i_step
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # TODO: pack this into a struct
 | 
					 | 
				
			||||||
        return (
 | 
					 | 
				
			||||||
            uppx,
 | 
					 | 
				
			||||||
            liv,
 | 
					 | 
				
			||||||
            do_append,
 | 
					 | 
				
			||||||
            i_diff,
 | 
					 | 
				
			||||||
            append_diff,
 | 
					 | 
				
			||||||
            do_rt_update,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
async def graphics_update_loop(
 | 
					async def graphics_update_loop(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    nurse: trio.Nursery,
 | 
					    linked: LinkedSplits,
 | 
				
			||||||
    godwidget: GodWidget,
 | 
					    stream: tractor.MsgStream,
 | 
				
			||||||
    feed: Feed,
 | 
					    ohlcv: np.ndarray,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    wap_in_history: bool = False,
 | 
					    wap_in_history: bool = False,
 | 
				
			||||||
    vlm_chart: Optional[ChartPlotWidget] = None,
 | 
					    vlm_chart: Optional[ChartPlotWidget] = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -249,29 +171,22 @@ async def graphics_update_loop(
 | 
				
			||||||
    #   of copying it from last bar's close
 | 
					    #   of copying it from last bar's close
 | 
				
			||||||
    # - 1-5 sec bar lookback-autocorrection like tws does?
 | 
					    # - 1-5 sec bar lookback-autocorrection like tws does?
 | 
				
			||||||
    #   (would require a background history checker task)
 | 
					    #   (would require a background history checker task)
 | 
				
			||||||
    linked: LinkedSplits = godwidget.rt_linked
 | 
					    display_rate = linked.godwidget.window.current_screen().refreshRate()
 | 
				
			||||||
    display_rate = godwidget.window.current_screen().refreshRate()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    fast_chart = linked.chart
 | 
					    chart = linked.chart
 | 
				
			||||||
    hist_chart = godwidget.hist_linked.chart
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ohlcv = feed.rt_shm
 | 
					 | 
				
			||||||
    hist_ohlcv = feed.hist_shm
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # update last price sticky
 | 
					    # update last price sticky
 | 
				
			||||||
    last_price_sticky = fast_chart._ysticks[fast_chart.name]
 | 
					    last_price_sticky = chart._ysticks[chart.name]
 | 
				
			||||||
    last_price_sticky.update_from_data(
 | 
					    last_price_sticky.update_from_data(
 | 
				
			||||||
        *ohlcv.array[-1][['index', 'close']]
 | 
					        *ohlcv.array[-1][['index', 'close']]
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    hist_last_price_sticky = hist_chart._ysticks[hist_chart.name]
 | 
					    if vlm_chart:
 | 
				
			||||||
    hist_last_price_sticky.update_from_data(
 | 
					        vlm_sticky = vlm_chart._ysticks['volume']
 | 
				
			||||||
        *hist_ohlcv.array[-1][['index', 'close']]
 | 
					 | 
				
			||||||
    )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    maxmin = partial(
 | 
					    maxmin = partial(
 | 
				
			||||||
        chart_maxmin,
 | 
					        chart_maxmin,
 | 
				
			||||||
        fast_chart,
 | 
					        chart,
 | 
				
			||||||
        ohlcv,
 | 
					        ohlcv,
 | 
				
			||||||
        vlm_chart,
 | 
					        vlm_chart,
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
| 
						 | 
					@ -285,15 +200,15 @@ async def graphics_update_loop(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    last, volume = ohlcv.array[-1][['close', 'volume']]
 | 
					    last, volume = ohlcv.array[-1][['close', 'volume']]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    symbol = fast_chart.linked.symbol
 | 
					    symbol = chart.linked.symbol
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    l1 = L1Labels(
 | 
					    l1 = L1Labels(
 | 
				
			||||||
        fast_chart,
 | 
					        chart,
 | 
				
			||||||
        # determine precision/decimal lengths
 | 
					        # determine precision/decimal lengths
 | 
				
			||||||
        digits=symbol.tick_size_digits,
 | 
					        digits=symbol.tick_size_digits,
 | 
				
			||||||
        size_digits=symbol.lot_size_digits,
 | 
					        size_digits=symbol.lot_size_digits,
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
    fast_chart._l1_labels = l1
 | 
					    chart._l1_labels = l1
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO:
 | 
					    # TODO:
 | 
				
			||||||
    # - in theory we should be able to read buffer data faster
 | 
					    # - in theory we should be able to read buffer data faster
 | 
				
			||||||
| 
						 | 
					@ -303,22 +218,46 @@ async def graphics_update_loop(
 | 
				
			||||||
    #   levels this might be dark volume we need to
 | 
					    #   levels this might be dark volume we need to
 | 
				
			||||||
    #   present differently -> likely dark vlm
 | 
					    #   present differently -> likely dark vlm
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    tick_size = fast_chart.linked.symbol.tick_size
 | 
					    tick_size = chart.linked.symbol.tick_size
 | 
				
			||||||
    tick_margin = 3 * tick_size
 | 
					    tick_margin = 3 * tick_size
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    fast_chart.show()
 | 
					    chart.show()
 | 
				
			||||||
 | 
					    # view = chart.view
 | 
				
			||||||
    last_quote = time.time()
 | 
					    last_quote = time.time()
 | 
				
			||||||
    i_last = ohlcv.index
 | 
					    i_last = ohlcv.index
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # async def iter_drain_quotes():
 | 
				
			||||||
 | 
					    #     # NOTE: all code below this loop is expected to be synchronous
 | 
				
			||||||
 | 
					    #     # and thus draw instructions are not picked up jntil the next
 | 
				
			||||||
 | 
					    #     # wait / iteration.
 | 
				
			||||||
 | 
					    #     async for quotes in stream:
 | 
				
			||||||
 | 
					    #         while True:
 | 
				
			||||||
 | 
					    #             try:
 | 
				
			||||||
 | 
					    #                 moar = stream.receive_nowait()
 | 
				
			||||||
 | 
					    #             except trio.WouldBlock:
 | 
				
			||||||
 | 
					    #                 yield quotes
 | 
				
			||||||
 | 
					    #                 break
 | 
				
			||||||
 | 
					    #             else:
 | 
				
			||||||
 | 
					    #                 for sym, quote in moar.items():
 | 
				
			||||||
 | 
					    #                     ticks_frame = quote.get('ticks')
 | 
				
			||||||
 | 
					    #                     if ticks_frame:
 | 
				
			||||||
 | 
					    #                         quotes[sym].setdefault(
 | 
				
			||||||
 | 
					    #                             'ticks', []).extend(ticks_frame)
 | 
				
			||||||
 | 
					    #                     print('pulled extra')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    #                 yield quotes
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # async for quotes in iter_drain_quotes():
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ds = linked.display_state = DisplayState(**{
 | 
					    ds = linked.display_state = DisplayState(**{
 | 
				
			||||||
        'godwidget': godwidget,
 | 
					 | 
				
			||||||
        'quotes': {},
 | 
					        'quotes': {},
 | 
				
			||||||
 | 
					        'linked': linked,
 | 
				
			||||||
        'maxmin': maxmin,
 | 
					        'maxmin': maxmin,
 | 
				
			||||||
        'ohlcv': ohlcv,
 | 
					        'ohlcv': ohlcv,
 | 
				
			||||||
        'hist_ohlcv': hist_ohlcv,
 | 
					        'chart': chart,
 | 
				
			||||||
        'chart': fast_chart,
 | 
					 | 
				
			||||||
        'last_price_sticky': last_price_sticky,
 | 
					        'last_price_sticky': last_price_sticky,
 | 
				
			||||||
        'hist_last_price_sticky': hist_last_price_sticky,
 | 
					        'vlm_chart': vlm_chart,
 | 
				
			||||||
 | 
					        'vlm_sticky': vlm_sticky,
 | 
				
			||||||
        'l1': l1,
 | 
					        'l1': l1,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        'vars': {
 | 
					        'vars': {
 | 
				
			||||||
| 
						 | 
					@ -331,69 +270,9 @@ async def graphics_update_loop(
 | 
				
			||||||
        }
 | 
					        }
 | 
				
			||||||
    })
 | 
					    })
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    if vlm_chart:
 | 
					    chart.default_view()
 | 
				
			||||||
        vlm_sticky = vlm_chart._ysticks['volume']
 | 
					 | 
				
			||||||
        ds.vlm_chart = vlm_chart
 | 
					 | 
				
			||||||
        ds.vlm_sticky = vlm_sticky
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    fast_chart.default_view()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO: probably factor this into some kinda `DisplayState`
 | 
					 | 
				
			||||||
    # API that can be reused at least in terms of pulling view
 | 
					 | 
				
			||||||
    # params (eg ``.bars_range()``).
 | 
					 | 
				
			||||||
    async def increment_history_view():
 | 
					 | 
				
			||||||
        i_last = hist_ohlcv.index
 | 
					 | 
				
			||||||
        state = ds.vars.copy() | {
 | 
					 | 
				
			||||||
            'i_last_append': i_last,
 | 
					 | 
				
			||||||
            'i_last': i_last,
 | 
					 | 
				
			||||||
        }
 | 
					 | 
				
			||||||
        _, hist_step_size_s, _ = feed.get_ds_info()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        async with feed.index_stream(
 | 
					 | 
				
			||||||
            # int(hist_step_size_s)
 | 
					 | 
				
			||||||
            # TODO: seems this is more reliable at keeping the slow
 | 
					 | 
				
			||||||
            # chart incremented in view more correctly?
 | 
					 | 
				
			||||||
            # - It might make sense to just inline this logic with the
 | 
					 | 
				
			||||||
            #   main display task? => it's a tradeoff of slower task
 | 
					 | 
				
			||||||
            #   wakeups/ctx switches verus logic checks (as normal)
 | 
					 | 
				
			||||||
            # - we need increment logic that only does the view shift
 | 
					 | 
				
			||||||
            #   call when the uppx permits/needs it
 | 
					 | 
				
			||||||
            int(1),
 | 
					 | 
				
			||||||
        ) as istream:
 | 
					 | 
				
			||||||
            async for msg in istream:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # check if slow chart needs an x-domain shift and/or
 | 
					 | 
				
			||||||
                # y-range resize.
 | 
					 | 
				
			||||||
                (
 | 
					 | 
				
			||||||
                    uppx,
 | 
					 | 
				
			||||||
                    liv,
 | 
					 | 
				
			||||||
                    do_append,
 | 
					 | 
				
			||||||
                    i_diff,
 | 
					 | 
				
			||||||
                    append_diff,
 | 
					 | 
				
			||||||
                    do_rt_update,
 | 
					 | 
				
			||||||
                ) = ds.incr_info(
 | 
					 | 
				
			||||||
                    chart=hist_chart,
 | 
					 | 
				
			||||||
                    shm=ds.hist_ohlcv,
 | 
					 | 
				
			||||||
                    state=state,
 | 
					 | 
				
			||||||
                    # update_state=False,
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
                # print(
 | 
					 | 
				
			||||||
                #     f'liv: {liv}\n'
 | 
					 | 
				
			||||||
                #     f'do_append: {do_append}\n'
 | 
					 | 
				
			||||||
                #     f'append_diff: {append_diff}\n'
 | 
					 | 
				
			||||||
                # )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                if (
 | 
					 | 
				
			||||||
                    do_append
 | 
					 | 
				
			||||||
                    and liv
 | 
					 | 
				
			||||||
                ):
 | 
					 | 
				
			||||||
                    hist_chart.increment_view(steps=i_diff)
 | 
					 | 
				
			||||||
                    hist_chart.view._set_yrange(yrange=hist_chart.maxmin())
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    nurse.start_soon(increment_history_view)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # main real-time quotes update loop
 | 
					    # main real-time quotes update loop
 | 
				
			||||||
    stream: tractor.MsgStream = feed.stream
 | 
					 | 
				
			||||||
    async for quotes in stream:
 | 
					    async for quotes in stream:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        ds.quotes = quotes
 | 
					        ds.quotes = quotes
 | 
				
			||||||
| 
						 | 
					@ -413,16 +292,15 @@ async def graphics_update_loop(
 | 
				
			||||||
        last_quote = time.time()
 | 
					        last_quote = time.time()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # chart isn't active/shown so skip render cycle and pause feed(s)
 | 
					        # chart isn't active/shown so skip render cycle and pause feed(s)
 | 
				
			||||||
        if fast_chart.linked.isHidden():
 | 
					        if chart.linked.isHidden():
 | 
				
			||||||
            # print('skipping update')
 | 
					            chart.pause_all_feeds()
 | 
				
			||||||
            fast_chart.pause_all_feeds()
 | 
					 | 
				
			||||||
            continue
 | 
					            continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # ic = fast_chart.view._ic
 | 
					        ic = chart.view._ic
 | 
				
			||||||
        # if ic:
 | 
					        if ic:
 | 
				
			||||||
        #     fast_chart.pause_all_feeds()
 | 
					            chart.pause_all_feeds()
 | 
				
			||||||
        #     await ic.wait()
 | 
					            await ic.wait()
 | 
				
			||||||
        #     fast_chart.resume_all_feeds()
 | 
					            chart.resume_all_feeds()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # sync call to update all graphics/UX components.
 | 
					        # sync call to update all graphics/UX components.
 | 
				
			||||||
        graphics_update_cycle(ds)
 | 
					        graphics_update_cycle(ds)
 | 
				
			||||||
| 
						 | 
					@ -439,10 +317,8 @@ def graphics_update_cycle(
 | 
				
			||||||
    # hopefully XD
 | 
					    # hopefully XD
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    chart = ds.chart
 | 
					    chart = ds.chart
 | 
				
			||||||
    # TODO: just pass this as a direct ref to avoid so many attr accesses?
 | 
					 | 
				
			||||||
    hist_chart = ds.godwidget.hist_linked.chart
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    profiler = Profiler(
 | 
					    profiler = pg.debug.Profiler(
 | 
				
			||||||
        msg=f'Graphics loop cycle for: `{chart.name}`',
 | 
					        msg=f'Graphics loop cycle for: `{chart.name}`',
 | 
				
			||||||
        delayed=True,
 | 
					        delayed=True,
 | 
				
			||||||
        disabled=not pg_profile_enabled(),
 | 
					        disabled=not pg_profile_enabled(),
 | 
				
			||||||
| 
						 | 
					@ -454,24 +330,53 @@ def graphics_update_cycle(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # unpack multi-referenced components
 | 
					    # unpack multi-referenced components
 | 
				
			||||||
    vlm_chart = ds.vlm_chart
 | 
					    vlm_chart = ds.vlm_chart
 | 
				
			||||||
 | 
					 | 
				
			||||||
    # rt "HFT" chart
 | 
					 | 
				
			||||||
    l1 = ds.l1
 | 
					    l1 = ds.l1
 | 
				
			||||||
    ohlcv = ds.ohlcv
 | 
					    ohlcv = ds.ohlcv
 | 
				
			||||||
    array = ohlcv.array
 | 
					    array = ohlcv.array
 | 
				
			||||||
 | 
					 | 
				
			||||||
    vars = ds.vars
 | 
					    vars = ds.vars
 | 
				
			||||||
    tick_margin = vars['tick_margin']
 | 
					    tick_margin = vars['tick_margin']
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    update_uppx = 16
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    for sym, quote in ds.quotes.items():
 | 
					    for sym, quote in ds.quotes.items():
 | 
				
			||||||
        (
 | 
					
 | 
				
			||||||
            uppx,
 | 
					        # compute the first available graphic's x-units-per-pixel
 | 
				
			||||||
            liv,
 | 
					        uppx = vlm_chart.view.x_uppx()
 | 
				
			||||||
            do_append,
 | 
					
 | 
				
			||||||
            i_diff,
 | 
					        # NOTE: vlm may be written by the ``brokerd`` backend
 | 
				
			||||||
            append_diff,
 | 
					        # event though a tick sample is not emitted.
 | 
				
			||||||
            do_rt_update,
 | 
					        # TODO: show dark trades differently
 | 
				
			||||||
        ) = ds.incr_info()
 | 
					        # https://github.com/pikers/piker/issues/116
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # NOTE: this used to be implemented in a dedicated
 | 
				
			||||||
 | 
					        # "increment task": ``check_for_new_bars()`` but it doesn't
 | 
				
			||||||
 | 
					        # make sense to do a whole task switch when we can just do
 | 
				
			||||||
 | 
					        # this simple index-diff and all the fsp sub-curve graphics
 | 
				
			||||||
 | 
					        # are diffed on each draw cycle anyway; so updates to the
 | 
				
			||||||
 | 
					        # "curve" length is already automatic.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # increment the view position by the sample offset.
 | 
				
			||||||
 | 
					        i_step = ohlcv.index
 | 
				
			||||||
 | 
					        i_diff = i_step - vars['i_last']
 | 
				
			||||||
 | 
					        vars['i_last'] = i_step
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        append_diff = i_step - vars['i_last_append']
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # update the "last datum" (aka extending the flow graphic with
 | 
				
			||||||
 | 
					        # new data) only if the number of unit steps is >= the number of
 | 
				
			||||||
 | 
					        # such unit steps per pixel (aka uppx). Iow, if the zoom level
 | 
				
			||||||
 | 
					        # is such that a datum(s) update to graphics wouldn't span
 | 
				
			||||||
 | 
					        # to a new pixel, we don't update yet.
 | 
				
			||||||
 | 
					        do_append = (append_diff >= uppx)
 | 
				
			||||||
 | 
					        if do_append:
 | 
				
			||||||
 | 
					            vars['i_last_append'] = i_step
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        do_rt_update = uppx < update_uppx
 | 
				
			||||||
 | 
					        # print(
 | 
				
			||||||
 | 
					        #     f'append_diff:{append_diff}\n'
 | 
				
			||||||
 | 
					        #     f'uppx:{uppx}\n'
 | 
				
			||||||
 | 
					        #     f'do_append: {do_append}'
 | 
				
			||||||
 | 
					        # )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: we should only run mxmn when we know
 | 
					        # TODO: we should only run mxmn when we know
 | 
				
			||||||
        # an update is due via ``do_append`` above.
 | 
					        # an update is due via ``do_append`` above.
 | 
				
			||||||
| 
						 | 
					@ -487,6 +392,8 @@ def graphics_update_cycle(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        profiler('`ds.maxmin()` call')
 | 
					        profiler('`ds.maxmin()` call')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        liv = r >= i_step  # the last datum is in view
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if (
 | 
					        if (
 | 
				
			||||||
            prepend_update_index is not None
 | 
					            prepend_update_index is not None
 | 
				
			||||||
            and lbar > prepend_update_index
 | 
					            and lbar > prepend_update_index
 | 
				
			||||||
| 
						 | 
					@ -501,11 +408,18 @@ def graphics_update_cycle(
 | 
				
			||||||
        # don't real-time "shift" the curve to the
 | 
					        # don't real-time "shift" the curve to the
 | 
				
			||||||
        # left unless we get one of the following:
 | 
					        # left unless we get one of the following:
 | 
				
			||||||
        if (
 | 
					        if (
 | 
				
			||||||
            (do_append and liv)
 | 
					            (
 | 
				
			||||||
 | 
					                # i_diff > 0  # no new sample step
 | 
				
			||||||
 | 
					                do_append
 | 
				
			||||||
 | 
					                # and uppx < 4  # chart is zoomed out very far
 | 
				
			||||||
 | 
					                and liv
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
            or trigger_all
 | 
					            or trigger_all
 | 
				
			||||||
        ):
 | 
					        ):
 | 
				
			||||||
 | 
					            # TODO: we should track and compute whether the last
 | 
				
			||||||
 | 
					            # pixel in a curve should show new data based on uppx
 | 
				
			||||||
 | 
					            # and then iff update curves and shift?
 | 
				
			||||||
            chart.increment_view(steps=i_diff)
 | 
					            chart.increment_view(steps=i_diff)
 | 
				
			||||||
            chart.view._set_yrange(yrange=(mn, mx))
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if vlm_chart:
 | 
					            if vlm_chart:
 | 
				
			||||||
                vlm_chart.increment_view(steps=i_diff)
 | 
					                vlm_chart.increment_view(steps=i_diff)
 | 
				
			||||||
| 
						 | 
					@ -563,10 +477,7 @@ def graphics_update_cycle(
 | 
				
			||||||
        ):
 | 
					        ):
 | 
				
			||||||
            chart.update_graphics_from_flow(
 | 
					            chart.update_graphics_from_flow(
 | 
				
			||||||
                chart.name,
 | 
					                chart.name,
 | 
				
			||||||
                do_append=do_append,
 | 
					                # do_append=uppx < update_uppx,
 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
            hist_chart.update_graphics_from_flow(
 | 
					 | 
				
			||||||
                chart.name,
 | 
					 | 
				
			||||||
                do_append=do_append,
 | 
					                do_append=do_append,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -606,9 +517,6 @@ def graphics_update_cycle(
 | 
				
			||||||
                ds.last_price_sticky.update_from_data(
 | 
					                ds.last_price_sticky.update_from_data(
 | 
				
			||||||
                    *end[['index', 'close']]
 | 
					                    *end[['index', 'close']]
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
                ds.hist_last_price_sticky.update_from_data(
 | 
					 | 
				
			||||||
                    *end[['index', 'close']]
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                if wap_in_history:
 | 
					                if wap_in_history:
 | 
				
			||||||
                    # update vwap overlay line
 | 
					                    # update vwap overlay line
 | 
				
			||||||
| 
						 | 
					@ -656,44 +564,26 @@ def graphics_update_cycle(
 | 
				
			||||||
                l1.bid_label.update_fields({'level': price, 'size': size})
 | 
					                l1.bid_label.update_fields({'level': price, 'size': size})
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # check for y-range re-size
 | 
					        # check for y-range re-size
 | 
				
			||||||
        if (mx > vars['last_mx']) or (mn < vars['last_mn']):
 | 
					        if (
 | 
				
			||||||
 | 
					            (mx > vars['last_mx']) or (mn < vars['last_mn'])
 | 
				
			||||||
            # fast chart resize case
 | 
					            and not chart._static_yrange == 'axis'
 | 
				
			||||||
 | 
					            and liv
 | 
				
			||||||
 | 
					        ):
 | 
				
			||||||
 | 
					            main_vb = chart.view
 | 
				
			||||||
            if (
 | 
					            if (
 | 
				
			||||||
                liv
 | 
					                main_vb._ic is None
 | 
				
			||||||
                and not chart._static_yrange == 'axis'
 | 
					                or not main_vb._ic.is_set()
 | 
				
			||||||
            ):
 | 
					            ):
 | 
				
			||||||
                main_vb = chart.view
 | 
					                # print(f'updating range due to mxmn')
 | 
				
			||||||
                if (
 | 
					                main_vb._set_yrange(
 | 
				
			||||||
                    main_vb._ic is None
 | 
					                    # TODO: we should probably scale
 | 
				
			||||||
                    or not main_vb._ic.is_set()
 | 
					                    # the view margin based on the size
 | 
				
			||||||
                ):
 | 
					                    # of the true range? This way you can
 | 
				
			||||||
                    # print(f'updating range due to mxmn')
 | 
					                    # slap in orders outside the current
 | 
				
			||||||
                    main_vb._set_yrange(
 | 
					                    # L1 (only) book range.
 | 
				
			||||||
                        # TODO: we should probably scale
 | 
					                    # range_margin=0.1,
 | 
				
			||||||
                        # the view margin based on the size
 | 
					                    yrange=(mn, mx),
 | 
				
			||||||
                        # of the true range? This way you can
 | 
					                )
 | 
				
			||||||
                        # slap in orders outside the current
 | 
					 | 
				
			||||||
                        # L1 (only) book range.
 | 
					 | 
				
			||||||
                        # range_margin=0.1,
 | 
					 | 
				
			||||||
                        yrange=(mn, mx),
 | 
					 | 
				
			||||||
                    )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # check if slow chart needs a resize
 | 
					 | 
				
			||||||
            (
 | 
					 | 
				
			||||||
                _,
 | 
					 | 
				
			||||||
                hist_liv,
 | 
					 | 
				
			||||||
                _,
 | 
					 | 
				
			||||||
                _,
 | 
					 | 
				
			||||||
                _,
 | 
					 | 
				
			||||||
                _,
 | 
					 | 
				
			||||||
            ) = ds.incr_info(
 | 
					 | 
				
			||||||
                chart=hist_chart,
 | 
					 | 
				
			||||||
                shm=ds.hist_ohlcv,
 | 
					 | 
				
			||||||
                update_state=False,
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
            if hist_liv:
 | 
					 | 
				
			||||||
                hist_chart.view._set_yrange(yrange=hist_chart.maxmin())
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # XXX: update this every draw cycle to make L1-always-in-view work.
 | 
					        # XXX: update this every draw cycle to make L1-always-in-view work.
 | 
				
			||||||
        vars['last_mx'], vars['last_mn'] = mx, mn
 | 
					        vars['last_mx'], vars['last_mn'] = mx, mn
 | 
				
			||||||
| 
						 | 
					@ -810,140 +700,6 @@ def graphics_update_cycle(
 | 
				
			||||||
                    flow.draw_last(array_key=curve_name)
 | 
					                    flow.draw_last(array_key=curve_name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
async def link_views_with_region(
 | 
					 | 
				
			||||||
    rt_chart: ChartPlotWidget,
 | 
					 | 
				
			||||||
    hist_chart: ChartPlotWidget,
 | 
					 | 
				
			||||||
    feed: Feed,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> None:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # these value are be only pulled once during shm init/startup
 | 
					 | 
				
			||||||
    izero_hist = feed.izero_hist
 | 
					 | 
				
			||||||
    izero_rt = feed.izero_rt
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # Add the LinearRegionItem to the ViewBox, but tell the ViewBox
 | 
					 | 
				
			||||||
    # to exclude this item when doing auto-range calculations.
 | 
					 | 
				
			||||||
    rt_pi = rt_chart.plotItem
 | 
					 | 
				
			||||||
    hist_pi = hist_chart.plotItem
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    region = pg.LinearRegionItem(
 | 
					 | 
				
			||||||
        movable=False,
 | 
					 | 
				
			||||||
        # color scheme that matches sidepane styling
 | 
					 | 
				
			||||||
        pen=pg.mkPen(hcolor('gunmetal')),
 | 
					 | 
				
			||||||
        brush=pg.mkBrush(hcolor('default_darkest')),
 | 
					 | 
				
			||||||
    )
 | 
					 | 
				
			||||||
    region.setZValue(10)  # put linear region "in front" in layer terms
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    hist_pi.addItem(region, ignoreBounds=True)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    flow = rt_chart._flows[hist_chart.name]
 | 
					 | 
				
			||||||
    assert flow
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # XXX: no idea why this doesn't work but it's causing
 | 
					 | 
				
			||||||
    # a weird placement of the region on the way-far-left..
 | 
					 | 
				
			||||||
    # region.setClipItem(flow.graphics)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # poll for datums load and timestep detection
 | 
					 | 
				
			||||||
    for _ in range(100):
 | 
					 | 
				
			||||||
        try:
 | 
					 | 
				
			||||||
            _, _, ratio = feed.get_ds_info()
 | 
					 | 
				
			||||||
            break
 | 
					 | 
				
			||||||
        except IndexError:
 | 
					 | 
				
			||||||
            await trio.sleep(0.01)
 | 
					 | 
				
			||||||
            continue
 | 
					 | 
				
			||||||
    else:
 | 
					 | 
				
			||||||
        raise RuntimeError(
 | 
					 | 
				
			||||||
            'Failed to detect sampling periods from shm!?')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # sampling rate transform math:
 | 
					 | 
				
			||||||
    # -----------------------------
 | 
					 | 
				
			||||||
    # define the fast chart to slow chart as a linear mapping
 | 
					 | 
				
			||||||
    # over the fast index domain `i` to the slow index domain
 | 
					 | 
				
			||||||
    # `j` as:
 | 
					 | 
				
			||||||
    #
 | 
					 | 
				
			||||||
    # j = i - i_offset
 | 
					 | 
				
			||||||
    #     ------------  + j_offset
 | 
					 | 
				
			||||||
    #         j/i
 | 
					 | 
				
			||||||
    #
 | 
					 | 
				
			||||||
    # conversely the inverse function is:
 | 
					 | 
				
			||||||
    #
 | 
					 | 
				
			||||||
    # i = j/i * (j - j_offset) + i_offset
 | 
					 | 
				
			||||||
    #
 | 
					 | 
				
			||||||
    # Where `j_offset` is our ``izero_hist`` and `i_offset` is our
 | 
					 | 
				
			||||||
    # `izero_rt`, the ``ShmArray`` offsets which correspond to the
 | 
					 | 
				
			||||||
    # indexes in each array where the "current" time is indexed at init.
 | 
					 | 
				
			||||||
    # AKA the index where new data is "appended to" and historical data
 | 
					 | 
				
			||||||
    # if "prepended from".
 | 
					 | 
				
			||||||
    #
 | 
					 | 
				
			||||||
    # more practically (and by default) `i` is normally an index
 | 
					 | 
				
			||||||
    # into 1s samples and `j` is an index into 60s samples (aka 1m).
 | 
					 | 
				
			||||||
    # in the below handlers ``ratio`` is the `j/i` and ``mn``/``mx``
 | 
					 | 
				
			||||||
    # are the low and high index input from the source index domain.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def update_region_from_pi(
 | 
					 | 
				
			||||||
        window,
 | 
					 | 
				
			||||||
        viewRange: tuple[tuple, tuple],
 | 
					 | 
				
			||||||
        is_manual: bool = True,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					 | 
				
			||||||
        # put linear region "in front" in layer terms
 | 
					 | 
				
			||||||
        region.setZValue(10)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # set the region on the history chart
 | 
					 | 
				
			||||||
        # to the range currently viewed in the
 | 
					 | 
				
			||||||
        # HFT/real-time chart.
 | 
					 | 
				
			||||||
        mn, mx = viewRange[0]
 | 
					 | 
				
			||||||
        ds_mn = (mn - izero_rt)/ratio
 | 
					 | 
				
			||||||
        ds_mx = (mx - izero_rt)/ratio
 | 
					 | 
				
			||||||
        lhmn = ds_mn + izero_hist
 | 
					 | 
				
			||||||
        lhmx = ds_mx + izero_hist
 | 
					 | 
				
			||||||
        # print(
 | 
					 | 
				
			||||||
        #     f'rt_view_range: {(mn, mx)}\n'
 | 
					 | 
				
			||||||
        #     f'ds_mn, ds_mx: {(ds_mn, ds_mx)}\n'
 | 
					 | 
				
			||||||
        #     f'lhmn, lhmx: {(lhmn, lhmx)}\n'
 | 
					 | 
				
			||||||
        # )
 | 
					 | 
				
			||||||
        region.setRegion((
 | 
					 | 
				
			||||||
            lhmn,
 | 
					 | 
				
			||||||
            lhmx,
 | 
					 | 
				
			||||||
        ))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # TODO: if we want to have the slow chart adjust range to
 | 
					 | 
				
			||||||
        # match the fast chart's selection -> results in the
 | 
					 | 
				
			||||||
        # linear region expansion never can go "outside of view".
 | 
					 | 
				
			||||||
        # hmn, hmx = hvr = hist_chart.view.state['viewRange'][0]
 | 
					 | 
				
			||||||
        # print((hmn, hmx))
 | 
					 | 
				
			||||||
        # if (
 | 
					 | 
				
			||||||
        #     hvr
 | 
					 | 
				
			||||||
        #     and (lhmn < hmn or lhmx > hmx)
 | 
					 | 
				
			||||||
        # ):
 | 
					 | 
				
			||||||
        #     hist_pi.setXRange(
 | 
					 | 
				
			||||||
        #         lhmn,
 | 
					 | 
				
			||||||
        #         lhmx,
 | 
					 | 
				
			||||||
        #         padding=0,
 | 
					 | 
				
			||||||
        #     )
 | 
					 | 
				
			||||||
        #     hist_linked.graphics_cycle()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # connect region to be updated on plotitem interaction.
 | 
					 | 
				
			||||||
    rt_pi.sigRangeChanged.connect(update_region_from_pi)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def update_pi_from_region():
 | 
					 | 
				
			||||||
        region.setZValue(10)
 | 
					 | 
				
			||||||
        mn, mx = region.getRegion()
 | 
					 | 
				
			||||||
        # print(f'region_x: {(mn, mx)}')
 | 
					 | 
				
			||||||
        rt_pi.setXRange(
 | 
					 | 
				
			||||||
            ((mn - izero_hist) * ratio) + izero_rt,
 | 
					 | 
				
			||||||
            ((mx - izero_hist) * ratio) + izero_rt,
 | 
					 | 
				
			||||||
            padding=0,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO BUG XXX: seems to cause a real perf hit and a recursion error
 | 
					 | 
				
			||||||
    # (but used to work before generalizing for 1s ohlc offset?)..
 | 
					 | 
				
			||||||
    # something to do with the label callback handlers?
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # region.sigRegionChanged.connect(update_pi_from_region)
 | 
					 | 
				
			||||||
    # region.sigRegionChangeFinished.connect(update_pi_from_region)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def display_symbol_data(
 | 
					async def display_symbol_data(
 | 
				
			||||||
    godwidget: GodWidget,
 | 
					    godwidget: GodWidget,
 | 
				
			||||||
    provider: str,
 | 
					    provider: str,
 | 
				
			||||||
| 
						 | 
					@ -985,13 +741,15 @@ async def display_symbol_data(
 | 
				
			||||||
        tick_throttle=_quote_throttle_rate,
 | 
					        tick_throttle=_quote_throttle_rate,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) as feed:
 | 
					    ) as feed:
 | 
				
			||||||
        ohlcv: ShmArray = feed.rt_shm
 | 
					        ohlcv: ShmArray = feed.shm
 | 
				
			||||||
        hist_ohlcv: ShmArray = feed.hist_shm
 | 
					        bars = ohlcv.array
 | 
				
			||||||
 | 
					 | 
				
			||||||
        symbol = feed.symbols[sym]
 | 
					        symbol = feed.symbols[sym]
 | 
				
			||||||
        fqsn = symbol.front_fqsn()
 | 
					        fqsn = symbol.front_fqsn()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        step_size_s = 1
 | 
					        times = bars['time']
 | 
				
			||||||
 | 
					        end = pendulum.from_timestamp(times[-1])
 | 
				
			||||||
 | 
					        start = pendulum.from_timestamp(times[times != times[-1]][-1])
 | 
				
			||||||
 | 
					        step_size_s = (end - start).seconds
 | 
				
			||||||
        tf_key = tf_in_1s[step_size_s]
 | 
					        tf_key = tf_in_1s[step_size_s]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # load in symbol's ohlc data
 | 
					        # load in symbol's ohlc data
 | 
				
			||||||
| 
						 | 
					@ -1001,84 +759,59 @@ async def display_symbol_data(
 | 
				
			||||||
            f'step:{tf_key} '
 | 
					            f'step:{tf_key} '
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        rt_linked = godwidget.rt_linked
 | 
					        linked = godwidget.linkedsplits
 | 
				
			||||||
        rt_linked._symbol = symbol
 | 
					        linked._symbol = symbol
 | 
				
			||||||
 | 
					 | 
				
			||||||
        # create top history view chart above the "main rt chart".
 | 
					 | 
				
			||||||
        hist_linked = godwidget.hist_linked
 | 
					 | 
				
			||||||
        hist_linked._symbol = symbol
 | 
					 | 
				
			||||||
        hist_chart = hist_linked.plot_ohlc_main(
 | 
					 | 
				
			||||||
            symbol,
 | 
					 | 
				
			||||||
            feed.hist_shm,
 | 
					 | 
				
			||||||
            # in the case of history chart we explicitly set `False`
 | 
					 | 
				
			||||||
            # to avoid internal pane creation.
 | 
					 | 
				
			||||||
            # sidepane=False,
 | 
					 | 
				
			||||||
            sidepane=godwidget.search,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
        # don't show when not focussed
 | 
					 | 
				
			||||||
        hist_linked.cursor.always_show_xlabel = False
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # generate order mode side-pane UI
 | 
					        # generate order mode side-pane UI
 | 
				
			||||||
        # A ``FieldsForm`` form to configure order entry
 | 
					        # A ``FieldsForm`` form to configure order entry
 | 
				
			||||||
        # and add as next-to-y-axis singleton pane
 | 
					 | 
				
			||||||
        pp_pane: FieldsForm = mk_order_pane_layout(godwidget)
 | 
					        pp_pane: FieldsForm = mk_order_pane_layout(godwidget)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # add as next-to-y-axis singleton pane
 | 
				
			||||||
        godwidget.pp_pane = pp_pane
 | 
					        godwidget.pp_pane = pp_pane
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # create main OHLC chart
 | 
					        # create main OHLC chart
 | 
				
			||||||
        chart = rt_linked.plot_ohlc_main(
 | 
					        chart = linked.plot_ohlc_main(
 | 
				
			||||||
            symbol,
 | 
					            symbol,
 | 
				
			||||||
            ohlcv,
 | 
					            ohlcv,
 | 
				
			||||||
            # in the case of history chart we explicitly set `False`
 | 
					 | 
				
			||||||
            # to avoid internal pane creation.
 | 
					 | 
				
			||||||
            sidepane=pp_pane,
 | 
					            sidepane=pp_pane,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					        chart.default_view()
 | 
				
			||||||
        chart._feeds[symbol.key] = feed
 | 
					        chart._feeds[symbol.key] = feed
 | 
				
			||||||
        chart.setFocus()
 | 
					        chart.setFocus()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # XXX: FOR SOME REASON THIS IS CAUSING HANGZ!?!
 | 
					 | 
				
			||||||
        # plot historical vwap if available
 | 
					        # plot historical vwap if available
 | 
				
			||||||
        wap_in_history = False
 | 
					        wap_in_history = False
 | 
				
			||||||
        # if (
 | 
					
 | 
				
			||||||
        #     brokermod._show_wap_in_history
 | 
					        # XXX: FOR SOME REASON THIS IS CAUSING HANGZ!?!
 | 
				
			||||||
        #     and 'bar_wap' in bars.dtype.fields
 | 
					        # if brokermod._show_wap_in_history:
 | 
				
			||||||
        # ):
 | 
					
 | 
				
			||||||
        #     wap_in_history = True
 | 
					        #     if 'bar_wap' in bars.dtype.fields:
 | 
				
			||||||
        #     chart.draw_curve(
 | 
					        #         wap_in_history = True
 | 
				
			||||||
        #         name='bar_wap',
 | 
					        #         chart.draw_curve(
 | 
				
			||||||
        #         shm=ohlcv,
 | 
					        #             name='bar_wap',
 | 
				
			||||||
        #         color='default_light',
 | 
					        #             shm=ohlcv,
 | 
				
			||||||
        #         add_label=False,
 | 
					        #             color='default_light',
 | 
				
			||||||
        #     )
 | 
					        #             add_label=False,
 | 
				
			||||||
 | 
					        #         )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # size view to data once at outset
 | 
				
			||||||
 | 
					        chart.cv._set_yrange()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # NOTE: we must immediately tell Qt to show the OHLC chart
 | 
					        # NOTE: we must immediately tell Qt to show the OHLC chart
 | 
				
			||||||
        # to avoid a race where the subplots get added/shown to
 | 
					        # to avoid a race where the subplots get added/shown to
 | 
				
			||||||
        # the linked set *before* the main price chart!
 | 
					        # the linked set *before* the main price chart!
 | 
				
			||||||
        rt_linked.show()
 | 
					        linked.show()
 | 
				
			||||||
        rt_linked.focus()
 | 
					        linked.focus()
 | 
				
			||||||
        await trio.sleep(0)
 | 
					        await trio.sleep(0)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # NOTE: here we insert the slow-history chart set into
 | 
					 | 
				
			||||||
        # the fast chart's splitter -> so it's a splitter of charts
 | 
					 | 
				
			||||||
        # inside the first widget slot of a splitter of charts XD
 | 
					 | 
				
			||||||
        rt_linked.splitter.insertWidget(0, hist_linked)
 | 
					 | 
				
			||||||
        # XXX: if we wanted it at the bottom?
 | 
					 | 
				
			||||||
        # rt_linked.splitter.addWidget(hist_linked)
 | 
					 | 
				
			||||||
        rt_linked.focus()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        godwidget.resize_all()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        vlm_chart: Optional[ChartPlotWidget] = None
 | 
					        vlm_chart: Optional[ChartPlotWidget] = None
 | 
				
			||||||
        async with trio.open_nursery() as ln:
 | 
					        async with trio.open_nursery() as ln:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # if available load volume related built-in display(s)
 | 
					            # if available load volume related built-in display(s)
 | 
				
			||||||
            if (
 | 
					            if has_vlm(ohlcv):
 | 
				
			||||||
                not symbol.broker_info[provider].get('no_vlm', False)
 | 
					 | 
				
			||||||
                and has_vlm(ohlcv)
 | 
					 | 
				
			||||||
            ):
 | 
					 | 
				
			||||||
                vlm_chart = await ln.start(
 | 
					                vlm_chart = await ln.start(
 | 
				
			||||||
                    open_vlm_displays,
 | 
					                    open_vlm_displays,
 | 
				
			||||||
                    rt_linked,
 | 
					                    linked,
 | 
				
			||||||
                    ohlcv,
 | 
					                    ohlcv,
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -1086,7 +819,7 @@ async def display_symbol_data(
 | 
				
			||||||
            # from an input config.
 | 
					            # from an input config.
 | 
				
			||||||
            ln.start_soon(
 | 
					            ln.start_soon(
 | 
				
			||||||
                start_fsp_displays,
 | 
					                start_fsp_displays,
 | 
				
			||||||
                rt_linked,
 | 
					                linked,
 | 
				
			||||||
                ohlcv,
 | 
					                ohlcv,
 | 
				
			||||||
                loading_sym_key,
 | 
					                loading_sym_key,
 | 
				
			||||||
                loglevel,
 | 
					                loglevel,
 | 
				
			||||||
| 
						 | 
					@ -1095,79 +828,36 @@ async def display_symbol_data(
 | 
				
			||||||
            # start graphics update loop after receiving first live quote
 | 
					            # start graphics update loop after receiving first live quote
 | 
				
			||||||
            ln.start_soon(
 | 
					            ln.start_soon(
 | 
				
			||||||
                graphics_update_loop,
 | 
					                graphics_update_loop,
 | 
				
			||||||
                ln,
 | 
					                linked,
 | 
				
			||||||
                godwidget,
 | 
					                feed.stream,
 | 
				
			||||||
                feed,
 | 
					                ohlcv,
 | 
				
			||||||
                wap_in_history,
 | 
					                wap_in_history,
 | 
				
			||||||
                vlm_chart,
 | 
					                vlm_chart,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            await trio.sleep(0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # size view to data prior to order mode init
 | 
					 | 
				
			||||||
            chart.default_view()
 | 
					 | 
				
			||||||
            rt_linked.graphics_cycle()
 | 
					 | 
				
			||||||
            await trio.sleep(0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            hist_chart.default_view(
 | 
					 | 
				
			||||||
                bars_from_y=int(len(hist_ohlcv.array)),  # size to data
 | 
					 | 
				
			||||||
                y_offset=6116*2,  # push it a little away from the y-axis
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
            hist_linked.graphics_cycle()
 | 
					 | 
				
			||||||
            await trio.sleep(0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            godwidget.resize_all()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            await link_views_with_region(
 | 
					 | 
				
			||||||
                chart,
 | 
					 | 
				
			||||||
                hist_chart,
 | 
					 | 
				
			||||||
                feed,
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            mode: OrderMode
 | 
					 | 
				
			||||||
            async with (
 | 
					            async with (
 | 
				
			||||||
                open_order_mode(
 | 
					                open_order_mode(
 | 
				
			||||||
                    feed,
 | 
					                    feed,
 | 
				
			||||||
                    godwidget,
 | 
					                    chart,
 | 
				
			||||||
                    fqsn,
 | 
					                    fqsn,
 | 
				
			||||||
                    order_mode_started
 | 
					                    order_mode_started
 | 
				
			||||||
                ) as mode
 | 
					                )
 | 
				
			||||||
            ):
 | 
					            ):
 | 
				
			||||||
                if not vlm_chart:
 | 
					 | 
				
			||||||
                    # trigger another view reset if no sub-chart
 | 
					 | 
				
			||||||
                    chart.default_view()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                rt_linked.mode = mode
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # let Qt run to render all widgets and make sure the
 | 
					                # let Qt run to render all widgets and make sure the
 | 
				
			||||||
                # sidepanes line up vertically.
 | 
					                # sidepanes line up vertically.
 | 
				
			||||||
                await trio.sleep(0)
 | 
					                await trio.sleep(0)
 | 
				
			||||||
 | 
					                linked.resize_sidepanes()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # dynamic resize steps
 | 
					 | 
				
			||||||
                godwidget.resize_all()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # TODO: look into this because not sure why it was
 | 
					 | 
				
			||||||
                # commented out / we ever needed it XD
 | 
					 | 
				
			||||||
                # NOTE: we pop the volume chart from the subplots set so
 | 
					                # NOTE: we pop the volume chart from the subplots set so
 | 
				
			||||||
                # that it isn't double rendered in the display loop
 | 
					                # that it isn't double rendered in the display loop
 | 
				
			||||||
                # above since we do a maxmin calc on the volume data to
 | 
					                # above since we do a maxmin calc on the volume data to
 | 
				
			||||||
                # determine if auto-range adjustements should be made.
 | 
					                # determine if auto-range adjustements should be made.
 | 
				
			||||||
                # rt_linked.subplots.pop('volume', None)
 | 
					                # linked.subplots.pop('volume', None)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # TODO: make this not so shit XD
 | 
					                # TODO: make this not so shit XD
 | 
				
			||||||
                # close group status
 | 
					                # close group status
 | 
				
			||||||
                sbar._status_groups[loading_sym_key][1]()
 | 
					                sbar._status_groups[loading_sym_key][1]()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                hist_linked.graphics_cycle()
 | 
					 | 
				
			||||||
                await trio.sleep(0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                bars_in_mem = int(len(hist_ohlcv.array))
 | 
					 | 
				
			||||||
                hist_chart.default_view(
 | 
					 | 
				
			||||||
                    bars_from_y=bars_in_mem,  # size to data
 | 
					 | 
				
			||||||
                    # push it 1/16th away from the y-axis
 | 
					 | 
				
			||||||
                    y_offset=round(bars_in_mem / 16),
 | 
					 | 
				
			||||||
                )
 | 
					 | 
				
			||||||
                godwidget.resize_all()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # let the app run.. bby
 | 
					                # let the app run.. bby
 | 
				
			||||||
 | 
					                # linked.graphics_cycle()
 | 
				
			||||||
                await trio.sleep_forever()
 | 
					                await trio.sleep_forever()
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -18,27 +18,11 @@
 | 
				
			||||||
Higher level annotation editors.
 | 
					Higher level annotation editors.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from __future__ import annotations
 | 
					from dataclasses import dataclass, field
 | 
				
			||||||
from collections import defaultdict
 | 
					from typing import Optional
 | 
				
			||||||
from typing import (
 | 
					 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    TYPE_CHECKING
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
import pyqtgraph as pg
 | 
					import pyqtgraph as pg
 | 
				
			||||||
from pyqtgraph import (
 | 
					from pyqtgraph import ViewBox, Point, QtCore, QtGui
 | 
				
			||||||
    ViewBox,
 | 
					 | 
				
			||||||
    Point,
 | 
					 | 
				
			||||||
    QtCore,
 | 
					 | 
				
			||||||
    QtWidgets,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from PyQt5.QtGui import (
 | 
					 | 
				
			||||||
    QColor,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from PyQt5.QtWidgets import (
 | 
					 | 
				
			||||||
    QLabel,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from pyqtgraph import functions as fn
 | 
					from pyqtgraph import functions as fn
 | 
				
			||||||
from PyQt5.QtCore import QPointF
 | 
					from PyQt5.QtCore import QPointF
 | 
				
			||||||
import numpy as np
 | 
					import numpy as np
 | 
				
			||||||
| 
						 | 
					@ -46,34 +30,28 @@ import numpy as np
 | 
				
			||||||
from ._style import hcolor, _font
 | 
					from ._style import hcolor, _font
 | 
				
			||||||
from ._lines import LevelLine
 | 
					from ._lines import LevelLine
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from ..data.types import Struct
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
if TYPE_CHECKING:
 | 
					 | 
				
			||||||
    from ._chart import GodWidget
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class ArrowEditor(Struct):
 | 
					@dataclass
 | 
				
			||||||
 | 
					class ArrowEditor:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    godw: GodWidget = None  # type: ignore # noqa
 | 
					    chart: 'ChartPlotWidget'  # noqa
 | 
				
			||||||
    _arrows: dict[str, list[pg.ArrowItem]] = {}
 | 
					    _arrows: field(default_factory=dict)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def add(
 | 
					    def add(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        plot: pg.PlotItem,
 | 
					 | 
				
			||||||
        uid: str,
 | 
					        uid: str,
 | 
				
			||||||
        x: float,
 | 
					        x: float,
 | 
				
			||||||
        y: float,
 | 
					        y: float,
 | 
				
			||||||
        color='default',
 | 
					        color='default',
 | 
				
			||||||
        pointing: Optional[str] = None,
 | 
					        pointing: Optional[str] = None,
 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> pg.ArrowItem:
 | 
					    ) -> pg.ArrowItem:
 | 
				
			||||||
        '''
 | 
					        """Add an arrow graphic to view at given (x, y).
 | 
				
			||||||
        Add an arrow graphic to view at given (x, y).
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        angle = {
 | 
					        angle = {
 | 
				
			||||||
            'up': 90,
 | 
					            'up': 90,
 | 
				
			||||||
            'down': -90,
 | 
					            'down': -90,
 | 
				
			||||||
| 
						 | 
					@ -96,25 +74,25 @@ class ArrowEditor(Struct):
 | 
				
			||||||
            brush=pg.mkBrush(hcolor(color)),
 | 
					            brush=pg.mkBrush(hcolor(color)),
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        arrow.setPos(x, y)
 | 
					        arrow.setPos(x, y)
 | 
				
			||||||
        self._arrows.setdefault(uid, []).append(arrow)
 | 
					
 | 
				
			||||||
 | 
					        self._arrows[uid] = arrow
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # render to view
 | 
					        # render to view
 | 
				
			||||||
        plot.addItem(arrow)
 | 
					        self.chart.plotItem.addItem(arrow)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return arrow
 | 
					        return arrow
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def remove(self, arrow) -> bool:
 | 
					    def remove(self, arrow) -> bool:
 | 
				
			||||||
        for linked in self.godw.iter_linked():
 | 
					        self.chart.plotItem.removeItem(arrow)
 | 
				
			||||||
            linked.chart.plotItem.removeItem(arrow)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class LineEditor(Struct):
 | 
					@dataclass
 | 
				
			||||||
    '''
 | 
					class LineEditor:
 | 
				
			||||||
    The great editor of linez.
 | 
					    '''The great editor of linez.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    godw: GodWidget = None  # type: ignore # noqa
 | 
					    chart: 'ChartPlotWidget' = None  # type: ignore # noqa
 | 
				
			||||||
    _order_lines: defaultdict[str, LevelLine] = defaultdict(list)
 | 
					    _order_lines: dict[str, LevelLine] = field(default_factory=dict)
 | 
				
			||||||
    _active_staged_line: LevelLine = None
 | 
					    _active_staged_line: LevelLine = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def stage_line(
 | 
					    def stage_line(
 | 
				
			||||||
| 
						 | 
					@ -122,11 +100,11 @@ class LineEditor(Struct):
 | 
				
			||||||
        line: LevelLine,
 | 
					        line: LevelLine,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> LevelLine:
 | 
					    ) -> LevelLine:
 | 
				
			||||||
        '''
 | 
					        """Stage a line at the current chart's cursor position
 | 
				
			||||||
        Stage a line at the current chart's cursor position
 | 
					 | 
				
			||||||
        and return it.
 | 
					        and return it.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # add a "staged" cursor-tracking line to view
 | 
					        # add a "staged" cursor-tracking line to view
 | 
				
			||||||
        # and cash it in a a var
 | 
					        # and cash it in a a var
 | 
				
			||||||
        if self._active_staged_line:
 | 
					        if self._active_staged_line:
 | 
				
			||||||
| 
						 | 
					@ -137,25 +115,17 @@ class LineEditor(Struct):
 | 
				
			||||||
        return line
 | 
					        return line
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def unstage_line(self) -> LevelLine:
 | 
					    def unstage_line(self) -> LevelLine:
 | 
				
			||||||
        '''
 | 
					        """Inverse of ``.stage_line()``.
 | 
				
			||||||
        Inverse of ``.stage_line()``.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        cursor = self.godw.get_cursor()
 | 
					        # chart = self.chart._cursor.active_plot
 | 
				
			||||||
        if not cursor:
 | 
					        # # chart.setCursor(QtCore.Qt.ArrowCursor)
 | 
				
			||||||
            return None
 | 
					        cursor = self.chart.linked.cursor
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # delete "staged" cursor tracking line from view
 | 
					        # delete "staged" cursor tracking line from view
 | 
				
			||||||
        line = self._active_staged_line
 | 
					        line = self._active_staged_line
 | 
				
			||||||
        if line:
 | 
					        if line:
 | 
				
			||||||
            try:
 | 
					            cursor._trackers.remove(line)
 | 
				
			||||||
                cursor._trackers.remove(line)
 | 
					 | 
				
			||||||
            except KeyError:
 | 
					 | 
				
			||||||
                # when the current cursor doesn't have said line
 | 
					 | 
				
			||||||
                # registered (probably means that user held order mode
 | 
					 | 
				
			||||||
                # key while panning to another view) then we just
 | 
					 | 
				
			||||||
                # ignore the remove error.
 | 
					 | 
				
			||||||
                pass
 | 
					 | 
				
			||||||
            line.delete()
 | 
					            line.delete()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self._active_staged_line = None
 | 
					        self._active_staged_line = None
 | 
				
			||||||
| 
						 | 
					@ -163,58 +133,55 @@ class LineEditor(Struct):
 | 
				
			||||||
        # show the crosshair y line and label
 | 
					        # show the crosshair y line and label
 | 
				
			||||||
        cursor.show_xhair()
 | 
					        cursor.show_xhair()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def submit_lines(
 | 
					    def submit_line(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        lines: list[LevelLine],
 | 
					        line: LevelLine,
 | 
				
			||||||
        uuid: str,
 | 
					        uuid: str,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> LevelLine:
 | 
					    ) -> LevelLine:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # staged_line = self._active_staged_line
 | 
					        staged_line = self._active_staged_line
 | 
				
			||||||
        # if not staged_line:
 | 
					        if not staged_line:
 | 
				
			||||||
        #     raise RuntimeError("No line is currently staged!?")
 | 
					            raise RuntimeError("No line is currently staged!?")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # for now, until submission reponse arrives
 | 
					        # for now, until submission reponse arrives
 | 
				
			||||||
        for line in lines:
 | 
					        line.hide_labels()
 | 
				
			||||||
            line.hide_labels()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # register for later lookup/deletion
 | 
					        # register for later lookup/deletion
 | 
				
			||||||
        self._order_lines[uuid] += lines
 | 
					        self._order_lines[uuid] = line
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return lines
 | 
					        return line
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def commit_line(self, uuid: str) -> list[LevelLine]:
 | 
					    def commit_line(self, uuid: str) -> LevelLine:
 | 
				
			||||||
        '''
 | 
					        """Commit a "staged line" to view.
 | 
				
			||||||
        Commit a "staged line" to view.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        Submits the line graphic under the cursor as a (new) permanent
 | 
					        Submits the line graphic under the cursor as a (new) permanent
 | 
				
			||||||
        graphic in view.
 | 
					        graphic in view.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        lines = self._order_lines[uuid]
 | 
					        try:
 | 
				
			||||||
        if lines:
 | 
					            line = self._order_lines[uuid]
 | 
				
			||||||
            for line in lines:
 | 
					        except KeyError:
 | 
				
			||||||
                line.show_labels()
 | 
					            log.warning(f'No line for {uuid} could be found?')
 | 
				
			||||||
                line.hide_markers()
 | 
					            return
 | 
				
			||||||
                log.debug(f'Level active for level: {line.value()}')
 | 
					        else:
 | 
				
			||||||
                # TODO: other flashy things to indicate the order is active
 | 
					            line.show_labels()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return lines
 | 
					            # TODO: other flashy things to indicate the order is active
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            log.debug(f'Level active for level: {line.value()}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            return line
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def lines_under_cursor(self) -> list[LevelLine]:
 | 
					    def lines_under_cursor(self) -> list[LevelLine]:
 | 
				
			||||||
        '''
 | 
					        """Get the line(s) under the cursor position.
 | 
				
			||||||
        Get the line(s) under the cursor position.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        # Delete any hoverable under the cursor
 | 
					        # Delete any hoverable under the cursor
 | 
				
			||||||
        return self.godw.get_cursor()._hovered
 | 
					        return self.chart.linked.cursor._hovered
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def all_lines(self) -> list[LevelLine]:
 | 
					    def all_lines(self) -> tuple[LevelLine]:
 | 
				
			||||||
        all_lines = []
 | 
					        return tuple(self._order_lines.values())
 | 
				
			||||||
        for lines in list(self._order_lines.values()):
 | 
					 | 
				
			||||||
            all_lines.extend(lines)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        return all_lines
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def remove_line(
 | 
					    def remove_line(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
| 
						 | 
					@ -229,30 +196,29 @@ class LineEditor(Struct):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        # try to look up line from our registry
 | 
					        # try to look up line from our registry
 | 
				
			||||||
        lines = self._order_lines.pop(uuid, None)
 | 
					        line = self._order_lines.pop(uuid, line)
 | 
				
			||||||
        if lines:
 | 
					        if line:
 | 
				
			||||||
            cursor = self.godw.get_cursor()
 | 
					 | 
				
			||||||
            if cursor:
 | 
					 | 
				
			||||||
                for line in lines:
 | 
					 | 
				
			||||||
                    # if hovered remove from cursor set
 | 
					 | 
				
			||||||
                    hovered = cursor._hovered
 | 
					 | 
				
			||||||
                    if line in hovered:
 | 
					 | 
				
			||||||
                        hovered.remove(line)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    log.debug(f'deleting {line} with oid: {uuid}')
 | 
					            # if hovered remove from cursor set
 | 
				
			||||||
                    line.delete()
 | 
					            cursor = self.chart.linked.cursor
 | 
				
			||||||
 | 
					            hovered = cursor._hovered
 | 
				
			||||||
 | 
					            if line in hovered:
 | 
				
			||||||
 | 
					                hovered.remove(line)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # make sure the xhair doesn't get left off
 | 
					                # make sure the xhair doesn't get left off
 | 
				
			||||||
                    # just because we never got a un-hover event
 | 
					                # just because we never got a un-hover event
 | 
				
			||||||
                    cursor.show_xhair()
 | 
					                cursor.show_xhair()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            log.debug(f'deleting {line} with oid: {uuid}')
 | 
				
			||||||
 | 
					            line.delete()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            log.warning(f'Could not find line for {line}')
 | 
					            log.warning(f'Could not find line for {line}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return lines
 | 
					        return line
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class SelectRect(QtWidgets.QGraphicsRectItem):
 | 
					class SelectRect(QtGui.QGraphicsRectItem):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
| 
						 | 
					@ -261,12 +227,12 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        super().__init__(0, 0, 1, 1)
 | 
					        super().__init__(0, 0, 1, 1)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # self.rbScaleBox = QGraphicsRectItem(0, 0, 1, 1)
 | 
					        # self.rbScaleBox = QtGui.QGraphicsRectItem(0, 0, 1, 1)
 | 
				
			||||||
        self.vb = viewbox
 | 
					        self.vb = viewbox
 | 
				
			||||||
        self._chart: 'ChartPlotWidget' = None  # noqa
 | 
					        self._chart: 'ChartPlotWidget' = None  # noqa
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # override selection box color
 | 
					        # override selection box color
 | 
				
			||||||
        color = QColor(hcolor(color))
 | 
					        color = QtGui.QColor(hcolor(color))
 | 
				
			||||||
        self.setPen(fn.mkPen(color, width=1))
 | 
					        self.setPen(fn.mkPen(color, width=1))
 | 
				
			||||||
        color.setAlpha(66)
 | 
					        color.setAlpha(66)
 | 
				
			||||||
        self.setBrush(fn.mkBrush(color))
 | 
					        self.setBrush(fn.mkBrush(color))
 | 
				
			||||||
| 
						 | 
					@ -274,7 +240,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
 | 
				
			||||||
        self.hide()
 | 
					        self.hide()
 | 
				
			||||||
        self._label = None
 | 
					        self._label = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        label = self._label = QLabel()
 | 
					        label = self._label = QtGui.QLabel()
 | 
				
			||||||
        label.setTextFormat(0)  # markdown
 | 
					        label.setTextFormat(0)  # markdown
 | 
				
			||||||
        label.setFont(_font.font)
 | 
					        label.setFont(_font.font)
 | 
				
			||||||
        label.setMargin(0)
 | 
					        label.setMargin(0)
 | 
				
			||||||
| 
						 | 
					@ -311,8 +277,8 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
 | 
				
			||||||
        # TODO: get bg color working
 | 
					        # TODO: get bg color working
 | 
				
			||||||
        palette.setColor(
 | 
					        palette.setColor(
 | 
				
			||||||
            self._label.backgroundRole(),
 | 
					            self._label.backgroundRole(),
 | 
				
			||||||
            # QColor(chart.backgroundBrush()),
 | 
					            # QtGui.QColor(chart.backgroundBrush()),
 | 
				
			||||||
            QColor(hcolor('papas_special')),
 | 
					            QtGui.QColor(hcolor('papas_special')),
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def update_on_resize(self, vr, r):
 | 
					    def update_on_resize(self, vr, r):
 | 
				
			||||||
| 
						 | 
					@ -360,7 +326,7 @@ class SelectRect(QtWidgets.QGraphicsRectItem):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.setPos(r.topLeft())
 | 
					        self.setPos(r.topLeft())
 | 
				
			||||||
        self.resetTransform()
 | 
					        self.resetTransform()
 | 
				
			||||||
        self.setRect(r)
 | 
					        self.scale(r.width(), r.height())
 | 
				
			||||||
        self.show()
 | 
					        self.show()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        y1, y2 = start_pos.y(), end_pos.y()
 | 
					        y1, y2 = start_pos.y(), end_pos.y()
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -18,11 +18,11 @@
 | 
				
			||||||
Qt event proxying and processing using ``trio`` mem chans.
 | 
					Qt event proxying and processing using ``trio`` mem chans.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from contextlib import asynccontextmanager as acm
 | 
					from contextlib import asynccontextmanager, AsyncExitStack
 | 
				
			||||||
from typing import Callable
 | 
					from typing import Callable
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from pydantic import BaseModel
 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
from tractor.trionics import gather_contexts
 | 
					 | 
				
			||||||
from PyQt5 import QtCore
 | 
					from PyQt5 import QtCore
 | 
				
			||||||
from PyQt5.QtCore import QEvent, pyqtBoundSignal
 | 
					from PyQt5.QtCore import QEvent, pyqtBoundSignal
 | 
				
			||||||
from PyQt5.QtWidgets import QWidget
 | 
					from PyQt5.QtWidgets import QWidget
 | 
				
			||||||
| 
						 | 
					@ -30,8 +30,6 @@ from PyQt5.QtWidgets import (
 | 
				
			||||||
    QGraphicsSceneMouseEvent as gs_mouse,
 | 
					    QGraphicsSceneMouseEvent as gs_mouse,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from ..data.types import Struct
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
MOUSE_EVENTS = {
 | 
					MOUSE_EVENTS = {
 | 
				
			||||||
    gs_mouse.GraphicsSceneMousePress,
 | 
					    gs_mouse.GraphicsSceneMousePress,
 | 
				
			||||||
| 
						 | 
					@ -45,10 +43,13 @@ MOUSE_EVENTS = {
 | 
				
			||||||
# TODO: maybe consider some constrained ints down the road?
 | 
					# TODO: maybe consider some constrained ints down the road?
 | 
				
			||||||
# https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
 | 
					# https://pydantic-docs.helpmanual.io/usage/types/#constrained-types
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class KeyboardMsg(Struct):
 | 
					class KeyboardMsg(BaseModel):
 | 
				
			||||||
    '''Unpacked Qt keyboard event data.
 | 
					    '''Unpacked Qt keyboard event data.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
 | 
					    class Config:
 | 
				
			||||||
 | 
					        arbitrary_types_allowed = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    event: QEvent
 | 
					    event: QEvent
 | 
				
			||||||
    etype: int
 | 
					    etype: int
 | 
				
			||||||
    key: int
 | 
					    key: int
 | 
				
			||||||
| 
						 | 
					@ -56,13 +57,16 @@ class KeyboardMsg(Struct):
 | 
				
			||||||
    txt: str
 | 
					    txt: str
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def to_tuple(self) -> tuple:
 | 
					    def to_tuple(self) -> tuple:
 | 
				
			||||||
        return tuple(self.to_dict().values())
 | 
					        return tuple(self.dict().values())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class MouseMsg(Struct):
 | 
					class MouseMsg(BaseModel):
 | 
				
			||||||
    '''Unpacked Qt keyboard event data.
 | 
					    '''Unpacked Qt keyboard event data.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
 | 
					    class Config:
 | 
				
			||||||
 | 
					        arbitrary_types_allowed = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    event: QEvent
 | 
					    event: QEvent
 | 
				
			||||||
    etype: int
 | 
					    etype: int
 | 
				
			||||||
    button: int
 | 
					    button: int
 | 
				
			||||||
| 
						 | 
					@ -156,7 +160,7 @@ class EventRelay(QtCore.QObject):
 | 
				
			||||||
        return False
 | 
					        return False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@acm
 | 
					@asynccontextmanager
 | 
				
			||||||
async def open_event_stream(
 | 
					async def open_event_stream(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    source_widget: QWidget,
 | 
					    source_widget: QWidget,
 | 
				
			||||||
| 
						 | 
					@ -182,7 +186,7 @@ async def open_event_stream(
 | 
				
			||||||
        source_widget.removeEventFilter(kc)
 | 
					        source_widget.removeEventFilter(kc)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@acm
 | 
					@asynccontextmanager
 | 
				
			||||||
async def open_signal_handler(
 | 
					async def open_signal_handler(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    signal: pyqtBoundSignal,
 | 
					    signal: pyqtBoundSignal,
 | 
				
			||||||
| 
						 | 
					@ -207,7 +211,7 @@ async def open_signal_handler(
 | 
				
			||||||
            yield
 | 
					            yield
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@acm
 | 
					@asynccontextmanager
 | 
				
			||||||
async def open_handlers(
 | 
					async def open_handlers(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    source_widgets: list[QWidget],
 | 
					    source_widgets: list[QWidget],
 | 
				
			||||||
| 
						 | 
					@ -216,14 +220,16 @@ async def open_handlers(
 | 
				
			||||||
    **kwargs,
 | 
					    **kwargs,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async with (
 | 
					    async with (
 | 
				
			||||||
        trio.open_nursery() as n,
 | 
					        trio.open_nursery() as n,
 | 
				
			||||||
        gather_contexts([
 | 
					        AsyncExitStack() as stack,
 | 
				
			||||||
            open_event_stream(widget, event_types, **kwargs)
 | 
					 | 
				
			||||||
            for widget in source_widgets
 | 
					 | 
				
			||||||
        ]) as streams,
 | 
					 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        for widget, event_recv_stream in zip(source_widgets, streams):
 | 
					        for widget in source_widgets:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            event_recv_stream = await stack.enter_async_context(
 | 
				
			||||||
 | 
					                open_event_stream(widget, event_types, **kwargs)
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
            n.start_soon(async_handler, widget, event_recv_stream)
 | 
					            n.start_soon(async_handler, widget, event_recv_stream)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        yield
 | 
					        yield
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -20,24 +20,16 @@ Trio - Qt integration
 | 
				
			||||||
Run ``trio`` in guest mode on top of the Qt event loop.
 | 
					Run ``trio`` in guest mode on top of the Qt event loop.
 | 
				
			||||||
All global Qt runtime settings are mostly defined here.
 | 
					All global Qt runtime settings are mostly defined here.
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from __future__ import annotations
 | 
					from typing import Tuple, Callable, Dict, Any
 | 
				
			||||||
from typing import (
 | 
					 | 
				
			||||||
    Callable,
 | 
					 | 
				
			||||||
    Any,
 | 
					 | 
				
			||||||
    Type,
 | 
					 | 
				
			||||||
    TYPE_CHECKING,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
import platform
 | 
					import platform
 | 
				
			||||||
import traceback
 | 
					import traceback
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Qt specific
 | 
					# Qt specific
 | 
				
			||||||
import PyQt5  # noqa
 | 
					import PyQt5  # noqa
 | 
				
			||||||
from PyQt5.QtWidgets import (
 | 
					import pyqtgraph as pg
 | 
				
			||||||
    QWidget,
 | 
					from pyqtgraph import QtGui
 | 
				
			||||||
    QMainWindow,
 | 
					 | 
				
			||||||
    QApplication,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from PyQt5 import QtCore
 | 
					from PyQt5 import QtCore
 | 
				
			||||||
 | 
					# from PyQt5.QtGui import QLabel, QStatusBar
 | 
				
			||||||
from PyQt5.QtCore import (
 | 
					from PyQt5.QtCore import (
 | 
				
			||||||
    pyqtRemoveInputHook,
 | 
					    pyqtRemoveInputHook,
 | 
				
			||||||
    Qt,
 | 
					    Qt,
 | 
				
			||||||
| 
						 | 
					@ -45,7 +37,7 @@ from PyQt5.QtCore import (
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
import qdarkstyle
 | 
					import qdarkstyle
 | 
				
			||||||
from qdarkstyle import DarkPalette
 | 
					from qdarkstyle import DarkPalette
 | 
				
			||||||
# import qdarkgraystyle  # TODO: play with it
 | 
					# import qdarkgraystyle
 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
from outcome import Error
 | 
					from outcome import Error
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -54,7 +46,6 @@ from ..log import get_logger
 | 
				
			||||||
from ._pg_overrides import _do_overrides
 | 
					from ._pg_overrides import _do_overrides
 | 
				
			||||||
from . import _style
 | 
					from . import _style
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# pyqtgraph global config
 | 
					# pyqtgraph global config
 | 
				
			||||||
| 
						 | 
					@ -81,18 +72,17 @@ if platform.system() == "Windows":
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def run_qtractor(
 | 
					def run_qtractor(
 | 
				
			||||||
    func: Callable,
 | 
					    func: Callable,
 | 
				
			||||||
    args: tuple,
 | 
					    args: Tuple,
 | 
				
			||||||
    main_widget_type: Type[QWidget],
 | 
					    main_widget: QtGui.QWidget,
 | 
				
			||||||
    tractor_kwargs: dict[str, Any] = {},
 | 
					    tractor_kwargs: Dict[str, Any] = {},
 | 
				
			||||||
    window_type: QMainWindow = None,
 | 
					    window_type: QtGui.QMainWindow = None,
 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
    # avoids annoying message when entering debugger from qt loop
 | 
					    # avoids annoying message when entering debugger from qt loop
 | 
				
			||||||
    pyqtRemoveInputHook()
 | 
					    pyqtRemoveInputHook()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    app = QApplication.instance()
 | 
					    app = QtGui.QApplication.instance()
 | 
				
			||||||
    if app is None:
 | 
					    if app is None:
 | 
				
			||||||
        app = QApplication([])
 | 
					        app = PyQt5.QtWidgets.QApplication([])
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO: we might not need this if it's desired
 | 
					    # TODO: we might not need this if it's desired
 | 
				
			||||||
    # to cancel the tractor machinery on Qt loop
 | 
					    # to cancel the tractor machinery on Qt loop
 | 
				
			||||||
| 
						 | 
					@ -166,7 +156,7 @@ def run_qtractor(
 | 
				
			||||||
    # hook into app focus change events
 | 
					    # hook into app focus change events
 | 
				
			||||||
    app.focusChanged.connect(window.on_focus_change)
 | 
					    app.focusChanged.connect(window.on_focus_change)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    instance = main_widget_type()
 | 
					    instance = main_widget()
 | 
				
			||||||
    instance.window = window
 | 
					    instance.window = window
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # override tractor's defaults
 | 
					    # override tractor's defaults
 | 
				
			||||||
| 
						 | 
					@ -188,7 +178,7 @@ def run_qtractor(
 | 
				
			||||||
        # restrict_keyboard_interrupt_to_checkpoints=True,
 | 
					        # restrict_keyboard_interrupt_to_checkpoints=True,
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    window.godwidget: GodWidget = instance
 | 
					    window.main_widget = main_widget
 | 
				
			||||||
    window.setCentralWidget(instance)
 | 
					    window.setCentralWidget(instance)
 | 
				
			||||||
    if is_windows:
 | 
					    if is_windows:
 | 
				
			||||||
        window.configure_to_desktop()
 | 
					        window.configure_to_desktop()
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -59,7 +59,6 @@ from ._curve import (
 | 
				
			||||||
    FlattenedOHLC,
 | 
					    FlattenedOHLC,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
| 
						 | 
					@ -131,7 +130,7 @@ def render_baritems(
 | 
				
			||||||
        int, int, np.ndarray,
 | 
					        int, int, np.ndarray,
 | 
				
			||||||
        int, int, np.ndarray,
 | 
					        int, int, np.ndarray,
 | 
				
			||||||
    ],
 | 
					    ],
 | 
				
			||||||
    profiler: Profiler,
 | 
					    profiler: pg.debug.Profiler,
 | 
				
			||||||
    **kwargs,
 | 
					    **kwargs,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
| 
						 | 
					@ -338,7 +337,6 @@ class Flow(msgspec.Struct):  # , frozen=True):
 | 
				
			||||||
    name: str
 | 
					    name: str
 | 
				
			||||||
    plot: pg.PlotItem
 | 
					    plot: pg.PlotItem
 | 
				
			||||||
    graphics: Union[Curve, BarItems]
 | 
					    graphics: Union[Curve, BarItems]
 | 
				
			||||||
    yrange: tuple[float, float] = None
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # in some cases a flow may want to change its
 | 
					    # in some cases a flow may want to change its
 | 
				
			||||||
    # graphical "type" or, "form" when downsampling,
 | 
					    # graphical "type" or, "form" when downsampling,
 | 
				
			||||||
| 
						 | 
					@ -388,11 +386,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
 | 
				
			||||||
        lbar: int,
 | 
					        lbar: int,
 | 
				
			||||||
        rbar: int,
 | 
					        rbar: int,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> Optional[tuple[float, float]]:
 | 
					    ) -> tuple[float, float]:
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        Compute the cached max and min y-range values for a given
 | 
					        Compute the cached max and min y-range values for a given
 | 
				
			||||||
        x-range determined by ``lbar`` and ``rbar`` or ``None``
 | 
					        x-range determined by ``lbar`` and ``rbar``.
 | 
				
			||||||
        if no range can be determined (yet).
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        rkey = (lbar, rbar)
 | 
					        rkey = (lbar, rbar)
 | 
				
			||||||
| 
						 | 
					@ -402,44 +399,40 @@ class Flow(msgspec.Struct):  # , frozen=True):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        shm = self.shm
 | 
					        shm = self.shm
 | 
				
			||||||
        if shm is None:
 | 
					        if shm is None:
 | 
				
			||||||
            return None
 | 
					            mxmn = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        arr = shm.array
 | 
					        else:  # new block for profiling?..
 | 
				
			||||||
 | 
					            arr = shm.array
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # build relative indexes into shm array
 | 
					            # build relative indexes into shm array
 | 
				
			||||||
        # TODO: should we just add/use a method
 | 
					            # TODO: should we just add/use a method
 | 
				
			||||||
        # on the shm to do this?
 | 
					            # on the shm to do this?
 | 
				
			||||||
        ifirst = arr[0]['index']
 | 
					            ifirst = arr[0]['index']
 | 
				
			||||||
        slice_view = arr[
 | 
					            slice_view = arr[
 | 
				
			||||||
            lbar - ifirst:
 | 
					                lbar - ifirst:
 | 
				
			||||||
            (rbar - ifirst) + 1
 | 
					                (rbar - ifirst) + 1
 | 
				
			||||||
        ]
 | 
					            ]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if not slice_view.size:
 | 
					            if not slice_view.size:
 | 
				
			||||||
            return None
 | 
					                mxmn = None
 | 
				
			||||||
 | 
					 | 
				
			||||||
        elif self.yrange:
 | 
					 | 
				
			||||||
            mxmn = self.yrange
 | 
					 | 
				
			||||||
            # print(f'{self.name} M4 maxmin: {mxmn}')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            if self.is_ohlc:
 | 
					 | 
				
			||||||
                ylow = np.min(slice_view['low'])
 | 
					 | 
				
			||||||
                yhigh = np.max(slice_view['high'])
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            else:
 | 
					            else:
 | 
				
			||||||
                view = slice_view[self.name]
 | 
					                if self.is_ohlc:
 | 
				
			||||||
                ylow = np.min(view)
 | 
					                    ylow = np.min(slice_view['low'])
 | 
				
			||||||
                yhigh = np.max(view)
 | 
					                    yhigh = np.max(slice_view['high'])
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            mxmn = ylow, yhigh
 | 
					                else:
 | 
				
			||||||
            # print(f'{self.name} MANUAL maxmin: {mxmin}')
 | 
					                    view = slice_view[self.name]
 | 
				
			||||||
 | 
					                    ylow = np.min(view)
 | 
				
			||||||
 | 
					                    yhigh = np.max(view)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # cache result for input range
 | 
					                mxmn = ylow, yhigh
 | 
				
			||||||
        assert mxmn
 | 
					 | 
				
			||||||
        self._mxmns[rkey] = mxmn
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return mxmn
 | 
					            if mxmn is not None:
 | 
				
			||||||
 | 
					                # cache new mxmn result
 | 
				
			||||||
 | 
					                self._mxmns[rkey] = mxmn
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            return mxmn
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def view_range(self) -> tuple[int, int]:
 | 
					    def view_range(self) -> tuple[int, int]:
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
| 
						 | 
					@ -518,7 +511,7 @@ class Flow(msgspec.Struct):  # , frozen=True):
 | 
				
			||||||
        render: bool = True,
 | 
					        render: bool = True,
 | 
				
			||||||
        array_key: Optional[str] = None,
 | 
					        array_key: Optional[str] = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        profiler: Optional[Profiler] = None,
 | 
					        profiler: Optional[pg.debug.Profiler] = None,
 | 
				
			||||||
        do_append: bool = True,
 | 
					        do_append: bool = True,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        **kwargs,
 | 
					        **kwargs,
 | 
				
			||||||
| 
						 | 
					@ -529,7 +522,7 @@ class Flow(msgspec.Struct):  # , frozen=True):
 | 
				
			||||||
        render to graphics.
 | 
					        render to graphics.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        profiler = Profiler(
 | 
					        profiler = pg.debug.Profiler(
 | 
				
			||||||
            msg=f'Flow.update_graphics() for {self.name}',
 | 
					            msg=f'Flow.update_graphics() for {self.name}',
 | 
				
			||||||
            disabled=not pg_profile_enabled(),
 | 
					            disabled=not pg_profile_enabled(),
 | 
				
			||||||
            ms_threshold=4,
 | 
					            ms_threshold=4,
 | 
				
			||||||
| 
						 | 
					@ -635,13 +628,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
 | 
				
			||||||
            # source data so we clear our path data in prep
 | 
					            # source data so we clear our path data in prep
 | 
				
			||||||
            # to generate a new one from original source data.
 | 
					            # to generate a new one from original source data.
 | 
				
			||||||
            new_sample_rate = True
 | 
					            new_sample_rate = True
 | 
				
			||||||
 | 
					            showing_src_data = True
 | 
				
			||||||
            should_ds = False
 | 
					            should_ds = False
 | 
				
			||||||
            should_redraw = True
 | 
					            should_redraw = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            showing_src_data = True
 | 
					 | 
				
			||||||
            # reset yrange to be computed from source data
 | 
					 | 
				
			||||||
            self.yrange = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # MAIN RENDER LOGIC:
 | 
					        # MAIN RENDER LOGIC:
 | 
				
			||||||
        # - determine in view data and redraw on range change
 | 
					        # - determine in view data and redraw on range change
 | 
				
			||||||
        # - determine downsampling ops if needed
 | 
					        # - determine downsampling ops if needed
 | 
				
			||||||
| 
						 | 
					@ -667,10 +657,6 @@ class Flow(msgspec.Struct):  # , frozen=True):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            **rkwargs,
 | 
					            **rkwargs,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        if showing_src_data:
 | 
					 | 
				
			||||||
            # print(f"{self.name} SHOWING SOURCE")
 | 
					 | 
				
			||||||
            # reset yrange to be computed from source data
 | 
					 | 
				
			||||||
            self.yrange = None
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if not out:
 | 
					        if not out:
 | 
				
			||||||
            log.warning(f'{self.name} failed to render!?')
 | 
					            log.warning(f'{self.name} failed to render!?')
 | 
				
			||||||
| 
						 | 
					@ -678,9 +664,6 @@ class Flow(msgspec.Struct):  # , frozen=True):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        path, data, reset = out
 | 
					        path, data, reset = out
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # if self.yrange:
 | 
					 | 
				
			||||||
        #     print(f'flow {self.name} yrange from m4: {self.yrange}')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # XXX: SUPER UGGGHHH... without this we get stale cache
 | 
					        # XXX: SUPER UGGGHHH... without this we get stale cache
 | 
				
			||||||
        # graphics that don't update until you downsampler again..
 | 
					        # graphics that don't update until you downsampler again..
 | 
				
			||||||
        if reset:
 | 
					        if reset:
 | 
				
			||||||
| 
						 | 
					@ -949,7 +932,7 @@ class Renderer(msgspec.Struct):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        new_read,
 | 
					        new_read,
 | 
				
			||||||
        array_key: str,
 | 
					        array_key: str,
 | 
				
			||||||
        profiler: Profiler,
 | 
					        profiler: pg.debug.Profiler,
 | 
				
			||||||
        uppx: float = 1,
 | 
					        uppx: float = 1,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # redraw and ds flags
 | 
					        # redraw and ds flags
 | 
				
			||||||
| 
						 | 
					@ -1075,7 +1058,6 @@ class Renderer(msgspec.Struct):
 | 
				
			||||||
        # xy-path data transform: convert source data to a format
 | 
					        # xy-path data transform: convert source data to a format
 | 
				
			||||||
        # able to be passed to a `QPainterPath` rendering routine.
 | 
					        # able to be passed to a `QPainterPath` rendering routine.
 | 
				
			||||||
        if not len(hist):
 | 
					        if not len(hist):
 | 
				
			||||||
            # XXX: this might be why the profiler only has exits?
 | 
					 | 
				
			||||||
            return
 | 
					            return
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        x_out, y_out, connect = self.format_xy(
 | 
					        x_out, y_out, connect = self.format_xy(
 | 
				
			||||||
| 
						 | 
					@ -1162,14 +1144,11 @@ class Renderer(msgspec.Struct):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            elif should_ds and uppx > 1:
 | 
					            elif should_ds and uppx > 1:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                x_out, y_out, ymn, ymx = xy_downsample(
 | 
					                x_out, y_out = xy_downsample(
 | 
				
			||||||
                    x_out,
 | 
					                    x_out,
 | 
				
			||||||
                    y_out,
 | 
					                    y_out,
 | 
				
			||||||
                    uppx,
 | 
					                    uppx,
 | 
				
			||||||
                )
 | 
					                )
 | 
				
			||||||
                self.flow.yrange = ymn, ymx
 | 
					 | 
				
			||||||
                # print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                reset = True
 | 
					                reset = True
 | 
				
			||||||
                profiler(f'FULL PATH downsample redraw={should_ds}')
 | 
					                profiler(f'FULL PATH downsample redraw={should_ds}')
 | 
				
			||||||
                self._in_ds = True
 | 
					                self._in_ds = True
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
 | 
				
			||||||
        # color: #19232D;
 | 
					        # color: #19232D;
 | 
				
			||||||
        # width: 10px;
 | 
					        # width: 10px;
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.setRange(0, int(slots))
 | 
					        self.setRange(0, slots)
 | 
				
			||||||
        self.setValue(value)
 | 
					        self.setValue(value)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -644,7 +644,7 @@ def mk_fill_status_bar(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO: calc this height from the ``ChartnPane``
 | 
					    # TODO: calc this height from the ``ChartnPane``
 | 
				
			||||||
    chart_h = round(parent_pane.height() * 5/8)
 | 
					    chart_h = round(parent_pane.height() * 5/8)
 | 
				
			||||||
    bar_h = chart_h * 0.375*0.9
 | 
					    bar_h = chart_h * 0.375
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # TODO: once things are sized to screen
 | 
					    # TODO: once things are sized to screen
 | 
				
			||||||
    bar_label_font_size = label_font_size or _font.px_size - 2
 | 
					    bar_label_font_size = label_font_size or _font.px_size - 2
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -27,13 +27,12 @@ from itertools import cycle
 | 
				
			||||||
from typing import Optional, AsyncGenerator, Any
 | 
					from typing import Optional, AsyncGenerator, Any
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import numpy as np
 | 
					import numpy as np
 | 
				
			||||||
import msgspec
 | 
					from pydantic import create_model
 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
import pyqtgraph as pg
 | 
					import pyqtgraph as pg
 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
from trio_typing import TaskStatus
 | 
					from trio_typing import TaskStatus
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from piker.data.types import Struct
 | 
					 | 
				
			||||||
from ._axes import PriceAxis
 | 
					from ._axes import PriceAxis
 | 
				
			||||||
from .._cacheables import maybe_open_context
 | 
					from .._cacheables import maybe_open_context
 | 
				
			||||||
from ..calc import humanize
 | 
					from ..calc import humanize
 | 
				
			||||||
| 
						 | 
					@ -54,12 +53,11 @@ from ._forms import (
 | 
				
			||||||
from ..fsp._api import maybe_mk_fsp_shm, Fsp
 | 
					from ..fsp._api import maybe_mk_fsp_shm, Fsp
 | 
				
			||||||
from ..fsp import cascade
 | 
					from ..fsp import cascade
 | 
				
			||||||
from ..fsp._volume import (
 | 
					from ..fsp._volume import (
 | 
				
			||||||
    # tina_vwap,
 | 
					    tina_vwap,
 | 
				
			||||||
    dolla_vlm,
 | 
					    dolla_vlm,
 | 
				
			||||||
    flow_rates,
 | 
					    flow_rates,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -155,13 +153,12 @@ async def open_fsp_sidepane(
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
 | 
					    # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
 | 
				
			||||||
    FspConfig = msgspec.defstruct(
 | 
					    FspConfig = create_model(
 | 
				
			||||||
        "Point",
 | 
					        'FspConfig',
 | 
				
			||||||
        [('name', name)] + list(params.items()),
 | 
					        name=name,
 | 
				
			||||||
        bases=(Struct,),
 | 
					        **params,
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
    model = FspConfig(name=name, **params)
 | 
					    sidepane.model = FspConfig()
 | 
				
			||||||
    sidepane.model = model
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # just a logger for now until we get fsp configs up and running.
 | 
					    # just a logger for now until we get fsp configs up and running.
 | 
				
			||||||
    async def settings_change(
 | 
					    async def settings_change(
 | 
				
			||||||
| 
						 | 
					@ -191,7 +188,7 @@ async def open_fsp_actor_cluster(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    from tractor._clustering import open_actor_cluster
 | 
					    from tractor._clustering import open_actor_cluster
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # profiler = Profiler(
 | 
					    # profiler = pg.debug.Profiler(
 | 
				
			||||||
    #     delayed=False,
 | 
					    #     delayed=False,
 | 
				
			||||||
    #     disabled=False
 | 
					    #     disabled=False
 | 
				
			||||||
    # )
 | 
					    # )
 | 
				
			||||||
| 
						 | 
					@ -213,7 +210,7 @@ async def run_fsp_ui(
 | 
				
			||||||
    target: Fsp,
 | 
					    target: Fsp,
 | 
				
			||||||
    conf: dict[str, dict],
 | 
					    conf: dict[str, dict],
 | 
				
			||||||
    loglevel: str,
 | 
					    loglevel: str,
 | 
				
			||||||
    # profiler: Profiler,
 | 
					    # profiler: pg.debug.Profiler,
 | 
				
			||||||
    # _quote_throttle_rate: int = 58,
 | 
					    # _quote_throttle_rate: int = 58,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
| 
						 | 
					@ -443,9 +440,7 @@ class FspAdmin:
 | 
				
			||||||
                        # if the chart isn't hidden try to update
 | 
					                        # if the chart isn't hidden try to update
 | 
				
			||||||
                        # the data on screen.
 | 
					                        # the data on screen.
 | 
				
			||||||
                        if not self.linked.isHidden():
 | 
					                        if not self.linked.isHidden():
 | 
				
			||||||
                            log.debug(
 | 
					                            log.debug(f'Re-syncing graphics for fsp: {ns_path}')
 | 
				
			||||||
                                f'Re-syncing graphics for fsp: {ns_path}'
 | 
					 | 
				
			||||||
                            )
 | 
					 | 
				
			||||||
                            self.linked.graphics_cycle(
 | 
					                            self.linked.graphics_cycle(
 | 
				
			||||||
                                trigger_all=True,
 | 
					                                trigger_all=True,
 | 
				
			||||||
                                prepend_update_index=info['first'],
 | 
					                                prepend_update_index=info['first'],
 | 
				
			||||||
| 
						 | 
					@ -474,10 +469,9 @@ class FspAdmin:
 | 
				
			||||||
            target=target,
 | 
					            target=target,
 | 
				
			||||||
            readonly=True,
 | 
					            readonly=True,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        self._flow_registry[(
 | 
					        self._flow_registry[
 | 
				
			||||||
            self.src_shm._token,
 | 
					            (self.src_shm._token, target.name)
 | 
				
			||||||
            target.name
 | 
					        ] = dst_shm._token
 | 
				
			||||||
        )] = dst_shm._token
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # if not opened:
 | 
					        # if not opened:
 | 
				
			||||||
        #     raise RuntimeError(
 | 
					        #     raise RuntimeError(
 | 
				
			||||||
| 
						 | 
					@ -624,8 +618,6 @@ async def open_vlm_displays(
 | 
				
			||||||
        # built-in vlm which we plot ASAP since it's
 | 
					        # built-in vlm which we plot ASAP since it's
 | 
				
			||||||
        # usually data provided directly with OHLC history.
 | 
					        # usually data provided directly with OHLC history.
 | 
				
			||||||
        shm = ohlcv
 | 
					        shm = ohlcv
 | 
				
			||||||
        ohlc_chart = linked.chart
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        chart = linked.add_plot(
 | 
					        chart = linked.add_plot(
 | 
				
			||||||
            name='volume',
 | 
					            name='volume',
 | 
				
			||||||
            shm=shm,
 | 
					            shm=shm,
 | 
				
			||||||
| 
						 | 
					@ -641,34 +633,26 @@ async def open_vlm_displays(
 | 
				
			||||||
            # the curve item internals are pretty convoluted.
 | 
					            # the curve item internals are pretty convoluted.
 | 
				
			||||||
            style='step',
 | 
					            style='step',
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        ohlc_chart.view.enable_auto_yrange(
 | 
					 | 
				
			||||||
            src_vb=chart.view,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # force 0 to always be in view
 | 
					        # force 0 to always be in view
 | 
				
			||||||
        def multi_maxmin(
 | 
					        def multi_maxmin(
 | 
				
			||||||
            names: list[str],
 | 
					            names: list[str],
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        ) -> tuple[float, float]:
 | 
					        ) -> tuple[float, float]:
 | 
				
			||||||
            '''
 | 
					 | 
				
			||||||
            Flows "group" maxmin loop; assumes all named flows
 | 
					 | 
				
			||||||
            are in the same co-domain and thus can be sorted
 | 
					 | 
				
			||||||
            as one set.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            Iterates all the named flows and calls the chart
 | 
					 | 
				
			||||||
            api to find their range values and return.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            TODO: really we should probably have a more built-in API
 | 
					 | 
				
			||||||
            for this?
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            '''
 | 
					 | 
				
			||||||
            mx = 0
 | 
					            mx = 0
 | 
				
			||||||
            for name in names:
 | 
					            for name in names:
 | 
				
			||||||
                ymn, ymx = chart.maxmin(name=name)
 | 
					
 | 
				
			||||||
                mx = max(mx, ymx)
 | 
					                mxmn = chart.maxmin(name=name)
 | 
				
			||||||
 | 
					                if mxmn:
 | 
				
			||||||
 | 
					                    ymax = mxmn[1]
 | 
				
			||||||
 | 
					                    if ymax > mx:
 | 
				
			||||||
 | 
					                        mx = ymax
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            return 0, mx
 | 
					            return 0, mx
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        chart.view.maxmin = partial(multi_maxmin, names=['volume'])
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: fix the x-axis label issue where if you put
 | 
					        # TODO: fix the x-axis label issue where if you put
 | 
				
			||||||
        # the axis on the left it's totally not lined up...
 | 
					        # the axis on the left it's totally not lined up...
 | 
				
			||||||
        # show volume units value on LHS (for dinkus)
 | 
					        # show volume units value on LHS (for dinkus)
 | 
				
			||||||
| 
						 | 
					@ -752,8 +736,6 @@ async def open_vlm_displays(
 | 
				
			||||||
                },
 | 
					                },
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            dvlm_pi.hideAxis('left')
 | 
					 | 
				
			||||||
            dvlm_pi.hideAxis('bottom')
 | 
					 | 
				
			||||||
            # all to be overlayed curve names
 | 
					            # all to be overlayed curve names
 | 
				
			||||||
            fields = [
 | 
					            fields = [
 | 
				
			||||||
               'dolla_vlm',
 | 
					               'dolla_vlm',
 | 
				
			||||||
| 
						 | 
					@ -794,7 +776,6 @@ async def open_vlm_displays(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            ) -> None:
 | 
					            ) -> None:
 | 
				
			||||||
                for name in names:
 | 
					                for name in names:
 | 
				
			||||||
 | 
					 | 
				
			||||||
                    if 'dark' in name:
 | 
					                    if 'dark' in name:
 | 
				
			||||||
                        color = dark_vlm_color
 | 
					                        color = dark_vlm_color
 | 
				
			||||||
                    elif 'rate' in name:
 | 
					                    elif 'rate' in name:
 | 
				
			||||||
| 
						 | 
					@ -886,7 +867,6 @@ async def open_vlm_displays(
 | 
				
			||||||
                # keep both regular and dark vlm in view
 | 
					                # keep both regular and dark vlm in view
 | 
				
			||||||
                names=trade_rate_fields,
 | 
					                names=trade_rate_fields,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
            tr_pi.hideAxis('bottom')
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            chart_curves(
 | 
					            chart_curves(
 | 
				
			||||||
                trade_rate_fields,
 | 
					                trade_rate_fields,
 | 
				
			||||||
| 
						 | 
					@ -960,7 +940,7 @@ async def start_fsp_displays(
 | 
				
			||||||
        #     },
 | 
					        #     },
 | 
				
			||||||
        # },
 | 
					        # },
 | 
				
			||||||
    }
 | 
					    }
 | 
				
			||||||
    profiler = Profiler(
 | 
					    profiler = pg.debug.Profiler(
 | 
				
			||||||
        delayed=False,
 | 
					        delayed=False,
 | 
				
			||||||
        disabled=False
 | 
					        disabled=False
 | 
				
			||||||
    )
 | 
					    )
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -33,7 +33,6 @@ import numpy as np
 | 
				
			||||||
import trio
 | 
					import trio
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
from .._profile import pg_profile_enabled, ms_slower_then
 | 
					from .._profile import pg_profile_enabled, ms_slower_then
 | 
				
			||||||
# from ._style import _min_points_to_show
 | 
					# from ._style import _min_points_to_show
 | 
				
			||||||
from ._editors import SelectRect
 | 
					from ._editors import SelectRect
 | 
				
			||||||
| 
						 | 
					@ -142,16 +141,13 @@ async def handle_viewmode_kb_inputs(
 | 
				
			||||||
                    Qt.Key_Space,
 | 
					                    Qt.Key_Space,
 | 
				
			||||||
                }
 | 
					                }
 | 
				
			||||||
            ):
 | 
					            ):
 | 
				
			||||||
                godw = view._chart.linked.godwidget
 | 
					                view._chart.linked.godwidget.search.focus()
 | 
				
			||||||
                godw.hist_linked.resize_sidepanes(from_linked=godw.rt_linked)
 | 
					 | 
				
			||||||
                godw.search.focus()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # esc and ctrl-c
 | 
					            # esc and ctrl-c
 | 
				
			||||||
            if key == Qt.Key_Escape or (ctrl and key == Qt.Key_C):
 | 
					            if key == Qt.Key_Escape or (ctrl and key == Qt.Key_C):
 | 
				
			||||||
                # ctrl-c as cancel
 | 
					                # ctrl-c as cancel
 | 
				
			||||||
                # https://forum.qt.io/topic/532/how-to-catch-ctrl-c-on-a-widget/9
 | 
					                # https://forum.qt.io/topic/532/how-to-catch-ctrl-c-on-a-widget/9
 | 
				
			||||||
                view.select_box.clear()
 | 
					                view.select_box.clear()
 | 
				
			||||||
                view.linked.focus()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # cancel order or clear graphics
 | 
					            # cancel order or clear graphics
 | 
				
			||||||
            if key == Qt.Key_C or key == Qt.Key_Delete:
 | 
					            if key == Qt.Key_C or key == Qt.Key_Delete:
 | 
				
			||||||
| 
						 | 
					@ -182,17 +178,17 @@ async def handle_viewmode_kb_inputs(
 | 
				
			||||||
            if key in pressed:
 | 
					            if key in pressed:
 | 
				
			||||||
                pressed.remove(key)
 | 
					                pressed.remove(key)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # QUERY/QUOTE MODE
 | 
					        # QUERY/QUOTE MODE #
 | 
				
			||||||
        # ----------------
 | 
					 | 
				
			||||||
        if {Qt.Key_Q}.intersection(pressed):
 | 
					        if {Qt.Key_Q}.intersection(pressed):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            view.linked.cursor.in_query_mode = True
 | 
					            view.linkedsplits.cursor.in_query_mode = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            view.linked.cursor.in_query_mode = False
 | 
					            view.linkedsplits.cursor.in_query_mode = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # SELECTION MODE
 | 
					        # SELECTION MODE
 | 
				
			||||||
        # --------------
 | 
					        # --------------
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if shift:
 | 
					        if shift:
 | 
				
			||||||
            if view.state['mouseMode'] == ViewBox.PanMode:
 | 
					            if view.state['mouseMode'] == ViewBox.PanMode:
 | 
				
			||||||
                view.setMouseMode(ViewBox.RectMode)
 | 
					                view.setMouseMode(ViewBox.RectMode)
 | 
				
			||||||
| 
						 | 
					@ -213,27 +209,18 @@ async def handle_viewmode_kb_inputs(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # ORDER MODE
 | 
					        # ORDER MODE
 | 
				
			||||||
        # ----------
 | 
					        # ----------
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # live vs. dark trigger + an action {buy, sell, alert}
 | 
					        # live vs. dark trigger + an action {buy, sell, alert}
 | 
				
			||||||
        order_keys_pressed = ORDER_MODE.intersection(pressed)
 | 
					        order_keys_pressed = ORDER_MODE.intersection(pressed)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if order_keys_pressed:
 | 
					        if order_keys_pressed:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # TODO: it seems like maybe the composition should be
 | 
					            # show the pp size label
 | 
				
			||||||
            # reversed here? Like, maybe we should have the nav have
 | 
					            order_mode.current_pp.show()
 | 
				
			||||||
            # access to the pos state and then make encapsulated logic
 | 
					 | 
				
			||||||
            # that shows the right stuff on screen instead or order mode
 | 
					 | 
				
			||||||
            # and position-related abstractions doing this?
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # show the pp size label only if there is
 | 
					 | 
				
			||||||
            # a non-zero pos existing
 | 
					 | 
				
			||||||
            tracker = order_mode.current_pp
 | 
					 | 
				
			||||||
            if tracker.live_pp.size:
 | 
					 | 
				
			||||||
                tracker.nav.show()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # TODO: show pp config mini-params in status bar widget
 | 
					            # TODO: show pp config mini-params in status bar widget
 | 
				
			||||||
            # mode.pp_config.show()
 | 
					            # mode.pp_config.show()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            trigger_type: str = 'dark'
 | 
					 | 
				
			||||||
            if (
 | 
					            if (
 | 
				
			||||||
                # 's' for "submit" to activate "live" order
 | 
					                # 's' for "submit" to activate "live" order
 | 
				
			||||||
                Qt.Key_S in pressed or
 | 
					                Qt.Key_S in pressed or
 | 
				
			||||||
| 
						 | 
					@ -241,6 +228,9 @@ async def handle_viewmode_kb_inputs(
 | 
				
			||||||
            ):
 | 
					            ):
 | 
				
			||||||
                trigger_type: str = 'live'
 | 
					                trigger_type: str = 'live'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            else:
 | 
				
			||||||
 | 
					                trigger_type: str = 'dark'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # order mode trigger "actions"
 | 
					            # order mode trigger "actions"
 | 
				
			||||||
            if Qt.Key_D in pressed:  # for "damp eet"
 | 
					            if Qt.Key_D in pressed:  # for "damp eet"
 | 
				
			||||||
                action = 'sell'
 | 
					                action = 'sell'
 | 
				
			||||||
| 
						 | 
					@ -269,8 +259,8 @@ async def handle_viewmode_kb_inputs(
 | 
				
			||||||
                Qt.Key_S in pressed or
 | 
					                Qt.Key_S in pressed or
 | 
				
			||||||
                order_keys_pressed or
 | 
					                order_keys_pressed or
 | 
				
			||||||
                Qt.Key_O in pressed
 | 
					                Qt.Key_O in pressed
 | 
				
			||||||
            )
 | 
					            ) and
 | 
				
			||||||
            and key in NUMBER_LINE
 | 
					            key in NUMBER_LINE
 | 
				
			||||||
        ):
 | 
					        ):
 | 
				
			||||||
            # hot key to set order slots size.
 | 
					            # hot key to set order slots size.
 | 
				
			||||||
            # change edit field to current number line value,
 | 
					            # change edit field to current number line value,
 | 
				
			||||||
| 
						 | 
					@ -288,7 +278,7 @@ async def handle_viewmode_kb_inputs(
 | 
				
			||||||
        else:  # none active
 | 
					        else:  # none active
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # hide pp label
 | 
					            # hide pp label
 | 
				
			||||||
            order_mode.current_pp.nav.hide_info()
 | 
					            order_mode.current_pp.hide_info()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # if none are pressed, remove "staged" level
 | 
					            # if none are pressed, remove "staged" level
 | 
				
			||||||
            # line under cursor position
 | 
					            # line under cursor position
 | 
				
			||||||
| 
						 | 
					@ -329,6 +319,7 @@ async def handle_viewmode_mouse(
 | 
				
			||||||
        ):
 | 
					        ):
 | 
				
			||||||
            # when in order mode, submit execution
 | 
					            # when in order mode, submit execution
 | 
				
			||||||
            # msg.event.accept()
 | 
					            # msg.event.accept()
 | 
				
			||||||
 | 
					            # breakpoint()
 | 
				
			||||||
            view.order_mode.submit_order()
 | 
					            view.order_mode.submit_order()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -345,6 +336,16 @@ class ChartView(ViewBox):
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    mode_name: str = 'view'
 | 
					    mode_name: str = 'view'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # "relay events" for making overlaid views work.
 | 
				
			||||||
 | 
					    # NOTE: these MUST be defined here (and can't be monkey patched
 | 
				
			||||||
 | 
					    # on later) due to signal construction requiring refs to be
 | 
				
			||||||
 | 
					    # in place during the run of meta-class machinery.
 | 
				
			||||||
 | 
					    mouseDragEventRelay = QtCore.Signal(object, object, object)
 | 
				
			||||||
 | 
					    wheelEventRelay = QtCore.Signal(object, object, object)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    event_relay_source: 'Optional[ViewBox]' = None
 | 
				
			||||||
 | 
					    relays: dict[str, QtCore.Signal] = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -374,7 +375,7 @@ class ChartView(ViewBox):
 | 
				
			||||||
            y=True,
 | 
					            y=True,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.linked = None
 | 
					        self.linkedsplits = None
 | 
				
			||||||
        self._chart: 'ChartPlotWidget' = None  # noqa
 | 
					        self._chart: 'ChartPlotWidget' = None  # noqa
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # add our selection box annotator
 | 
					        # add our selection box annotator
 | 
				
			||||||
| 
						 | 
					@ -396,11 +397,8 @@ class ChartView(ViewBox):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        if self._ic is None:
 | 
					        if self._ic is None:
 | 
				
			||||||
            try:
 | 
					            self.chart.pause_all_feeds()
 | 
				
			||||||
                self.chart.pause_all_feeds()
 | 
					            self._ic = trio.Event()
 | 
				
			||||||
                self._ic = trio.Event()
 | 
					 | 
				
			||||||
            except RuntimeError:
 | 
					 | 
				
			||||||
                pass
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def signal_ic(
 | 
					    def signal_ic(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
| 
						 | 
					@ -413,12 +411,9 @@ class ChartView(ViewBox):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        if self._ic:
 | 
					        if self._ic:
 | 
				
			||||||
            try:
 | 
					            self._ic.set()
 | 
				
			||||||
                self._ic.set()
 | 
					            self._ic = None
 | 
				
			||||||
                self._ic = None
 | 
					            self.chart.resume_all_feeds()
 | 
				
			||||||
                self.chart.resume_all_feeds()
 | 
					 | 
				
			||||||
            except RuntimeError:
 | 
					 | 
				
			||||||
                pass
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @asynccontextmanager
 | 
					    @asynccontextmanager
 | 
				
			||||||
    async def open_async_input_handler(
 | 
					    async def open_async_input_handler(
 | 
				
			||||||
| 
						 | 
					@ -468,7 +463,7 @@ class ChartView(ViewBox):
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        ev,
 | 
					        ev,
 | 
				
			||||||
        axis=None,
 | 
					        axis=None,
 | 
				
			||||||
        # relayed_from: ChartView = None,
 | 
					        relayed_from: ChartView = None,
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        Override "center-point" location for scrolling.
 | 
					        Override "center-point" location for scrolling.
 | 
				
			||||||
| 
						 | 
					@ -479,20 +474,13 @@ class ChartView(ViewBox):
 | 
				
			||||||
        TODO: PR a method into ``pyqtgraph`` to make this configurable
 | 
					        TODO: PR a method into ``pyqtgraph`` to make this configurable
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        linked = self.linked
 | 
					 | 
				
			||||||
        if (
 | 
					 | 
				
			||||||
            not linked
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            # print(f'{self.name} not linked but relay from {relayed_from.name}')
 | 
					 | 
				
			||||||
            return
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if axis in (0, 1):
 | 
					        if axis in (0, 1):
 | 
				
			||||||
            mask = [False, False]
 | 
					            mask = [False, False]
 | 
				
			||||||
            mask[axis] = self.state['mouseEnabled'][axis]
 | 
					            mask[axis] = self.state['mouseEnabled'][axis]
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            mask = self.state['mouseEnabled'][:]
 | 
					            mask = self.state['mouseEnabled'][:]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        chart = self.linked.chart
 | 
					        chart = self.linkedsplits.chart
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # don't zoom more then the min points setting
 | 
					        # don't zoom more then the min points setting
 | 
				
			||||||
        l, lbar, rbar, r = chart.bars_range()
 | 
					        l, lbar, rbar, r = chart.bars_range()
 | 
				
			||||||
| 
						 | 
					@ -605,20 +593,9 @@ class ChartView(ViewBox):
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        ev,
 | 
					        ev,
 | 
				
			||||||
        axis: Optional[int] = None,
 | 
					        axis: Optional[int] = None,
 | 
				
			||||||
        # relayed_from: ChartView = None,
 | 
					        relayed_from: ChartView = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        # if relayed_from:
 | 
					 | 
				
			||||||
        #     print(f'PAN: {self.name} -> RELAYED FROM: {relayed_from.name}')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # NOTE since in the overlay case axes are already
 | 
					 | 
				
			||||||
        # "linked" any x-range change will already be mirrored
 | 
					 | 
				
			||||||
        # in all overlaid ``PlotItems``, so we need to simply
 | 
					 | 
				
			||||||
        # ignore the signal here since otherwise we get N-calls
 | 
					 | 
				
			||||||
        # from N-overlays resulting in an "accelerated" feeling
 | 
					 | 
				
			||||||
        # panning motion instead of the expect linear shift.
 | 
					 | 
				
			||||||
        # if relayed_from:
 | 
					 | 
				
			||||||
        #     return
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        pos = ev.pos()
 | 
					        pos = ev.pos()
 | 
				
			||||||
        lastPos = ev.lastPos()
 | 
					        lastPos = ev.lastPos()
 | 
				
			||||||
| 
						 | 
					@ -692,10 +669,7 @@ class ChartView(ViewBox):
 | 
				
			||||||
                # XXX: WHY
 | 
					                # XXX: WHY
 | 
				
			||||||
                ev.accept()
 | 
					                ev.accept()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                try:
 | 
					                self.start_ic()
 | 
				
			||||||
                    self.start_ic()
 | 
					 | 
				
			||||||
                except RuntimeError:
 | 
					 | 
				
			||||||
                    pass
 | 
					 | 
				
			||||||
                # if self._ic is None:
 | 
					                # if self._ic is None:
 | 
				
			||||||
                #     self.chart.pause_all_feeds()
 | 
					                #     self.chart.pause_all_feeds()
 | 
				
			||||||
                #     self._ic = trio.Event()
 | 
					                #     self._ic = trio.Event()
 | 
				
			||||||
| 
						 | 
					@ -787,7 +761,7 @@ class ChartView(ViewBox):
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        name = self.name
 | 
					        name = self.name
 | 
				
			||||||
        # print(f'YRANGE ON {name}')
 | 
					        # print(f'YRANGE ON {name}')
 | 
				
			||||||
        profiler = Profiler(
 | 
					        profiler = pg.debug.Profiler(
 | 
				
			||||||
            msg=f'`ChartView._set_yrange()`: `{name}`',
 | 
					            msg=f'`ChartView._set_yrange()`: `{name}`',
 | 
				
			||||||
            disabled=not pg_profile_enabled(),
 | 
					            disabled=not pg_profile_enabled(),
 | 
				
			||||||
            ms_threshold=ms_slower_then,
 | 
					            ms_threshold=ms_slower_then,
 | 
				
			||||||
| 
						 | 
					@ -856,37 +830,33 @@ class ChartView(ViewBox):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        Assign callbacks for rescaling and resampling y-axis data
 | 
					        Assign callback for rescaling y-axis automatically
 | 
				
			||||||
        automatically based on data contents and ``ViewBox`` state.
 | 
					        based on data contents and ``ViewBox`` state.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        if src_vb is None:
 | 
					        if src_vb is None:
 | 
				
			||||||
            src_vb = self
 | 
					            src_vb = self
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # widget-UIs/splitter(s) resizing
 | 
					        # splitter(s) resizing
 | 
				
			||||||
        src_vb.sigResized.connect(self._set_yrange)
 | 
					        src_vb.sigResized.connect(self._set_yrange)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # re-sampling trigger:
 | 
					 | 
				
			||||||
        # TODO: a smarter way to avoid calling this needlessly?
 | 
					        # TODO: a smarter way to avoid calling this needlessly?
 | 
				
			||||||
        # 2 things i can think of:
 | 
					        # 2 things i can think of:
 | 
				
			||||||
        # - register downsample-able graphics specially and only
 | 
					        # - register downsample-able graphics specially and only
 | 
				
			||||||
        #   iterate those.
 | 
					        #   iterate those.
 | 
				
			||||||
        # - only register this when certain downsample-able graphics are
 | 
					        # - only register this when certain downsampleable graphics are
 | 
				
			||||||
        #   "added to scene".
 | 
					        #   "added to scene".
 | 
				
			||||||
        src_vb.sigRangeChangedManually.connect(
 | 
					        src_vb.sigRangeChangedManually.connect(
 | 
				
			||||||
            self.maybe_downsample_graphics
 | 
					            self.maybe_downsample_graphics
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # mouse wheel doesn't emit XRangeChanged
 | 
					        # mouse wheel doesn't emit XRangeChanged
 | 
				
			||||||
        src_vb.sigRangeChangedManually.connect(self._set_yrange)
 | 
					        src_vb.sigRangeChangedManually.connect(self._set_yrange)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # XXX: enabling these will cause "jittery"-ness
 | 
					        # src_vb.sigXRangeChanged.connect(self._set_yrange)
 | 
				
			||||||
        # on zoom where sharp diffs in the y-range will
 | 
					        # src_vb.sigXRangeChanged.connect(
 | 
				
			||||||
        # not re-size right away until a new sample update?
 | 
					        #     self.maybe_downsample_graphics
 | 
				
			||||||
        # if src_vb is not self:
 | 
					        # )
 | 
				
			||||||
        #     src_vb.sigXRangeChanged.connect(self._set_yrange)
 | 
					 | 
				
			||||||
        #     src_vb.sigXRangeChanged.connect(
 | 
					 | 
				
			||||||
        #         self.maybe_downsample_graphics
 | 
					 | 
				
			||||||
        #     )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def disable_auto_yrange(self) -> None:
 | 
					    def disable_auto_yrange(self) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -927,7 +897,8 @@ class ChartView(ViewBox):
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        autoscale_overlays: bool = True,
 | 
					        autoscale_overlays: bool = True,
 | 
				
			||||||
    ):
 | 
					    ):
 | 
				
			||||||
        profiler = Profiler(
 | 
					
 | 
				
			||||||
 | 
					        profiler = pg.debug.Profiler(
 | 
				
			||||||
            msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
 | 
					            msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
 | 
				
			||||||
            disabled=not pg_profile_enabled(),
 | 
					            disabled=not pg_profile_enabled(),
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -941,12 +912,8 @@ class ChartView(ViewBox):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: a faster single-loop-iterator way of doing this XD
 | 
					        # TODO: a faster single-loop-iterator way of doing this XD
 | 
				
			||||||
        chart = self._chart
 | 
					        chart = self._chart
 | 
				
			||||||
        plots = {chart.name: chart}
 | 
					        linked = self.linkedsplits
 | 
				
			||||||
 | 
					        plots = linked.subplots | {chart.name: chart}
 | 
				
			||||||
        linked = self.linked
 | 
					 | 
				
			||||||
        if linked:
 | 
					 | 
				
			||||||
            plots |= linked.subplots
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        for chart_name, chart in plots.items():
 | 
					        for chart_name, chart in plots.items():
 | 
				
			||||||
            for name, flow in chart._flows.items():
 | 
					            for name, flow in chart._flows.items():
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -956,7 +923,6 @@ class ChartView(ViewBox):
 | 
				
			||||||
                    # XXX: super important to be aware of this.
 | 
					                    # XXX: super important to be aware of this.
 | 
				
			||||||
                    # or not flow.graphics.isVisible()
 | 
					                    # or not flow.graphics.isVisible()
 | 
				
			||||||
                ):
 | 
					                ):
 | 
				
			||||||
                    # print(f'skipping {flow.name}')
 | 
					 | 
				
			||||||
                    continue
 | 
					                    continue
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # pass in no array which will read and render from the last
 | 
					                # pass in no array which will read and render from the last
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -18,14 +18,9 @@
 | 
				
			||||||
Lines for orders, alerts, L2.
 | 
					Lines for orders, alerts, L2.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from __future__ import annotations
 | 
					 | 
				
			||||||
from functools import partial
 | 
					from functools import partial
 | 
				
			||||||
from math import floor
 | 
					from math import floor
 | 
				
			||||||
from typing import (
 | 
					from typing import Optional, Callable
 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Callable,
 | 
					 | 
				
			||||||
    TYPE_CHECKING,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
import pyqtgraph as pg
 | 
					import pyqtgraph as pg
 | 
				
			||||||
from pyqtgraph import Point, functions as fn
 | 
					from pyqtgraph import Point, functions as fn
 | 
				
			||||||
| 
						 | 
					@ -42,9 +37,6 @@ from ..calc import humanize
 | 
				
			||||||
from ._label import Label
 | 
					from ._label import Label
 | 
				
			||||||
from ._style import hcolor, _font
 | 
					from ._style import hcolor, _font
 | 
				
			||||||
 | 
					
 | 
				
			||||||
if TYPE_CHECKING:
 | 
					 | 
				
			||||||
    from ._cursor import Cursor
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
# TODO: probably worth investigating if we can
 | 
					# TODO: probably worth investigating if we can
 | 
				
			||||||
# make .boundingRect() faster:
 | 
					# make .boundingRect() faster:
 | 
				
			||||||
| 
						 | 
					@ -92,7 +84,7 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self._marker = None
 | 
					        self._marker = None
 | 
				
			||||||
        self.only_show_markers_on_hover = only_show_markers_on_hover
 | 
					        self.only_show_markers_on_hover = only_show_markers_on_hover
 | 
				
			||||||
        self.track_marker_pos: bool = False
 | 
					        self.show_markers: bool = True  # presuming the line is hovered at init
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # should line go all the way to far end or leave a "margin"
 | 
					        # should line go all the way to far end or leave a "margin"
 | 
				
			||||||
        # space for other graphics (eg. L1 book)
 | 
					        # space for other graphics (eg. L1 book)
 | 
				
			||||||
| 
						 | 
					@ -130,9 +122,6 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
        self._y_incr_mult = 1 / chart.linked.symbol.tick_size
 | 
					        self._y_incr_mult = 1 / chart.linked.symbol.tick_size
 | 
				
			||||||
        self._right_end_sc: float = 0
 | 
					        self._right_end_sc: float = 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # use px caching
 | 
					 | 
				
			||||||
        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def txt_offsets(self) -> tuple[int, int]:
 | 
					    def txt_offsets(self) -> tuple[int, int]:
 | 
				
			||||||
        return 0, 0
 | 
					        return 0, 0
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -227,23 +216,20 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
        y: float
 | 
					        y: float
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''
 | 
					        '''Chart coordinates cursor tracking callback.
 | 
				
			||||||
        Chart coordinates cursor tracking callback.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        this is called by our ``Cursor`` type once this line is set to
 | 
					        this is called by our ``Cursor`` type once this line is set to
 | 
				
			||||||
        track the cursor: for every movement this callback is invoked to
 | 
					        track the cursor: for every movement this callback is invoked to
 | 
				
			||||||
        reposition the line with the current view coordinates.
 | 
					        reposition the line with the current view coordinates.
 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        self.movable = True
 | 
					        self.movable = True
 | 
				
			||||||
        self.set_level(y)  # implictly calls reposition handler
 | 
					        self.set_level(y)  # implictly calls reposition handler
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def mouseDragEvent(self, ev):
 | 
					    def mouseDragEvent(self, ev):
 | 
				
			||||||
        '''
 | 
					        """Override the ``InfiniteLine`` handler since we need more
 | 
				
			||||||
        Override the ``InfiniteLine`` handler since we need more
 | 
					 | 
				
			||||||
        detailed control and start end signalling.
 | 
					        detailed control and start end signalling.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        cursor = self._chart.linked.cursor
 | 
					        cursor = self._chart.linked.cursor
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # hide y-crosshair
 | 
					        # hide y-crosshair
 | 
				
			||||||
| 
						 | 
					@ -295,20 +281,10 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
            # show y-crosshair again
 | 
					            # show y-crosshair again
 | 
				
			||||||
            cursor.show_xhair()
 | 
					            cursor.show_xhair()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def get_cursor(self) -> Optional[Cursor]:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        chart = self._chart
 | 
					 | 
				
			||||||
        cur = chart.linked.cursor
 | 
					 | 
				
			||||||
        if self in cur._hovered:
 | 
					 | 
				
			||||||
            return cur
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        return None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def delete(self) -> None:
 | 
					    def delete(self) -> None:
 | 
				
			||||||
        '''
 | 
					        """Remove this line from containing chart/view/scene.
 | 
				
			||||||
        Remove this line from containing chart/view/scene.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        scene = self.scene()
 | 
					        scene = self.scene()
 | 
				
			||||||
        if scene:
 | 
					        if scene:
 | 
				
			||||||
            for label in self._labels:
 | 
					            for label in self._labels:
 | 
				
			||||||
| 
						 | 
					@ -322,8 +298,9 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # remove from chart/cursor states
 | 
					        # remove from chart/cursor states
 | 
				
			||||||
        chart = self._chart
 | 
					        chart = self._chart
 | 
				
			||||||
        cur = self.get_cursor()
 | 
					        cur = chart.linked.cursor
 | 
				
			||||||
        if cur:
 | 
					
 | 
				
			||||||
 | 
					        if self in cur._hovered:
 | 
				
			||||||
            cur._hovered.remove(self)
 | 
					            cur._hovered.remove(self)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        chart.plotItem.removeItem(self)
 | 
					        chart.plotItem.removeItem(self)
 | 
				
			||||||
| 
						 | 
					@ -331,8 +308,8 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
    def mouseDoubleClickEvent(
 | 
					    def mouseDoubleClickEvent(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        ev: QtGui.QMouseEvent,
 | 
					        ev: QtGui.QMouseEvent,
 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: enter labels edit mode
 | 
					        # TODO: enter labels edit mode
 | 
				
			||||||
        print(f'double click {ev}')
 | 
					        print(f'double click {ev}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -357,22 +334,30 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        line_end, marker_right, r_axis_x = self._chart.marker_right_points()
 | 
					        line_end, marker_right, r_axis_x = self._chart.marker_right_points()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # (legacy) NOTE: at one point this seemed slower when moving around
 | 
					        if self.show_markers and self.markers:
 | 
				
			||||||
        # order lines.. not sure if that's still true or why but we've
 | 
					
 | 
				
			||||||
        # dropped the original hacky `.pain()` transform stuff for inf
 | 
					            p.setPen(self.pen)
 | 
				
			||||||
        # line markers now - check the git history if it needs to be
 | 
					            qgo_draw_markers(
 | 
				
			||||||
        # reverted.
 | 
					                self.markers,
 | 
				
			||||||
        if self._marker:
 | 
					                self.pen.color(),
 | 
				
			||||||
            if self.track_marker_pos:
 | 
					                p,
 | 
				
			||||||
                # make the line end at the marker's x pos
 | 
					                vb_left,
 | 
				
			||||||
                line_end = marker_right = self._marker.pos().x()
 | 
					                vb_right,
 | 
				
			||||||
 | 
					                marker_right,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					            # marker_size = self.markers[0][2]
 | 
				
			||||||
 | 
					            self._maxMarkerSize = max([m[2] / 2. for m in self.markers])
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # this seems slower when moving around
 | 
				
			||||||
 | 
					        # order lines.. not sure wtf is up with that.
 | 
				
			||||||
 | 
					        # for now we're just using it on the position line.
 | 
				
			||||||
 | 
					        elif self._marker:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # TODO: make this label update part of a scene-aware-marker
 | 
					            # TODO: make this label update part of a scene-aware-marker
 | 
				
			||||||
            # composed annotation
 | 
					            # composed annotation
 | 
				
			||||||
            self._marker.setPos(
 | 
					            self._marker.setPos(
 | 
				
			||||||
                QPointF(marker_right, self.scene_y())
 | 
					                QPointF(marker_right, self.scene_y())
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
 | 
					 | 
				
			||||||
            if hasattr(self._marker, 'label'):
 | 
					            if hasattr(self._marker, 'label'):
 | 
				
			||||||
                self._marker.label.update()
 | 
					                self._marker.label.update()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -394,14 +379,16 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def hide(self) -> None:
 | 
					    def hide(self) -> None:
 | 
				
			||||||
        super().hide()
 | 
					        super().hide()
 | 
				
			||||||
        mkr = self._marker
 | 
					        if self._marker:
 | 
				
			||||||
        if mkr:
 | 
					            self._marker.hide()
 | 
				
			||||||
            mkr.hide()
 | 
					            # needed for ``order_line()`` lines currently
 | 
				
			||||||
 | 
					            self._marker.label.hide()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def show(self) -> None:
 | 
					    def show(self) -> None:
 | 
				
			||||||
        super().show()
 | 
					        super().show()
 | 
				
			||||||
        if self._marker:
 | 
					        if self._marker:
 | 
				
			||||||
            self._marker.show()
 | 
					            self._marker.show()
 | 
				
			||||||
 | 
					            # self._marker.label.show()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def scene_y(self) -> float:
 | 
					    def scene_y(self) -> float:
 | 
				
			||||||
        return self.getViewBox().mapFromView(
 | 
					        return self.getViewBox().mapFromView(
 | 
				
			||||||
| 
						 | 
					@ -434,10 +421,6 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return path
 | 
					        return path
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @property
 | 
					 | 
				
			||||||
    def marker(self) -> LevelMarker:
 | 
					 | 
				
			||||||
        return self._marker
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def hoverEvent(self, ev):
 | 
					    def hoverEvent(self, ev):
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        Mouse hover callback.
 | 
					        Mouse hover callback.
 | 
				
			||||||
| 
						 | 
					@ -446,16 +429,17 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
        cur = self._chart.linked.cursor
 | 
					        cur = self._chart.linked.cursor
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # hovered
 | 
					        # hovered
 | 
				
			||||||
        if (
 | 
					        if (not ev.isExit()) and ev.acceptDrags(QtCore.Qt.LeftButton):
 | 
				
			||||||
            not ev.isExit()
 | 
					
 | 
				
			||||||
            and ev.acceptDrags(QtCore.Qt.LeftButton)
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            # if already hovered we don't need to run again
 | 
					            # if already hovered we don't need to run again
 | 
				
			||||||
            if self.mouseHovering is True:
 | 
					            if self.mouseHovering is True:
 | 
				
			||||||
                return
 | 
					                return
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if self.only_show_markers_on_hover:
 | 
					            if self.only_show_markers_on_hover:
 | 
				
			||||||
                self.show_markers()
 | 
					                self.show_markers = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                if self._marker:
 | 
				
			||||||
 | 
					                    self._marker.show()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # highlight if so configured
 | 
					            # highlight if so configured
 | 
				
			||||||
            if self.highlight_on_hover:
 | 
					            if self.highlight_on_hover:
 | 
				
			||||||
| 
						 | 
					@ -498,7 +482,11 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
            cur._hovered.remove(self)
 | 
					            cur._hovered.remove(self)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if self.only_show_markers_on_hover:
 | 
					            if self.only_show_markers_on_hover:
 | 
				
			||||||
                self.hide_markers()
 | 
					                self.show_markers = False
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                if self._marker:
 | 
				
			||||||
 | 
					                    self._marker.hide()
 | 
				
			||||||
 | 
					                    self._marker.label.hide()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if self not in cur._trackers:
 | 
					            if self not in cur._trackers:
 | 
				
			||||||
                cur.show_xhair(y_label_level=self.value())
 | 
					                cur.show_xhair(y_label_level=self.value())
 | 
				
			||||||
| 
						 | 
					@ -510,15 +498,6 @@ class LevelLine(pg.InfiniteLine):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.update()
 | 
					        self.update()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def hide_markers(self) -> None:
 | 
					 | 
				
			||||||
        if self._marker:
 | 
					 | 
				
			||||||
            self._marker.hide()
 | 
					 | 
				
			||||||
            self._marker.label.hide()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def show_markers(self) -> None:
 | 
					 | 
				
			||||||
        if self._marker:
 | 
					 | 
				
			||||||
            self._marker.show()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
def level_line(
 | 
					def level_line(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -539,10 +518,9 @@ def level_line(
 | 
				
			||||||
    **kwargs,
 | 
					    **kwargs,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> LevelLine:
 | 
					) -> LevelLine:
 | 
				
			||||||
    '''
 | 
					    """Convenience routine to add a styled horizontal line to a plot.
 | 
				
			||||||
    Convenience routine to add a styled horizontal line to a plot.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    """
 | 
				
			||||||
    hl_color = color + '_light' if highlight_on_hover else color
 | 
					    hl_color = color + '_light' if highlight_on_hover else color
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    line = LevelLine(
 | 
					    line = LevelLine(
 | 
				
			||||||
| 
						 | 
					@ -724,7 +702,7 @@ def order_line(
 | 
				
			||||||
        marker = LevelMarker(
 | 
					        marker = LevelMarker(
 | 
				
			||||||
            chart=chart,
 | 
					            chart=chart,
 | 
				
			||||||
            style=marker_style,
 | 
					            style=marker_style,
 | 
				
			||||||
            get_level=line.value,  # callback
 | 
					            get_level=line.value,
 | 
				
			||||||
            size=marker_size,
 | 
					            size=marker_size,
 | 
				
			||||||
            keep_in_view=False,
 | 
					            keep_in_view=False,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
| 
						 | 
					@ -733,8 +711,7 @@ def order_line(
 | 
				
			||||||
        marker = line.add_marker(marker)
 | 
					        marker = line.add_marker(marker)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # XXX: DON'T COMMENT THIS!
 | 
					        # XXX: DON'T COMMENT THIS!
 | 
				
			||||||
        # this fixes it the artifact issue!
 | 
					        # this fixes it the artifact issue! .. of course, bounding rect stuff
 | 
				
			||||||
        # .. of course, bounding rect stuff
 | 
					 | 
				
			||||||
        line._maxMarkerSize = marker_size
 | 
					        line._maxMarkerSize = marker_size
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        assert line._marker is marker
 | 
					        assert line._marker is marker
 | 
				
			||||||
| 
						 | 
					@ -755,8 +732,7 @@ def order_line(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        if action != 'alert':
 | 
					        if action != 'alert':
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # add a partial position label if we also added a level
 | 
					            # add a partial position label if we also added a level marker
 | 
				
			||||||
            # marker
 | 
					 | 
				
			||||||
            pp_size_label = Label(
 | 
					            pp_size_label = Label(
 | 
				
			||||||
                view=view,
 | 
					                view=view,
 | 
				
			||||||
                color=line.color,
 | 
					                color=line.color,
 | 
				
			||||||
| 
						 | 
					@ -790,9 +766,9 @@ def order_line(
 | 
				
			||||||
            # XXX: without this the pp proportion label next the marker
 | 
					            # XXX: without this the pp proportion label next the marker
 | 
				
			||||||
            # seems to lag?  this is the same issue we had with position
 | 
					            # seems to lag?  this is the same issue we had with position
 | 
				
			||||||
            # lines which we handle with ``.update_graphcis()``.
 | 
					            # lines which we handle with ``.update_graphcis()``.
 | 
				
			||||||
 | 
					            # marker._on_paint=lambda marker: pp_size_label.update()
 | 
				
			||||||
            marker._on_paint = lambda marker: pp_size_label.update()
 | 
					            marker._on_paint = lambda marker: pp_size_label.update()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # XXX: THIS IS AN UNTYPED MONKEY PATCH!?!?!
 | 
					 | 
				
			||||||
        marker.label = label
 | 
					        marker.label = label
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # sanity check
 | 
					    # sanity check
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -1,98 +0,0 @@
 | 
				
			||||||
# piker: trading gear for hackers
 | 
					 | 
				
			||||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is free software: you can redistribute it and/or modify
 | 
					 | 
				
			||||||
# it under the terms of the GNU Affero General Public License as published by
 | 
					 | 
				
			||||||
# the Free Software Foundation, either version 3 of the License, or
 | 
					 | 
				
			||||||
# (at your option) any later version.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# This program is distributed in the hope that it will be useful,
 | 
					 | 
				
			||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
					 | 
				
			||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
					 | 
				
			||||||
# GNU Affero General Public License for more details.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# You should have received a copy of the GNU Affero General Public License
 | 
					 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
"""
 | 
					 | 
				
			||||||
Notifications utils.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
"""
 | 
					 | 
				
			||||||
import os
 | 
					 | 
				
			||||||
import platform
 | 
					 | 
				
			||||||
import subprocess
 | 
					 | 
				
			||||||
from typing import Optional
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import trio
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
from ..log import get_logger
 | 
					 | 
				
			||||||
from ..clearing._messages import (
 | 
					 | 
				
			||||||
    Status,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
log = get_logger(__name__)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
_dbus_uid: Optional[str] = ''
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
async def notify_from_ems_status_msg(
 | 
					 | 
				
			||||||
    msg: Status,
 | 
					 | 
				
			||||||
    duration: int = 3000,
 | 
					 | 
				
			||||||
    is_subproc: bool = False,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
) -> None:
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Send a linux desktop notification.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    Handle subprocesses by discovering the dbus user id
 | 
					 | 
				
			||||||
    on first call.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    if platform.system() != "Linux":
 | 
					 | 
				
			||||||
        return
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO: this in another task?
 | 
					 | 
				
			||||||
    # not sure if this will ever be a bottleneck,
 | 
					 | 
				
			||||||
    # we probably could do graphics stuff first tho?
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    if is_subproc:
 | 
					 | 
				
			||||||
        global _dbus_uid
 | 
					 | 
				
			||||||
        su = os.environ.get('SUDO_USER')
 | 
					 | 
				
			||||||
        if (
 | 
					 | 
				
			||||||
            not _dbus_uid
 | 
					 | 
				
			||||||
            and su
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # TODO: use `trio` but we need to use nursery.start()
 | 
					 | 
				
			||||||
            # to use pipes?
 | 
					 | 
				
			||||||
            # result = await trio.run_process(
 | 
					 | 
				
			||||||
            result = subprocess.run(
 | 
					 | 
				
			||||||
                [
 | 
					 | 
				
			||||||
                    'id',
 | 
					 | 
				
			||||||
                    '-u',
 | 
					 | 
				
			||||||
                    su,
 | 
					 | 
				
			||||||
                ],
 | 
					 | 
				
			||||||
                stdout=subprocess.PIPE,
 | 
					 | 
				
			||||||
                stderr=subprocess.PIPE,
 | 
					 | 
				
			||||||
                # check=True
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
            _dbus_uid = result.stdout.decode("utf-8").replace('\n', '')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            os.environ['DBUS_SESSION_BUS_ADDRESS'] = (
 | 
					 | 
				
			||||||
                f'unix:path=/run/user/{_dbus_uid}/bus'
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    result = await trio.run_process(
 | 
					 | 
				
			||||||
        [
 | 
					 | 
				
			||||||
            'notify-send',
 | 
					 | 
				
			||||||
            '-u', 'normal',
 | 
					 | 
				
			||||||
            '-t', f'{duration}',
 | 
					 | 
				
			||||||
            'piker',
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # TODO: add in standard fill/exec info that maybe we
 | 
					 | 
				
			||||||
            # pack in a broker independent way?
 | 
					 | 
				
			||||||
            f"'{msg.pformat()}'",
 | 
					 | 
				
			||||||
        ],
 | 
					 | 
				
			||||||
    )
 | 
					 | 
				
			||||||
    log.runtime(result)
 | 
					 | 
				
			||||||
| 
						 | 
					@ -32,7 +32,6 @@ from PyQt5.QtGui import QPainterPath
 | 
				
			||||||
from .._profile import pg_profile_enabled, ms_slower_then
 | 
					from .._profile import pg_profile_enabled, ms_slower_then
 | 
				
			||||||
from ._style import hcolor
 | 
					from ._style import hcolor
 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from .._profile import Profiler
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
if TYPE_CHECKING:
 | 
					if TYPE_CHECKING:
 | 
				
			||||||
    from ._chart import LinkedSplits
 | 
					    from ._chart import LinkedSplits
 | 
				
			||||||
| 
						 | 
					@ -171,7 +170,7 @@ class BarItems(pg.GraphicsObject):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        profiler = Profiler(
 | 
					        profiler = pg.debug.Profiler(
 | 
				
			||||||
            disabled=not pg_profile_enabled(),
 | 
					            disabled=not pg_profile_enabled(),
 | 
				
			||||||
            ms_threshold=ms_slower_then,
 | 
					            ms_threshold=ms_slower_then,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -22,9 +22,12 @@ from __future__ import annotations
 | 
				
			||||||
from typing import (
 | 
					from typing import (
 | 
				
			||||||
    Optional, Generic,
 | 
					    Optional, Generic,
 | 
				
			||||||
    TypeVar, Callable,
 | 
					    TypeVar, Callable,
 | 
				
			||||||
 | 
					    Literal,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 | 
					import enum
 | 
				
			||||||
 | 
					import sys
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# from pydantic import BaseModel, validator
 | 
					from pydantic import BaseModel, validator
 | 
				
			||||||
from pydantic.generics import GenericModel
 | 
					from pydantic.generics import GenericModel
 | 
				
			||||||
from PyQt5.QtWidgets import (
 | 
					from PyQt5.QtWidgets import (
 | 
				
			||||||
    QWidget,
 | 
					    QWidget,
 | 
				
			||||||
| 
						 | 
					@ -35,7 +38,6 @@ from ._forms import (
 | 
				
			||||||
    # FontScaledDelegate,
 | 
					    # FontScaledDelegate,
 | 
				
			||||||
    Edit,
 | 
					    Edit,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
from ..data.types import Struct
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
DataType = TypeVar('DataType')
 | 
					DataType = TypeVar('DataType')
 | 
				
			||||||
| 
						 | 
					@ -60,7 +62,7 @@ class Selection(Field[DataType], Generic[DataType]):
 | 
				
			||||||
    options: dict[str, DataType]
 | 
					    options: dict[str, DataType]
 | 
				
			||||||
    # value: DataType = None
 | 
					    # value: DataType = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # @validator('value')  # , always=True)
 | 
					    @validator('value')  # , always=True)
 | 
				
			||||||
    def set_value_first(
 | 
					    def set_value_first(
 | 
				
			||||||
        cls,
 | 
					        cls,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -98,7 +100,7 @@ class Edit(Field[DataType], Generic[DataType]):
 | 
				
			||||||
    widget_factory = Edit
 | 
					    widget_factory = Edit
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class AllocatorPane(Struct):
 | 
					class AllocatorPane(BaseModel):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    account = Selection[str](
 | 
					    account = Selection[str](
 | 
				
			||||||
        options=dict.fromkeys(
 | 
					        options=dict.fromkeys(
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -18,27 +18,23 @@
 | 
				
			||||||
Charting overlay helpers.
 | 
					Charting overlay helpers.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
'''
 | 
					'''
 | 
				
			||||||
from collections import defaultdict
 | 
					from typing import Callable, Optional
 | 
				
			||||||
from functools import partial
 | 
					
 | 
				
			||||||
from typing import (
 | 
					from pyqtgraph.Qt.QtCore import (
 | 
				
			||||||
    Callable,
 | 
					    # QObject,
 | 
				
			||||||
    Optional,
 | 
					    # Signal,
 | 
				
			||||||
 | 
					    Qt,
 | 
				
			||||||
 | 
					    # QEvent,
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
from pyqtgraph.graphicsItems.AxisItem import AxisItem
 | 
					from pyqtgraph.graphicsItems.AxisItem import AxisItem
 | 
				
			||||||
from pyqtgraph.graphicsItems.ViewBox import ViewBox
 | 
					from pyqtgraph.graphicsItems.ViewBox import ViewBox
 | 
				
			||||||
# from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
 | 
					from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
 | 
				
			||||||
from pyqtgraph.graphicsItems.PlotItem.PlotItem import PlotItem
 | 
					from pyqtgraph.graphicsItems.PlotItem.PlotItem import PlotItem
 | 
				
			||||||
from pyqtgraph.Qt.QtCore import (
 | 
					from pyqtgraph.Qt.QtCore import QObject, Signal, QEvent
 | 
				
			||||||
    QObject,
 | 
					from pyqtgraph.Qt.QtWidgets import QGraphicsGridLayout, QGraphicsLinearLayout
 | 
				
			||||||
    Signal,
 | 
					
 | 
				
			||||||
    QEvent,
 | 
					from ._interaction import ChartView
 | 
				
			||||||
    Qt,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from pyqtgraph.Qt.QtWidgets import (
 | 
					 | 
				
			||||||
    # QGraphicsGridLayout,
 | 
					 | 
				
			||||||
    QGraphicsLinearLayout,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
__all__ = ["PlotItemOverlay"]
 | 
					__all__ = ["PlotItemOverlay"]
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -84,8 +80,8 @@ class ComposedGridLayout:
 | 
				
			||||||
    ``<axis_name>i`` in the layout.
 | 
					    ``<axis_name>i`` in the layout.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    The ``item: PlotItem`` passed to the constructor's grid layout is
 | 
					    The ``item: PlotItem`` passed to the constructor's grid layout is
 | 
				
			||||||
    used verbatim as the "main plot" who's view box is given precedence
 | 
					    used verbatim as the "main plot" who's view box is give precedence
 | 
				
			||||||
    for input handling. The main plot's axes are removed from its
 | 
					    for input handling. The main plot's axes are removed from it's
 | 
				
			||||||
    layout and placed in the surrounding exterior layouts to allow for
 | 
					    layout and placed in the surrounding exterior layouts to allow for
 | 
				
			||||||
    re-ordering if desired.
 | 
					    re-ordering if desired.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -93,11 +89,16 @@ class ComposedGridLayout:
 | 
				
			||||||
    def __init__(
 | 
					    def __init__(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        item: PlotItem,
 | 
					        item: PlotItem,
 | 
				
			||||||
 | 
					        grid: QGraphicsGridLayout,
 | 
				
			||||||
 | 
					        reverse: bool = False,  # insert items to the "center"
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					 | 
				
			||||||
        self.items: list[PlotItem] = []
 | 
					        self.items: list[PlotItem] = []
 | 
				
			||||||
        self._pi2axes: dict[  # TODO: use a ``bidict`` here?
 | 
					        # self.grid = grid
 | 
				
			||||||
 | 
					        self.reverse = reverse
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # TODO: use a ``bidict`` here?
 | 
				
			||||||
 | 
					        self._pi2axes: dict[
 | 
				
			||||||
            int,
 | 
					            int,
 | 
				
			||||||
            dict[str, AxisItem],
 | 
					            dict[str, AxisItem],
 | 
				
			||||||
        ] = {}
 | 
					        ] = {}
 | 
				
			||||||
| 
						 | 
					@ -119,13 +120,12 @@ class ComposedGridLayout:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if name in ('top', 'bottom'):
 | 
					            if name in ('top', 'bottom'):
 | 
				
			||||||
                orient = Qt.Vertical
 | 
					                orient = Qt.Vertical
 | 
				
			||||||
 | 
					 | 
				
			||||||
            elif name in ('left', 'right'):
 | 
					            elif name in ('left', 'right'):
 | 
				
			||||||
                orient = Qt.Horizontal
 | 
					                orient = Qt.Horizontal
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            layout.setOrientation(orient)
 | 
					            layout.setOrientation(orient)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.insert_plotitem(0, item)
 | 
					        self.insert(0, item)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # insert surrounding linear layouts into the parent pi's layout
 | 
					        # insert surrounding linear layouts into the parent pi's layout
 | 
				
			||||||
        # such that additional axes can be appended arbitrarily without
 | 
					        # such that additional axes can be appended arbitrarily without
 | 
				
			||||||
| 
						 | 
					@ -159,7 +159,7 @@ class ComposedGridLayout:
 | 
				
			||||||
        # enter plot into list for index tracking
 | 
					        # enter plot into list for index tracking
 | 
				
			||||||
        self.items.insert(index, plotitem)
 | 
					        self.items.insert(index, plotitem)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def insert_plotitem(
 | 
					    def insert(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        index: int,
 | 
					        index: int,
 | 
				
			||||||
        plotitem: PlotItem,
 | 
					        plotitem: PlotItem,
 | 
				
			||||||
| 
						 | 
					@ -171,9 +171,7 @@ class ComposedGridLayout:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        if index < 0:
 | 
					        if index < 0:
 | 
				
			||||||
            raise ValueError(
 | 
					            raise ValueError('`insert()` only supports an index >= 0')
 | 
				
			||||||
                '`.insert_plotitem()` only supports an index >= 0'
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # add plot's axes in sequence to the embedded linear layouts
 | 
					        # add plot's axes in sequence to the embedded linear layouts
 | 
				
			||||||
        # for each "side" thus avoiding graphics collisions.
 | 
					        # for each "side" thus avoiding graphics collisions.
 | 
				
			||||||
| 
						 | 
					@ -222,7 +220,7 @@ class ComposedGridLayout:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return index
 | 
					        return index
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def append_plotitem(
 | 
					    def append(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        item: PlotItem,
 | 
					        item: PlotItem,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -234,7 +232,7 @@ class ComposedGridLayout:
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        # for left and bottom axes we have to first remove
 | 
					        # for left and bottom axes we have to first remove
 | 
				
			||||||
        # items and re-insert to maintain a list-order.
 | 
					        # items and re-insert to maintain a list-order.
 | 
				
			||||||
        return self.insert_plotitem(len(self.items), item)
 | 
					        return self.insert(len(self.items), item)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def get_axis(
 | 
					    def get_axis(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
| 
						 | 
					@ -251,16 +249,16 @@ class ComposedGridLayout:
 | 
				
			||||||
        named = self._pi2axes[name]
 | 
					        named = self._pi2axes[name]
 | 
				
			||||||
        return named.get(index)
 | 
					        return named.get(index)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # def pop(
 | 
					    def pop(
 | 
				
			||||||
    #     self,
 | 
					        self,
 | 
				
			||||||
    #     item: PlotItem,
 | 
					        item: PlotItem,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # ) -> PlotItem:
 | 
					    ) -> PlotItem:
 | 
				
			||||||
    #     '''
 | 
					        '''
 | 
				
			||||||
    #     Remove item and restack all axes in list-order.
 | 
					        Remove item and restack all axes in list-order.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    #     '''
 | 
					        '''
 | 
				
			||||||
    #     raise NotImplementedError
 | 
					        raise NotImplementedError
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# Unimplemented features TODO:
 | 
					# Unimplemented features TODO:
 | 
				
			||||||
| 
						 | 
					@ -281,6 +279,194 @@ class ComposedGridLayout:
 | 
				
			||||||
#   axis?
 | 
					#   axis?
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# TODO: we might want to enabled some kind of manual flag to disable
 | 
				
			||||||
 | 
					# this method wrapping during type creation? As example a user could
 | 
				
			||||||
 | 
					# definitively decide **not** to enable broadcasting support by
 | 
				
			||||||
 | 
					# setting something like ``ViewBox.disable_relays = True``?
 | 
				
			||||||
 | 
					def mk_relay_method(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    signame: str,
 | 
				
			||||||
 | 
					    slot: Callable[
 | 
				
			||||||
 | 
					        [ViewBox,
 | 
				
			||||||
 | 
					         'QEvent',
 | 
				
			||||||
 | 
					         Optional[AxisItem]],
 | 
				
			||||||
 | 
					        None,
 | 
				
			||||||
 | 
					    ],
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					) -> Callable[
 | 
				
			||||||
 | 
					    [
 | 
				
			||||||
 | 
					        ViewBox,
 | 
				
			||||||
 | 
					        # lol, there isn't really a generic type thanks
 | 
				
			||||||
 | 
					        # to the rewrite of Qt's event system XD
 | 
				
			||||||
 | 
					        'QEvent',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        'Optional[AxisItem]',
 | 
				
			||||||
 | 
					        'Optional[ViewBox]',  # the ``relayed_from`` arg we provide
 | 
				
			||||||
 | 
					    ],
 | 
				
			||||||
 | 
					    None,
 | 
				
			||||||
 | 
					]:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    def maybe_broadcast(
 | 
				
			||||||
 | 
					        vb: 'ViewBox',
 | 
				
			||||||
 | 
					        ev: 'QEvent',
 | 
				
			||||||
 | 
					        axis: 'Optional[int]' = None,
 | 
				
			||||||
 | 
					        relayed_from: 'ViewBox' = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        (soon to be) Decorator which makes an event handler
 | 
				
			||||||
 | 
					        "broadcastable" to overlayed ``GraphicsWidget``s.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        Adds relay signals based on the decorated handler's name
 | 
				
			||||||
 | 
					        and conducts a signal broadcast of the relay signal if there
 | 
				
			||||||
 | 
					        are consumers registered.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        # When no relay source has been set just bypass all
 | 
				
			||||||
 | 
					        # the broadcast machinery.
 | 
				
			||||||
 | 
					        if vb.event_relay_source is None:
 | 
				
			||||||
 | 
					            ev.accept()
 | 
				
			||||||
 | 
					            return slot(
 | 
				
			||||||
 | 
					                vb,
 | 
				
			||||||
 | 
					                ev,
 | 
				
			||||||
 | 
					                axis=axis,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if relayed_from:
 | 
				
			||||||
 | 
					            assert axis is None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # this is a relayed event and should be ignored (so it does not
 | 
				
			||||||
 | 
					            # halt/short circuit the graphicscene loop). Further the
 | 
				
			||||||
 | 
					            # surrounding handler for this signal must be allowed to execute
 | 
				
			||||||
 | 
					            # and get processed by **this consumer**.
 | 
				
			||||||
 | 
					            # print(f'{vb.name} rx relayed from {relayed_from.name}')
 | 
				
			||||||
 | 
					            ev.ignore()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            return slot(
 | 
				
			||||||
 | 
					                vb,
 | 
				
			||||||
 | 
					                ev,
 | 
				
			||||||
 | 
					                axis=axis,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        if axis is not None:
 | 
				
			||||||
 | 
					            # print(f'{vb.name} handling axis event:\n{str(ev)}')
 | 
				
			||||||
 | 
					            ev.accept()
 | 
				
			||||||
 | 
					            return slot(
 | 
				
			||||||
 | 
					                vb,
 | 
				
			||||||
 | 
					                ev,
 | 
				
			||||||
 | 
					                axis=axis,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        elif (
 | 
				
			||||||
 | 
					            relayed_from is None
 | 
				
			||||||
 | 
					            and vb.event_relay_source is vb  # we are the broadcaster
 | 
				
			||||||
 | 
					            and axis is None
 | 
				
			||||||
 | 
					        ):
 | 
				
			||||||
 | 
					            # Broadcast case: this is a source event which will be
 | 
				
			||||||
 | 
					            # relayed to attached consumers and accepted after all
 | 
				
			||||||
 | 
					            # consumers complete their own handling followed by this
 | 
				
			||||||
 | 
					            # routine's processing. Sequence is,
 | 
				
			||||||
 | 
					            # - pre-relay to all consumers *first* - ``.emit()`` blocks
 | 
				
			||||||
 | 
					            #   until all downstream relay handlers have run.
 | 
				
			||||||
 | 
					            # - run the source handler for **this** event and accept
 | 
				
			||||||
 | 
					            #   the event
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # Access the "bound signal" that is created
 | 
				
			||||||
 | 
					            # on the widget type as part of instantiation.
 | 
				
			||||||
 | 
					            signal = getattr(vb, signame)
 | 
				
			||||||
 | 
					            # print(f'{vb.name} emitting {signame}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # TODO/NOTE: we could also just bypass a "relay" signal
 | 
				
			||||||
 | 
					            # entirely and instead call the handlers manually in
 | 
				
			||||||
 | 
					            # a loop? This probably is a lot simpler and also doesn't
 | 
				
			||||||
 | 
					            # have any downside, and allows not touching target widget
 | 
				
			||||||
 | 
					            # internals.
 | 
				
			||||||
 | 
					            signal.emit(
 | 
				
			||||||
 | 
					                ev,
 | 
				
			||||||
 | 
					                axis,
 | 
				
			||||||
 | 
					                # passing this demarks a broadcasted/relayed event
 | 
				
			||||||
 | 
					                vb,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					            # accept event so no more relays are fired.
 | 
				
			||||||
 | 
					            ev.accept()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            # call underlying wrapped method with an extra
 | 
				
			||||||
 | 
					            # ``relayed_from`` value to denote that this is a relayed
 | 
				
			||||||
 | 
					            # event handling case.
 | 
				
			||||||
 | 
					            return slot(
 | 
				
			||||||
 | 
					                vb,
 | 
				
			||||||
 | 
					                ev,
 | 
				
			||||||
 | 
					                axis=axis,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return maybe_broadcast
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# XXX: :( can't define signals **after** class compile time
 | 
				
			||||||
 | 
					# so this is not really useful.
 | 
				
			||||||
 | 
					# def mk_relay_signal(
 | 
				
			||||||
 | 
					#     func,
 | 
				
			||||||
 | 
					#     name: str = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					# ) -> Signal:
 | 
				
			||||||
 | 
					#     (
 | 
				
			||||||
 | 
					#         args,
 | 
				
			||||||
 | 
					#         varargs,
 | 
				
			||||||
 | 
					#         varkw,
 | 
				
			||||||
 | 
					#         defaults,
 | 
				
			||||||
 | 
					#         kwonlyargs,
 | 
				
			||||||
 | 
					#         kwonlydefaults,
 | 
				
			||||||
 | 
					#         annotations
 | 
				
			||||||
 | 
					#     ) = inspect.getfullargspec(func)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					#     # XXX: generate a relay signal with 1 extra
 | 
				
			||||||
 | 
					#     # argument for a ``relayed_from`` kwarg. Since
 | 
				
			||||||
 | 
					#     # ``'self'`` is already ignored by signals we just need
 | 
				
			||||||
 | 
					#     # to count the arguments since we're adding only 1 (and
 | 
				
			||||||
 | 
					#     # ``args`` will capture that).
 | 
				
			||||||
 | 
					#     numargs = len(args + list(defaults))
 | 
				
			||||||
 | 
					#     signal = Signal(*tuple(numargs * [object]))
 | 
				
			||||||
 | 
					#     signame = name or func.__name__ + 'Relay'
 | 
				
			||||||
 | 
					#     return signame, signal
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					def enable_relays(
 | 
				
			||||||
 | 
					    widget: GraphicsWidget,
 | 
				
			||||||
 | 
					    handler_names: list[str],
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					) -> list[Signal]:
 | 
				
			||||||
 | 
					    '''
 | 
				
			||||||
 | 
					    Method override helper which enables relay of a particular
 | 
				
			||||||
 | 
					    ``Signal`` from some chosen broadcaster widget to a set of
 | 
				
			||||||
 | 
					    consumer widgets which should operate their event handlers normally
 | 
				
			||||||
 | 
					    but instead of signals "relayed" from the broadcaster.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    Mostly useful for overlaying widgets that handle user input
 | 
				
			||||||
 | 
					    that you want to overlay graphically. The target ``widget`` type must
 | 
				
			||||||
 | 
					    define ``QtCore.Signal``s each with a `'Relay'` suffix for each
 | 
				
			||||||
 | 
					    name provided in ``handler_names: list[str]``.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    '''
 | 
				
			||||||
 | 
					    signals = []
 | 
				
			||||||
 | 
					    for name in handler_names:
 | 
				
			||||||
 | 
					        handler = getattr(widget, name)
 | 
				
			||||||
 | 
					        signame = name + 'Relay'
 | 
				
			||||||
 | 
					        # ensure the target widget defines a relay signal
 | 
				
			||||||
 | 
					        relay = getattr(widget, signame)
 | 
				
			||||||
 | 
					        widget.relays[signame] = name
 | 
				
			||||||
 | 
					        signals.append(relay)
 | 
				
			||||||
 | 
					        method = mk_relay_method(signame, handler)
 | 
				
			||||||
 | 
					        setattr(widget, name, method)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    return signals
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					enable_relays(
 | 
				
			||||||
 | 
					    ChartView,
 | 
				
			||||||
 | 
					    ['wheelEvent', 'mouseDragEvent']
 | 
				
			||||||
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class PlotItemOverlay:
 | 
					class PlotItemOverlay:
 | 
				
			||||||
    '''
 | 
					    '''
 | 
				
			||||||
    A composite for managing overlaid ``PlotItem`` instances such that
 | 
					    A composite for managing overlaid ``PlotItem`` instances such that
 | 
				
			||||||
| 
						 | 
					@ -296,18 +482,16 @@ class PlotItemOverlay:
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.root_plotitem: PlotItem = root_plotitem
 | 
					        self.root_plotitem: PlotItem = root_plotitem
 | 
				
			||||||
        self.relay_handlers: defaultdict[
 | 
					 | 
				
			||||||
            str,
 | 
					 | 
				
			||||||
            list[Callable],
 | 
					 | 
				
			||||||
        ] = defaultdict(list)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # NOTE: required for scene layering/relaying; this guarantees
 | 
					        vb = root_plotitem.vb
 | 
				
			||||||
        # the "root" plot receives priority for interaction
 | 
					        vb.event_relay_source = vb  # TODO: maybe change name?
 | 
				
			||||||
        # events/signals.
 | 
					        vb.setZValue(1000)  # XXX: critical for scene layering/relaying
 | 
				
			||||||
        root_plotitem.vb.setZValue(1000)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.overlays: list[PlotItem] = []
 | 
					        self.overlays: list[PlotItem] = []
 | 
				
			||||||
        self.layout = ComposedGridLayout(root_plotitem)
 | 
					        self.layout = ComposedGridLayout(
 | 
				
			||||||
 | 
					            root_plotitem,
 | 
				
			||||||
 | 
					            root_plotitem.layout,
 | 
				
			||||||
 | 
					        )
 | 
				
			||||||
        self._relays: dict[str, Signal] = {}
 | 
					        self._relays: dict[str, Signal] = {}
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def add_plotitem(
 | 
					    def add_plotitem(
 | 
				
			||||||
| 
						 | 
					@ -315,10 +499,8 @@ class PlotItemOverlay:
 | 
				
			||||||
        plotitem: PlotItem,
 | 
					        plotitem: PlotItem,
 | 
				
			||||||
        index: Optional[int] = None,
 | 
					        index: Optional[int] = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # event/signal names which will be broadcasted to all added
 | 
					        # TODO: we could also put the ``ViewBox.XAxis``
 | 
				
			||||||
        # (relayee) ``PlotItem``s (eg. ``ViewBox.mouseDragEvent``).
 | 
					        # style enum here?
 | 
				
			||||||
        relay_events: list[str] = [],
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # (0,),  # link x
 | 
					        # (0,),  # link x
 | 
				
			||||||
        # (1,),  # link y
 | 
					        # (1,),  # link y
 | 
				
			||||||
        # (0, 1),  # link both
 | 
					        # (0, 1),  # link both
 | 
				
			||||||
| 
						 | 
					@ -328,155 +510,58 @@ class PlotItemOverlay:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        index = index or len(self.overlays)
 | 
					        index = index or len(self.overlays)
 | 
				
			||||||
        root = self.root_plotitem
 | 
					        root = self.root_plotitem
 | 
				
			||||||
 | 
					        # layout: QGraphicsGridLayout = root.layout
 | 
				
			||||||
        self.overlays.insert(index, plotitem)
 | 
					        self.overlays.insert(index, plotitem)
 | 
				
			||||||
        vb: ViewBox = plotitem.vb
 | 
					        vb: ViewBox = plotitem.vb
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        # mark this consumer overlay as ready to expect relayed events
 | 
				
			||||||
 | 
					        # from the root plotitem.
 | 
				
			||||||
 | 
					        vb.event_relay_source = root.vb
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # TODO: some sane way to allow menu event broadcast XD
 | 
					        # TODO: some sane way to allow menu event broadcast XD
 | 
				
			||||||
        # vb.setMenuEnabled(False)
 | 
					        # vb.setMenuEnabled(False)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # wire up any relay signal(s) from the source plot to added
 | 
					        # TODO: inside the `maybe_broadcast()` (soon to be) decorator
 | 
				
			||||||
        # "overlays". We use a plain loop instead of mucking with
 | 
					        # we need have checks that consumers have been attached to
 | 
				
			||||||
        # re-connecting signal/slots which tends to be more invasive and
 | 
					        # these relay signals.
 | 
				
			||||||
        # harder to implement and provides no measurable performance
 | 
					        if link_axes != (0, 1):
 | 
				
			||||||
        # gain.
 | 
					 | 
				
			||||||
        if relay_events:
 | 
					 | 
				
			||||||
            for ev_name in relay_events:
 | 
					 | 
				
			||||||
                relayee_handler: Callable[
 | 
					 | 
				
			||||||
                    [
 | 
					 | 
				
			||||||
                        ViewBox,
 | 
					 | 
				
			||||||
                        # lol, there isn't really a generic type thanks
 | 
					 | 
				
			||||||
                        # to the rewrite of Qt's event system XD
 | 
					 | 
				
			||||||
                        QEvent,
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
                        AxisItem | None,
 | 
					            # wire up relay signals
 | 
				
			||||||
                    ],
 | 
					            for relay_signal_name, handler_name in vb.relays.items():
 | 
				
			||||||
                    None,
 | 
					                # print(handler_name)
 | 
				
			||||||
                ] = getattr(vb, ev_name)
 | 
					                # XXX: Signal class attrs are bound after instantiation
 | 
				
			||||||
 | 
					                # of the defining type, so we need to access that bound
 | 
				
			||||||
                sub_handlers: list[Callable] = self.relay_handlers[ev_name]
 | 
					                # version here.
 | 
				
			||||||
 | 
					                signal = getattr(root.vb, relay_signal_name)
 | 
				
			||||||
                # on the first registry of a relayed event we pop the
 | 
					                handler = getattr(vb, handler_name)
 | 
				
			||||||
                # root's handler and override it to a custom broadcaster
 | 
					                signal.connect(handler)
 | 
				
			||||||
                # routine.
 | 
					 | 
				
			||||||
                if not sub_handlers:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                    src_handler = getattr(
 | 
					 | 
				
			||||||
                        root.vb,
 | 
					 | 
				
			||||||
                        ev_name,
 | 
					 | 
				
			||||||
                    )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                    def broadcast(
 | 
					 | 
				
			||||||
                        ev: 'QEvent',
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        # TODO: drop this viewbox specific input and
 | 
					 | 
				
			||||||
                        # allow a predicate to be passed in by user.
 | 
					 | 
				
			||||||
                        axis: 'Optional[int]' = None,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        *,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        # these are bound in by the ``partial`` below
 | 
					 | 
				
			||||||
                        # and ensure a unique broadcaster per event.
 | 
					 | 
				
			||||||
                        ev_name: str = None,
 | 
					 | 
				
			||||||
                        src_handler: Callable = None,
 | 
					 | 
				
			||||||
                        relayed_from: 'ViewBox' = None,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        # remaining inputs the source handler expects
 | 
					 | 
				
			||||||
                        **kwargs,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                    ) -> None:
 | 
					 | 
				
			||||||
                        '''
 | 
					 | 
				
			||||||
                        Broadcast signal or event: this is a source
 | 
					 | 
				
			||||||
                        event which will be relayed to attached
 | 
					 | 
				
			||||||
                        "relayee" plot item consumers.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        The event is accepted halting any further
 | 
					 | 
				
			||||||
                        handlers from being triggered.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        Sequence is,
 | 
					 | 
				
			||||||
                        - pre-relay to all consumers *first* - exactly
 | 
					 | 
				
			||||||
                          like how a ``Signal.emit()`` blocks until all
 | 
					 | 
				
			||||||
                          downstream relay handlers have run.
 | 
					 | 
				
			||||||
                        - run the event's source handler event
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        '''
 | 
					 | 
				
			||||||
                        ev.accept()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        # broadcast first to relayees *first*. trigger
 | 
					 | 
				
			||||||
                        # relay of event to all consumers **before**
 | 
					 | 
				
			||||||
                        # processing/consumption in the source handler.
 | 
					 | 
				
			||||||
                        relayed_handlers = self.relay_handlers[ev_name]
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        assert getattr(vb, ev_name).__name__ == ev_name
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        # TODO: generalize as an input predicate
 | 
					 | 
				
			||||||
                        if axis is None:
 | 
					 | 
				
			||||||
                            for handler in relayed_handlers:
 | 
					 | 
				
			||||||
                                handler(
 | 
					 | 
				
			||||||
                                    ev,
 | 
					 | 
				
			||||||
                                    axis=axis,
 | 
					 | 
				
			||||||
                                    **kwargs,
 | 
					 | 
				
			||||||
                                )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                        # run "source" widget's handler last
 | 
					 | 
				
			||||||
                        src_handler(
 | 
					 | 
				
			||||||
                            ev,
 | 
					 | 
				
			||||||
                            axis=axis,
 | 
					 | 
				
			||||||
                        )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                    # dynamic handler override on the publisher plot
 | 
					 | 
				
			||||||
                    setattr(
 | 
					 | 
				
			||||||
                        root.vb,
 | 
					 | 
				
			||||||
                        ev_name,
 | 
					 | 
				
			||||||
                        partial(
 | 
					 | 
				
			||||||
                            broadcast,
 | 
					 | 
				
			||||||
                            ev_name=ev_name,
 | 
					 | 
				
			||||||
                            src_handler=src_handler
 | 
					 | 
				
			||||||
                        ),
 | 
					 | 
				
			||||||
                    )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                else:
 | 
					 | 
				
			||||||
                    assert getattr(root.vb, ev_name)
 | 
					 | 
				
			||||||
                    assert relayee_handler not in sub_handlers
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # append relayed-to widget's handler to relay table
 | 
					 | 
				
			||||||
                sub_handlers.append(relayee_handler)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # link dim-axes to root if requested by user.
 | 
					        # link dim-axes to root if requested by user.
 | 
				
			||||||
 | 
					        # TODO: solve more-then-wanted scaled panning on click drag
 | 
				
			||||||
 | 
					        # which seems to be due to broadcast. So we probably need to
 | 
				
			||||||
 | 
					        # disable broadcast when axes are linked in a particular
 | 
				
			||||||
 | 
					        # dimension?
 | 
				
			||||||
        for dim in link_axes:
 | 
					        for dim in link_axes:
 | 
				
			||||||
            # link x and y axes to new view box such that the top level
 | 
					            # link x and y axes to new view box such that the top level
 | 
				
			||||||
            # viewbox propagates to the root (and whatever other
 | 
					            # viewbox propagates to the root (and whatever other
 | 
				
			||||||
            # plotitem overlays that have been added).
 | 
					            # plotitem overlays that have been added).
 | 
				
			||||||
            vb.linkView(dim, root.vb)
 | 
					            vb.linkView(dim, root.vb)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # => NOTE: in order to prevent "more-then-linear" scaled
 | 
					        # make overlaid viewbox impossible to focus since the top
 | 
				
			||||||
        # panning moves on (for eg. click-drag) certain range change
 | 
					        # level should handle all input and relay to overlays.
 | 
				
			||||||
        # signals (i.e. ``.sigXRangeChanged``), the user needs to be
 | 
					        # NOTE: this was solved with the `setZValue()` above!
 | 
				
			||||||
        # careful that any broadcasted ``relay_events`` are are short
 | 
					 | 
				
			||||||
        # circuited in sub-handlers (aka relayee's) implementations. As
 | 
					 | 
				
			||||||
        # an example if a ``ViewBox.mouseDragEvent`` is broadcasted, the
 | 
					 | 
				
			||||||
        # overlayed implementations need to be sure they either don't
 | 
					 | 
				
			||||||
        # also link the x-axes (by not providing ``link_axes=(0,)``
 | 
					 | 
				
			||||||
        # above) or that the relayee ``.mouseDragEvent()`` handlers are
 | 
					 | 
				
			||||||
        # ready to "``return`` early" in the case that
 | 
					 | 
				
			||||||
        # ``.sigXRangeChanged`` is emitted as part of linked axes.
 | 
					 | 
				
			||||||
        # For more details on such signalling mechanics peek in
 | 
					 | 
				
			||||||
        # ``ViewBox.linkView()``.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # make overlaid viewbox impossible to focus since the top level
 | 
					        # TODO: we will probably want to add a "focus" api such that
 | 
				
			||||||
        # should handle all input and relay to overlays. Note that the
 | 
					        # a new "top level" ``PlotItem`` can be selected dynamically
 | 
				
			||||||
        # "root" plot item gettingn interaction priority is configured
 | 
					        # (and presumably the axes dynamically sorted to match).
 | 
				
			||||||
        # with the ``.setZValue()`` during init.
 | 
					 | 
				
			||||||
        vb.setFlag(
 | 
					        vb.setFlag(
 | 
				
			||||||
            vb.GraphicsItemFlag.ItemIsFocusable,
 | 
					            vb.GraphicsItemFlag.ItemIsFocusable,
 | 
				
			||||||
            False
 | 
					            False
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
        vb.setFocusPolicy(Qt.NoFocus)
 | 
					        vb.setFocusPolicy(Qt.NoFocus)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # => TODO: add a "focus" api for switching the "top level"
 | 
					 | 
				
			||||||
        # ``PlotItem`` dynamically.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # append-compose into the layout all axes from this plot
 | 
					        # append-compose into the layout all axes from this plot
 | 
				
			||||||
        self.layout.insert_plotitem(index, plotitem)
 | 
					        self.layout.insert(index, plotitem)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        plotitem.setGeometry(root.vb.sceneBoundingRect())
 | 
					        plotitem.setGeometry(root.vb.sceneBoundingRect())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -494,6 +579,25 @@ class PlotItemOverlay:
 | 
				
			||||||
        root.vb.setFocus()
 | 
					        root.vb.setFocus()
 | 
				
			||||||
        assert root.vb.focusWidget()
 | 
					        assert root.vb.focusWidget()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # XXX: do we need this? Why would you build then destroy?
 | 
				
			||||||
 | 
					    def remove_plotitem(self, plotItem: PlotItem) -> None:
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        Remove this ``PlotItem`` from the overlayed set making not shown
 | 
				
			||||||
 | 
					        and unable to accept input.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        ...
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    # TODO: i think this would be super hot B)
 | 
				
			||||||
 | 
					    def focus_item(self, plotitem: PlotItem) -> PlotItem:
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        Apply focus to a contained PlotItem thus making it the "top level"
 | 
				
			||||||
 | 
					        item in the overlay able to accept peripheral's input from the user
 | 
				
			||||||
 | 
					        and responsible for zoom and panning control via its ``ViewBox``.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        ...
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def get_axis(
 | 
					    def get_axis(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        plot: PlotItem,
 | 
					        plot: PlotItem,
 | 
				
			||||||
| 
						 | 
					@ -526,9 +630,8 @@ class PlotItemOverlay:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return axes
 | 
					        return axes
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # XXX: untested as of now.
 | 
					    # TODO: i guess we need this if you want to detach existing plots
 | 
				
			||||||
    # TODO: need this as part of selecting a different root/source
 | 
					    # dynamically? XXX: untested as of now.
 | 
				
			||||||
    # plot to rewire interaction event broadcast dynamically.
 | 
					 | 
				
			||||||
    def _disconnect_all(
 | 
					    def _disconnect_all(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        plotitem: PlotItem,
 | 
					        plotitem: PlotItem,
 | 
				
			||||||
| 
						 | 
					@ -543,22 +646,3 @@ class PlotItemOverlay:
 | 
				
			||||||
            disconnected.append(sig)
 | 
					            disconnected.append(sig)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        return disconnected
 | 
					        return disconnected
 | 
				
			||||||
 | 
					 | 
				
			||||||
    # XXX: do we need this? Why would you build then destroy?
 | 
					 | 
				
			||||||
    # def remove_plotitem(self, plotItem: PlotItem) -> None:
 | 
					 | 
				
			||||||
    #     '''
 | 
					 | 
				
			||||||
    #     Remove this ``PlotItem`` from the overlayed set making not shown
 | 
					 | 
				
			||||||
    #     and unable to accept input.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    #     '''
 | 
					 | 
				
			||||||
    #     ...
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # TODO: i think this would be super hot B)
 | 
					 | 
				
			||||||
    # def focus_plotitem(self, plotitem: PlotItem) -> PlotItem:
 | 
					 | 
				
			||||||
    #     '''
 | 
					 | 
				
			||||||
    #     Apply focus to a contained PlotItem thus making it the "top level"
 | 
					 | 
				
			||||||
    #     item in the overlay able to accept peripheral's input from the user
 | 
					 | 
				
			||||||
    #     and responsible for zoom and panning control via its ``ViewBox``.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    #     '''
 | 
					 | 
				
			||||||
    #     ...
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -49,17 +49,12 @@ def xy_downsample(
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    x_spacer: float = 0.5,
 | 
					    x_spacer: float = 0.5,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> tuple[
 | 
					) -> tuple[np.ndarray, np.ndarray]:
 | 
				
			||||||
    np.ndarray,
 | 
					 | 
				
			||||||
    np.ndarray,
 | 
					 | 
				
			||||||
    float,
 | 
					 | 
				
			||||||
    float,
 | 
					 | 
				
			||||||
]:
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # downsample whenever more then 1 pixels per datum can be shown.
 | 
					    # downsample whenever more then 1 pixels per datum can be shown.
 | 
				
			||||||
    # always refresh data bounds until we get diffing
 | 
					    # always refresh data bounds until we get diffing
 | 
				
			||||||
    # working properly, see above..
 | 
					    # working properly, see above..
 | 
				
			||||||
    bins, x, y, ymn, ymx = ds_m4(
 | 
					    bins, x, y = ds_m4(
 | 
				
			||||||
        x,
 | 
					        x,
 | 
				
			||||||
        y,
 | 
					        y,
 | 
				
			||||||
        uppx,
 | 
					        uppx,
 | 
				
			||||||
| 
						 | 
					@ -72,7 +67,7 @@ def xy_downsample(
 | 
				
			||||||
    )).flatten()
 | 
					    )).flatten()
 | 
				
			||||||
    y = y.flatten()
 | 
					    y = y.flatten()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    return x, y, ymn, ymx
 | 
					    return x, y
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
@njit(
 | 
					@njit(
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -15,15 +15,11 @@
 | 
				
			||||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
					# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
Customization of ``pyqtgraph`` core routines and various types normally
 | 
					Customization of ``pyqtgraph`` core routines to speed up our use mostly
 | 
				
			||||||
for speedups.
 | 
					based on not requiring "scentific precision" for pixel perfect view
 | 
				
			||||||
 | 
					transforms.
 | 
				
			||||||
Generally, our does not require "scentific precision" for pixel perfect
 | 
					 | 
				
			||||||
view transforms.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
"""
 | 
					"""
 | 
				
			||||||
from typing import Optional
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
import pyqtgraph as pg
 | 
					import pyqtgraph as pg
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -50,211 +46,3 @@ def _do_overrides() -> None:
 | 
				
			||||||
    """
 | 
					    """
 | 
				
			||||||
    # we don't care about potential fp issues inside Qt
 | 
					    # we don't care about potential fp issues inside Qt
 | 
				
			||||||
    pg.functions.invertQTransform = invertQTransform
 | 
					    pg.functions.invertQTransform = invertQTransform
 | 
				
			||||||
    pg.PlotItem = PlotItem
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
# NOTE: the below customized type contains all our changes on a method
 | 
					 | 
				
			||||||
# by method basis as per the diff:
 | 
					 | 
				
			||||||
# https://github.com/pyqtgraph/pyqtgraph/commit/8e60bc14234b6bec1369ff4192dbfb82f8682920#diff-a2b5865955d2ba703dbc4c35ff01aa761aa28d2aeaac5e68d24e338bc82fb5b1R500
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
class PlotItem(pg.PlotItem):
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    Overrides for the core plot object mostly pertaining to overlayed
 | 
					 | 
				
			||||||
    multi-view management as it relates to multi-axis managment.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    '''
 | 
					 | 
				
			||||||
    def __init__(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        parent=None,
 | 
					 | 
				
			||||||
        name=None,
 | 
					 | 
				
			||||||
        labels=None,
 | 
					 | 
				
			||||||
        title=None,
 | 
					 | 
				
			||||||
        viewBox=None,
 | 
					 | 
				
			||||||
        axisItems=None,
 | 
					 | 
				
			||||||
        default_axes=['left', 'bottom'],
 | 
					 | 
				
			||||||
        enableMenu=True,
 | 
					 | 
				
			||||||
        **kargs
 | 
					 | 
				
			||||||
    ):
 | 
					 | 
				
			||||||
        super().__init__(
 | 
					 | 
				
			||||||
            parent=parent,
 | 
					 | 
				
			||||||
            name=name,
 | 
					 | 
				
			||||||
            labels=labels,
 | 
					 | 
				
			||||||
            title=title,
 | 
					 | 
				
			||||||
            viewBox=viewBox,
 | 
					 | 
				
			||||||
            axisItems=axisItems,
 | 
					 | 
				
			||||||
            # default_axes=default_axes,
 | 
					 | 
				
			||||||
            enableMenu=enableMenu,
 | 
					 | 
				
			||||||
            kargs=kargs,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
        # self.setAxisItems(
 | 
					 | 
				
			||||||
        #     axisItems,
 | 
					 | 
				
			||||||
        #     default_axes=default_axes,
 | 
					 | 
				
			||||||
        # )
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # NOTE: this is an entirely new method not in upstream.
 | 
					 | 
				
			||||||
    def removeAxis(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        name: str,
 | 
					 | 
				
			||||||
        unlink: bool = True,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> Optional[pg.AxisItem]:
 | 
					 | 
				
			||||||
        """
 | 
					 | 
				
			||||||
        Remove an axis from the contained axis items
 | 
					 | 
				
			||||||
        by ```name: str```.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        This means the axis graphics object will be removed
 | 
					 | 
				
			||||||
        from the ``.layout: QGraphicsGridLayout`` as well as unlinked
 | 
					 | 
				
			||||||
        from the underlying associated ``ViewBox``.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        If the ``unlink: bool`` is set to ``False`` then the axis will
 | 
					 | 
				
			||||||
        stay linked to its view and will only be removed from the
 | 
					 | 
				
			||||||
        layoutonly be removed from the layout.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        If no axis with ``name: str`` is found then this is a noop.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        Return the axis instance that was removed.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        """
 | 
					 | 
				
			||||||
        entry = self.axes.pop(name, None)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if not entry:
 | 
					 | 
				
			||||||
            return
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        axis = entry['item']
 | 
					 | 
				
			||||||
        self.layout.removeItem(axis)
 | 
					 | 
				
			||||||
        axis.scene().removeItem(axis)
 | 
					 | 
				
			||||||
        if unlink:
 | 
					 | 
				
			||||||
            axis.unlinkFromView()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        self.update()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        return axis
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    # Why do we need to always have all axes created?
 | 
					 | 
				
			||||||
    #
 | 
					 | 
				
			||||||
    # I don't understand this at all.
 | 
					 | 
				
			||||||
    #
 | 
					 | 
				
			||||||
    # Everything seems to work if you just always apply the
 | 
					 | 
				
			||||||
    # set passed to this method **EXCEPT** for some super weird reason
 | 
					 | 
				
			||||||
    # the view box geometry still computes as though the space for the
 | 
					 | 
				
			||||||
    # `'bottom'` axis is always there **UNLESS** you always add that
 | 
					 | 
				
			||||||
    # axis but hide it?
 | 
					 | 
				
			||||||
    #
 | 
					 | 
				
			||||||
    # Why in tf would this be the case!?!?
 | 
					 | 
				
			||||||
    def setAxisItems(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        # XXX: yeah yeah, i know we can't use type annots like this yet.
 | 
					 | 
				
			||||||
        axisItems: Optional[dict[str, pg.AxisItem]] = None,
 | 
					 | 
				
			||||||
        add_to_layout: bool = True,
 | 
					 | 
				
			||||||
        default_axes: list[str] = ['left', 'bottom'],
 | 
					 | 
				
			||||||
    ):
 | 
					 | 
				
			||||||
        """
 | 
					 | 
				
			||||||
        Override axis item setting to only
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        """
 | 
					 | 
				
			||||||
        axisItems = axisItems or {}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # XXX: wth is is this even saying?!?
 | 
					 | 
				
			||||||
        # Array containing visible axis items
 | 
					 | 
				
			||||||
        # Also containing potentially hidden axes, but they are not
 | 
					 | 
				
			||||||
        # touched so it does not matter
 | 
					 | 
				
			||||||
        # visibleAxes = ['left', 'bottom']
 | 
					 | 
				
			||||||
        # Note that it does not matter that this adds
 | 
					 | 
				
			||||||
        # some values to visibleAxes a second time
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # XXX: uhhh wat^ ..?
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        visibleAxes = list(default_axes) + list(axisItems.keys())
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # TODO: we should probably invert the loop here to not loop the
 | 
					 | 
				
			||||||
        # predefined "axis name set" and instead loop the `axisItems`
 | 
					 | 
				
			||||||
        # input and lookup indices from a predefined map.
 | 
					 | 
				
			||||||
        for name, pos in (
 | 
					 | 
				
			||||||
            ('top', (1, 1)),
 | 
					 | 
				
			||||||
            ('bottom', (3, 1)),
 | 
					 | 
				
			||||||
            ('left', (2, 0)),
 | 
					 | 
				
			||||||
            ('right', (2, 2))
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            if (
 | 
					 | 
				
			||||||
                name in self.axes and
 | 
					 | 
				
			||||||
                name in axisItems
 | 
					 | 
				
			||||||
            ):
 | 
					 | 
				
			||||||
                # we already have an axis entry for this name
 | 
					 | 
				
			||||||
                # so remove the existing entry.
 | 
					 | 
				
			||||||
                self.removeAxis(name)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # elif name not in axisItems:
 | 
					 | 
				
			||||||
            #     # this axis entry is not provided in this call
 | 
					 | 
				
			||||||
            #     # so remove any old/existing entry.
 | 
					 | 
				
			||||||
            #     self.removeAxis(name)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # Create new axis
 | 
					 | 
				
			||||||
            if name in axisItems:
 | 
					 | 
				
			||||||
                axis = axisItems[name]
 | 
					 | 
				
			||||||
                if axis.scene() is not None:
 | 
					 | 
				
			||||||
                    if (
 | 
					 | 
				
			||||||
                        name not in self.axes
 | 
					 | 
				
			||||||
                        or axis != self.axes[name]["item"]
 | 
					 | 
				
			||||||
                    ):
 | 
					 | 
				
			||||||
                        raise RuntimeError(
 | 
					 | 
				
			||||||
                            "Can't add an axis to multiple plots. Shared axes"
 | 
					 | 
				
			||||||
                            " can be achieved with multiple AxisItem instances"
 | 
					 | 
				
			||||||
                            " and set[X/Y]Link.")
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                # Set up new axis
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
                # XXX: ok but why do we want to add axes for all entries
 | 
					 | 
				
			||||||
                # if not desired by the user? The only reason I can see
 | 
					 | 
				
			||||||
                # adding this is without it there's some weird
 | 
					 | 
				
			||||||
                # ``ViewBox`` geometry bug.. where a gap for the
 | 
					 | 
				
			||||||
                # 'bottom' axis is somehow left in?
 | 
					 | 
				
			||||||
                axis = pg.AxisItem(orientation=name, parent=self)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            axis.linkToView(self.vb)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # XXX: shouldn't you already know the ``pos`` from the name?
 | 
					 | 
				
			||||||
            # Oh right instead of a global map that would let you
 | 
					 | 
				
			||||||
            # reasily look that up it's redefined over and over and over
 | 
					 | 
				
			||||||
            # again in methods..
 | 
					 | 
				
			||||||
            self.axes[name] = {'item': axis, 'pos': pos}
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # NOTE: in the overlay case the axis may be added to some
 | 
					 | 
				
			||||||
            # other layout and should not be added here.
 | 
					 | 
				
			||||||
            if add_to_layout:
 | 
					 | 
				
			||||||
                self.layout.addItem(axis, *pos)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # place axis above images at z=0, items that want to draw
 | 
					 | 
				
			||||||
            # over the axes should be placed at z>=1:
 | 
					 | 
				
			||||||
            axis.setZValue(0.5)
 | 
					 | 
				
			||||||
            axis.setFlag(
 | 
					 | 
				
			||||||
                axis.GraphicsItemFlag.ItemNegativeZStacksBehindParent
 | 
					 | 
				
			||||||
            )
 | 
					 | 
				
			||||||
            if name in visibleAxes:
 | 
					 | 
				
			||||||
                self.showAxis(name, True)
 | 
					 | 
				
			||||||
            else:
 | 
					 | 
				
			||||||
                # why do we need to insert all axes to ``.axes`` and
 | 
					 | 
				
			||||||
                # only hide the ones the user doesn't specify? It all
 | 
					 | 
				
			||||||
                # seems to work fine without doing this except for this
 | 
					 | 
				
			||||||
                # weird gap for the 'bottom' axis that always shows up
 | 
					 | 
				
			||||||
                # in the view box geometry??
 | 
					 | 
				
			||||||
                self.hideAxis(name)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def updateGrid(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        *args,
 | 
					 | 
				
			||||||
    ):
 | 
					 | 
				
			||||||
        alpha = self.ctrl.gridAlphaSlider.value()
 | 
					 | 
				
			||||||
        x = alpha if self.ctrl.xGridCheck.isChecked() else False
 | 
					 | 
				
			||||||
        y = alpha if self.ctrl.yGridCheck.isChecked() else False
 | 
					 | 
				
			||||||
        for name, dim in (
 | 
					 | 
				
			||||||
            ('top', x),
 | 
					 | 
				
			||||||
            ('bottom', x),
 | 
					 | 
				
			||||||
            ('left', y),
 | 
					 | 
				
			||||||
            ('right', y)
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            if name in self.axes:
 | 
					 | 
				
			||||||
                self.getAxis(name).setGrid(dim)
 | 
					 | 
				
			||||||
        # self.getAxis('bottom').setGrid(x)
 | 
					 | 
				
			||||||
        # self.getAxis('left').setGrid(y)
 | 
					 | 
				
			||||||
        # self.getAxis('right').setGrid(y)
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| 
						 | 
					@ -35,13 +35,9 @@ from collections import defaultdict
 | 
				
			||||||
from contextlib import asynccontextmanager
 | 
					from contextlib import asynccontextmanager
 | 
				
			||||||
from functools import partial
 | 
					from functools import partial
 | 
				
			||||||
from typing import (
 | 
					from typing import (
 | 
				
			||||||
    Optional,
 | 
					    Optional, Callable,
 | 
				
			||||||
    Callable,
 | 
					    Awaitable, Sequence,
 | 
				
			||||||
    Awaitable,
 | 
					    Any, AsyncIterator
 | 
				
			||||||
    Sequence,
 | 
					 | 
				
			||||||
    Any,
 | 
					 | 
				
			||||||
    AsyncIterator,
 | 
					 | 
				
			||||||
    Iterator,
 | 
					 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
import time
 | 
					import time
 | 
				
			||||||
# from pprint import pformat
 | 
					# from pprint import pformat
 | 
				
			||||||
| 
						 | 
					@ -123,7 +119,7 @@ class CompleterView(QTreeView):
 | 
				
			||||||
        # TODO: size this based on DPI font
 | 
					        # TODO: size this based on DPI font
 | 
				
			||||||
        self.setIndentation(_font.px_size)
 | 
					        self.setIndentation(_font.px_size)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.setUniformRowHeights(True)
 | 
					        # self.setUniformRowHeights(True)
 | 
				
			||||||
        # self.setColumnWidth(0, 3)
 | 
					        # self.setColumnWidth(0, 3)
 | 
				
			||||||
        # self.setVerticalBarPolicy(Qt.ScrollBarAlwaysOff)
 | 
					        # self.setVerticalBarPolicy(Qt.ScrollBarAlwaysOff)
 | 
				
			||||||
        # self.setSizeAdjustPolicy(QAbstractScrollArea.AdjustIgnored)
 | 
					        # self.setSizeAdjustPolicy(QAbstractScrollArea.AdjustIgnored)
 | 
				
			||||||
| 
						 | 
					@ -142,15 +138,13 @@ class CompleterView(QTreeView):
 | 
				
			||||||
        model.setHorizontalHeaderLabels(labels)
 | 
					        model.setHorizontalHeaderLabels(labels)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self._font_size: int = 0  # pixels
 | 
					        self._font_size: int = 0  # pixels
 | 
				
			||||||
        self._init: bool = False
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async def on_pressed(self, idx: QModelIndex) -> None:
 | 
					    async def on_pressed(self, idx: QModelIndex) -> None:
 | 
				
			||||||
        '''
 | 
					        '''Mouse pressed on view handler.
 | 
				
			||||||
        Mouse pressed on view handler.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        search = self.parent()
 | 
					        search = self.parent()
 | 
				
			||||||
        await search.chart_current_item()
 | 
					        await search.chart_current_item(clear_to_cache=False)
 | 
				
			||||||
        search.focus()
 | 
					        search.focus()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def set_font_size(self, size: int = 18):
 | 
					    def set_font_size(self, size: int = 18):
 | 
				
			||||||
| 
						 | 
					@ -162,64 +156,56 @@ class CompleterView(QTreeView):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.setStyleSheet(f"font: {size}px")
 | 
					        self.setStyleSheet(f"font: {size}px")
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def resize_to_results(
 | 
					    # def resizeEvent(self, event: 'QEvent') -> None:
 | 
				
			||||||
        self,
 | 
					    #     event.accept()
 | 
				
			||||||
        w: Optional[float] = 0,
 | 
					    #     super().resizeEvent(event)
 | 
				
			||||||
        h: Optional[float] = None,
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    def on_resize(self) -> None:
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        Resize relay event from god.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					        '''
 | 
				
			||||||
 | 
					        self.resize_to_results()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    def resize_to_results(self):
 | 
				
			||||||
        model = self.model()
 | 
					        model = self.model()
 | 
				
			||||||
        cols = model.columnCount()
 | 
					        cols = model.columnCount()
 | 
				
			||||||
        cidx = self.selectionModel().currentIndex()
 | 
					        # rows = model.rowCount()
 | 
				
			||||||
        rows = model.rowCount()
 | 
					 | 
				
			||||||
        self.expandAll()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # compute the approx height in pixels needed to include
 | 
					 | 
				
			||||||
        # all result rows in view.
 | 
					 | 
				
			||||||
        row_h = rows_h = self.rowHeight(cidx) * (rows + 1)
 | 
					 | 
				
			||||||
        for idx, item in self.iter_df_rows():
 | 
					 | 
				
			||||||
            row_h = self.rowHeight(idx)
 | 
					 | 
				
			||||||
            rows_h += row_h
 | 
					 | 
				
			||||||
            # print(f'row_h: {row_h}\nrows_h: {rows_h}')
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            # TODO: could we just break early here on detection
 | 
					 | 
				
			||||||
            # of ``rows_h >= h``?
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        col_w_tot = 0
 | 
					        col_w_tot = 0
 | 
				
			||||||
        for i in range(cols):
 | 
					        for i in range(cols):
 | 
				
			||||||
            # only slap in a rows's height's worth
 | 
					 | 
				
			||||||
            # of padding once at startup.. no idea
 | 
					 | 
				
			||||||
            if (
 | 
					 | 
				
			||||||
                not self._init
 | 
					 | 
				
			||||||
                and row_h
 | 
					 | 
				
			||||||
            ):
 | 
					 | 
				
			||||||
                col_w_tot = row_h
 | 
					 | 
				
			||||||
                self._init = True
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            self.resizeColumnToContents(i)
 | 
					            self.resizeColumnToContents(i)
 | 
				
			||||||
            col_w_tot += self.columnWidth(i)
 | 
					            col_w_tot += self.columnWidth(i)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # NOTE: if the heigh `h` set here is **too large** then the
 | 
					        win = self.window()
 | 
				
			||||||
        # resize event will perpetually trigger as the window causes
 | 
					        win_h = win.height()
 | 
				
			||||||
        # some kind of recompute of callbacks.. so we have to ensure
 | 
					        edit_h = self.parent().bar.height()
 | 
				
			||||||
        # it's limited.
 | 
					        sb_h = win.statusBar().height()
 | 
				
			||||||
        if h:
 | 
					 | 
				
			||||||
            h: int = round(h)
 | 
					 | 
				
			||||||
            abs_mx = round(0.91 * h)
 | 
					 | 
				
			||||||
            self.setMaximumHeight(abs_mx)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if rows_h <= abs_mx:
 | 
					        # TODO: probably make this more general / less hacky
 | 
				
			||||||
                # self.setMinimumHeight(rows_h)
 | 
					        # we should figure out the exact number of rows to allow
 | 
				
			||||||
                self.setMinimumHeight(rows_h)
 | 
					        # inclusive of search bar and header "rows", in pixel terms.
 | 
				
			||||||
                # self.setFixedHeight(rows_h)
 | 
					        # Eventually when we have an "info" widget below the results we
 | 
				
			||||||
 | 
					        # will want space for it and likely terminating the results-view
 | 
				
			||||||
 | 
					        # space **exactly on a row** would be ideal.
 | 
				
			||||||
 | 
					        # if row_px > 0:
 | 
				
			||||||
 | 
					        #     rows = ceil(window_h / row_px) - 4
 | 
				
			||||||
 | 
					        # else:
 | 
				
			||||||
 | 
					        #     rows = 16
 | 
				
			||||||
 | 
					        # self.setFixedHeight(rows * row_px)
 | 
				
			||||||
 | 
					        # self.resize(self.width(), rows * row_px)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            else:
 | 
					        # NOTE: if the heigh set here is **too large** then the resize
 | 
				
			||||||
                self.setMinimumHeight(abs_mx)
 | 
					        # event will perpetually trigger as the window causes some kind
 | 
				
			||||||
 | 
					        # of recompute of callbacks.. so we have to ensure it's limited.
 | 
				
			||||||
 | 
					        h = win_h - (edit_h + 1.666*sb_h)
 | 
				
			||||||
 | 
					        assert h > 0
 | 
				
			||||||
 | 
					        self.setFixedHeight(round(h))
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # dyncamically size to width of longest result seen
 | 
					        # size to width of longest result seen thus far
 | 
				
			||||||
        curr_w = self.width()
 | 
					        # TODO: should we always dynamically scale to longest result?
 | 
				
			||||||
        if curr_w < col_w_tot:
 | 
					        if self.width() < col_w_tot:
 | 
				
			||||||
            self.setMinimumWidth(col_w_tot)
 | 
					            self.setFixedWidth(col_w_tot)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.update()
 | 
					        self.update()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -345,23 +331,6 @@ class CompleterView(QTreeView):
 | 
				
			||||||
            item = model.itemFromIndex(idx)
 | 
					            item = model.itemFromIndex(idx)
 | 
				
			||||||
            yield idx, item
 | 
					            yield idx, item
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def iter_df_rows(
 | 
					 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        iparent: QModelIndex = QModelIndex(),
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> Iterator[tuple[QModelIndex, QStandardItem]]:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        model = self.model()
 | 
					 | 
				
			||||||
        isections = model.rowCount(iparent)
 | 
					 | 
				
			||||||
        for i in range(isections):
 | 
					 | 
				
			||||||
            idx = model.index(i, 0, iparent)
 | 
					 | 
				
			||||||
            item = model.itemFromIndex(idx)
 | 
					 | 
				
			||||||
            yield idx, item
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
            if model.hasChildren(idx):
 | 
					 | 
				
			||||||
                # recursively yield child items depth-first
 | 
					 | 
				
			||||||
                yield from self.iter_df_rows(idx)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def find_section(
 | 
					    def find_section(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        section: str,
 | 
					        section: str,
 | 
				
			||||||
| 
						 | 
					@ -385,8 +354,7 @@ class CompleterView(QTreeView):
 | 
				
			||||||
        status_field: str = None,
 | 
					        status_field: str = None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''
 | 
					        '''Clear all result-rows from under the depth = 1 section.
 | 
				
			||||||
        Clear all result-rows from under the depth = 1 section.
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        idx = self.find_section(section)
 | 
					        idx = self.find_section(section)
 | 
				
			||||||
| 
						 | 
					@ -407,6 +375,8 @@ class CompleterView(QTreeView):
 | 
				
			||||||
            else:
 | 
					            else:
 | 
				
			||||||
                model.setItem(idx.row(), 1, QStandardItem())
 | 
					                model.setItem(idx.row(), 1, QStandardItem())
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					            self.resize_to_results()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            return idx
 | 
					            return idx
 | 
				
			||||||
        else:
 | 
					        else:
 | 
				
			||||||
            return None
 | 
					            return None
 | 
				
			||||||
| 
						 | 
					@ -474,22 +444,9 @@ class CompleterView(QTreeView):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.show_matches()
 | 
					        self.show_matches()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def show_matches(
 | 
					    def show_matches(self) -> None:
 | 
				
			||||||
        self,
 | 
					 | 
				
			||||||
        wh: Optional[tuple[float, float]] = None,
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    ) -> None:
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        if wh:
 | 
					 | 
				
			||||||
            self.resize_to_results(*wh)
 | 
					 | 
				
			||||||
        else:
 | 
					 | 
				
			||||||
            # case where it's just an update from results and *NOT*
 | 
					 | 
				
			||||||
            # a resize of some higher level parent-container widget.
 | 
					 | 
				
			||||||
            search = self.parent()
 | 
					 | 
				
			||||||
            w, h = search.space_dims()
 | 
					 | 
				
			||||||
            self.resize_to_results(w=w, h=h)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        self.show()
 | 
					        self.show()
 | 
				
			||||||
 | 
					        self.resize_to_results()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class SearchBar(Edit):
 | 
					class SearchBar(Edit):
 | 
				
			||||||
| 
						 | 
					@ -509,15 +466,18 @@ class SearchBar(Edit):
 | 
				
			||||||
        self.godwidget = godwidget
 | 
					        self.godwidget = godwidget
 | 
				
			||||||
        super().__init__(parent, **kwargs)
 | 
					        super().__init__(parent, **kwargs)
 | 
				
			||||||
        self.view: CompleterView = view
 | 
					        self.view: CompleterView = view
 | 
				
			||||||
 | 
					        godwidget._widgets[view.mode_name] = view
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					    def show(self) -> None:
 | 
				
			||||||
 | 
					        super().show()
 | 
				
			||||||
 | 
					        self.view.show_matches()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def unfocus(self) -> None:
 | 
					    def unfocus(self) -> None:
 | 
				
			||||||
        self.parent().hide()
 | 
					        self.parent().hide()
 | 
				
			||||||
        self.clearFocus()
 | 
					        self.clearFocus()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def hide(self) -> None:
 | 
					 | 
				
			||||||
        if self.view:
 | 
					        if self.view:
 | 
				
			||||||
            self.view.hide()
 | 
					            self.view.hide()
 | 
				
			||||||
        super().hide()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class SearchWidget(QtWidgets.QWidget):
 | 
					class SearchWidget(QtWidgets.QWidget):
 | 
				
			||||||
| 
						 | 
					@ -536,16 +496,15 @@ class SearchWidget(QtWidgets.QWidget):
 | 
				
			||||||
        parent=None,
 | 
					        parent=None,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        super().__init__(parent)
 | 
					        super().__init__(parent or godwidget)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # size it as we specify
 | 
					        # size it as we specify
 | 
				
			||||||
        self.setSizePolicy(
 | 
					        self.setSizePolicy(
 | 
				
			||||||
            QtWidgets.QSizePolicy.Fixed,
 | 
					            QtWidgets.QSizePolicy.Fixed,
 | 
				
			||||||
            QtWidgets.QSizePolicy.Fixed,
 | 
					            QtWidgets.QSizePolicy.Expanding,
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.godwidget = godwidget
 | 
					        self.godwidget = godwidget
 | 
				
			||||||
        godwidget.reg_for_resize(self)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.vbox = QtWidgets.QVBoxLayout(self)
 | 
					        self.vbox = QtWidgets.QVBoxLayout(self)
 | 
				
			||||||
        self.vbox.setContentsMargins(0, 4, 4, 0)
 | 
					        self.vbox.setContentsMargins(0, 4, 4, 0)
 | 
				
			||||||
| 
						 | 
					@ -595,22 +554,17 @@ class SearchWidget(QtWidgets.QWidget):
 | 
				
			||||||
        self.vbox.setAlignment(self.view, Qt.AlignTop | Qt.AlignLeft)
 | 
					        self.vbox.setAlignment(self.view, Qt.AlignTop | Qt.AlignLeft)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def focus(self) -> None:
 | 
					    def focus(self) -> None:
 | 
				
			||||||
        self.show()
 | 
					
 | 
				
			||||||
 | 
					        if self.view.model().rowCount(QModelIndex()) == 0:
 | 
				
			||||||
 | 
					            # fill cache list if nothing existing
 | 
				
			||||||
 | 
					            self.view.set_section_entries(
 | 
				
			||||||
 | 
					                'cache',
 | 
				
			||||||
 | 
					                list(reversed(self.godwidget._chart_cache)),
 | 
				
			||||||
 | 
					                clear_all=True,
 | 
				
			||||||
 | 
					            )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.bar.focus()
 | 
					        self.bar.focus()
 | 
				
			||||||
 | 
					        self.show()
 | 
				
			||||||
    def show_only_cache_entries(self) -> None:
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        Clear the search results view and show only cached (aka recently
 | 
					 | 
				
			||||||
        loaded with active data) feeds in the results section.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        godw = self.godwidget
 | 
					 | 
				
			||||||
        self.view.set_section_entries(
 | 
					 | 
				
			||||||
            'cache',
 | 
					 | 
				
			||||||
            list(reversed(godw._chart_cache)),
 | 
					 | 
				
			||||||
            # remove all other completion results except for cache
 | 
					 | 
				
			||||||
            clear_all=True,
 | 
					 | 
				
			||||||
        )
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def get_current_item(self) -> Optional[tuple[str, str]]:
 | 
					    def get_current_item(self) -> Optional[tuple[str, str]]:
 | 
				
			||||||
        '''Return the current completer tree selection as
 | 
					        '''Return the current completer tree selection as
 | 
				
			||||||
| 
						 | 
					@ -649,8 +603,7 @@ class SearchWidget(QtWidgets.QWidget):
 | 
				
			||||||
        clear_to_cache: bool = True,
 | 
					        clear_to_cache: bool = True,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> Optional[str]:
 | 
					    ) -> Optional[str]:
 | 
				
			||||||
        '''
 | 
					        '''Attempt to load and switch the current selected
 | 
				
			||||||
        Attempt to load and switch the current selected
 | 
					 | 
				
			||||||
        completion result to the affiliated chart app.
 | 
					        completion result to the affiliated chart app.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        Return any loaded symbol.
 | 
					        Return any loaded symbol.
 | 
				
			||||||
| 
						 | 
					@ -661,11 +614,11 @@ class SearchWidget(QtWidgets.QWidget):
 | 
				
			||||||
            return None
 | 
					            return None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        provider, symbol = value
 | 
					        provider, symbol = value
 | 
				
			||||||
        godw = self.godwidget
 | 
					        chart = self.godwidget
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        log.info(f'Requesting symbol: {symbol}.{provider}')
 | 
					        log.info(f'Requesting symbol: {symbol}.{provider}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        await godw.load_symbol(
 | 
					        await chart.load_symbol(
 | 
				
			||||||
            provider,
 | 
					            provider,
 | 
				
			||||||
            symbol,
 | 
					            symbol,
 | 
				
			||||||
            'info',
 | 
					            'info',
 | 
				
			||||||
| 
						 | 
					@ -682,46 +635,18 @@ class SearchWidget(QtWidgets.QWidget):
 | 
				
			||||||
            # Re-order the symbol cache on the chart to display in
 | 
					            # Re-order the symbol cache on the chart to display in
 | 
				
			||||||
            # LIFO order. this is normally only done internally by
 | 
					            # LIFO order. this is normally only done internally by
 | 
				
			||||||
            # the chart on new symbols being loaded into memory
 | 
					            # the chart on new symbols being loaded into memory
 | 
				
			||||||
            godw.set_chart_symbol(
 | 
					            chart.set_chart_symbol(fqsn, chart.linkedsplits)
 | 
				
			||||||
                fqsn, (
 | 
					
 | 
				
			||||||
                    godw.hist_linked,
 | 
					            self.view.set_section_entries(
 | 
				
			||||||
                    godw.rt_linked,
 | 
					                'cache',
 | 
				
			||||||
                )
 | 
					                values=list(reversed(chart._chart_cache)),
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                # remove all other completion results except for cache
 | 
				
			||||||
 | 
					                clear_all=True,
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
            self.show_only_cache_entries()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.bar.focus()
 | 
					 | 
				
			||||||
        return fqsn
 | 
					        return fqsn
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def space_dims(self) -> tuple[float, float]:
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        Compute and return the "available space dimentions" for this
 | 
					 | 
				
			||||||
        search widget in terms of px space for results by return the
 | 
					 | 
				
			||||||
        pair of width and height.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        # XXX: dun need dis rite?
 | 
					 | 
				
			||||||
        # win = self.window()
 | 
					 | 
				
			||||||
        # win_h = win.height()
 | 
					 | 
				
			||||||
        # sb_h = win.statusBar().height()
 | 
					 | 
				
			||||||
        godw = self.godwidget
 | 
					 | 
				
			||||||
        hl = godw.hist_linked
 | 
					 | 
				
			||||||
        edit_h = self.bar.height()
 | 
					 | 
				
			||||||
        h = hl.height() - edit_h
 | 
					 | 
				
			||||||
        w = hl.width()
 | 
					 | 
				
			||||||
        return w, h
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    def on_resize(self) -> None:
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        Resize relay event from god, resize all child widgets.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        Right now this is just view to contents and/or the fast chart
 | 
					 | 
				
			||||||
        height.
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        '''
 | 
					 | 
				
			||||||
        w, h = self.space_dims()
 | 
					 | 
				
			||||||
        self.bar.view.show_matches(wh=(w, h))
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
_search_active: trio.Event = trio.Event()
 | 
					_search_active: trio.Event = trio.Event()
 | 
				
			||||||
_search_enabled: bool = False
 | 
					_search_enabled: bool = False
 | 
				
			||||||
| 
						 | 
					@ -787,11 +712,10 @@ async def fill_results(
 | 
				
			||||||
    max_pause_time: float = 6/16 + 0.001,
 | 
					    max_pause_time: float = 6/16 + 0.001,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
) -> None:
 | 
					) -> None:
 | 
				
			||||||
    '''
 | 
					    """Task to search through providers and fill in possible
 | 
				
			||||||
    Task to search through providers and fill in possible
 | 
					 | 
				
			||||||
    completion results.
 | 
					    completion results.
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    '''
 | 
					    """
 | 
				
			||||||
    global _search_active, _search_enabled, _searcher_cache
 | 
					    global _search_active, _search_enabled, _searcher_cache
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    bar = search.bar
 | 
					    bar = search.bar
 | 
				
			||||||
| 
						 | 
					@ -805,10 +729,6 @@ async def fill_results(
 | 
				
			||||||
    matches = defaultdict(list)
 | 
					    matches = defaultdict(list)
 | 
				
			||||||
    has_results: defaultdict[str, set[str]] = defaultdict(set)
 | 
					    has_results: defaultdict[str, set[str]] = defaultdict(set)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # show cached feed list at startup
 | 
					 | 
				
			||||||
    search.show_only_cache_entries()
 | 
					 | 
				
			||||||
    search.on_resize()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
    while True:
 | 
					    while True:
 | 
				
			||||||
        await _search_active.wait()
 | 
					        await _search_active.wait()
 | 
				
			||||||
        period = None
 | 
					        period = None
 | 
				
			||||||
| 
						 | 
					@ -822,7 +742,7 @@ async def fill_results(
 | 
				
			||||||
                pattern = await recv_chan.receive()
 | 
					                pattern = await recv_chan.receive()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            period = time.time() - wait_start
 | 
					            period = time.time() - wait_start
 | 
				
			||||||
            log.debug(f'{pattern} after {period}')
 | 
					            print(f'{pattern} after {period}')
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            # during fast multiple key inputs, wait until a pause
 | 
					            # during fast multiple key inputs, wait until a pause
 | 
				
			||||||
            # (in typing) to initiate search
 | 
					            # (in typing) to initiate search
 | 
				
			||||||
| 
						 | 
					@ -921,7 +841,8 @@ async def handle_keyboard_input(
 | 
				
			||||||
    godwidget = search.godwidget
 | 
					    godwidget = search.godwidget
 | 
				
			||||||
    view = bar.view
 | 
					    view = bar.view
 | 
				
			||||||
    view.set_font_size(bar.dpi_font.px_size)
 | 
					    view.set_font_size(bar.dpi_font.px_size)
 | 
				
			||||||
    send, recv = trio.open_memory_channel(616)
 | 
					
 | 
				
			||||||
 | 
					    send, recv = trio.open_memory_channel(16)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    async with trio.open_nursery() as n:
 | 
					    async with trio.open_nursery() as n:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -936,10 +857,6 @@ async def handle_keyboard_input(
 | 
				
			||||||
            )
 | 
					            )
 | 
				
			||||||
        )
 | 
					        )
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        bar.focus()
 | 
					 | 
				
			||||||
        search.show_only_cache_entries()
 | 
					 | 
				
			||||||
        await trio.sleep(0)
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        async for kbmsg in recv_chan:
 | 
					        async for kbmsg in recv_chan:
 | 
				
			||||||
            event, etype, key, mods, txt = kbmsg.to_tuple()
 | 
					            event, etype, key, mods, txt = kbmsg.to_tuple()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -950,11 +867,10 @@ async def handle_keyboard_input(
 | 
				
			||||||
                ctl = True
 | 
					                ctl = True
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            if key in (Qt.Key_Enter, Qt.Key_Return):
 | 
					            if key in (Qt.Key_Enter, Qt.Key_Return):
 | 
				
			||||||
                _search_enabled = False
 | 
					
 | 
				
			||||||
                await search.chart_current_item(clear_to_cache=True)
 | 
					                await search.chart_current_item(clear_to_cache=True)
 | 
				
			||||||
                search.show_only_cache_entries()
 | 
					                _search_enabled = False
 | 
				
			||||||
                view.show_matches()
 | 
					                continue
 | 
				
			||||||
                search.focus()
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
            elif not ctl and not bar.text():
 | 
					            elif not ctl and not bar.text():
 | 
				
			||||||
                # if nothing in search text show the cache
 | 
					                # if nothing in search text show the cache
 | 
				
			||||||
| 
						 | 
					@ -971,7 +887,7 @@ async def handle_keyboard_input(
 | 
				
			||||||
                Qt.Key_Space,   # i feel like this is the "native" one
 | 
					                Qt.Key_Space,   # i feel like this is the "native" one
 | 
				
			||||||
                Qt.Key_Alt,
 | 
					                Qt.Key_Alt,
 | 
				
			||||||
            }:
 | 
					            }:
 | 
				
			||||||
                bar.unfocus()
 | 
					                search.bar.unfocus()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                # kill the search and focus back on main chart
 | 
					                # kill the search and focus back on main chart
 | 
				
			||||||
                if godwidget:
 | 
					                if godwidget:
 | 
				
			||||||
| 
						 | 
					@ -1019,10 +935,9 @@ async def handle_keyboard_input(
 | 
				
			||||||
                if item:
 | 
					                if item:
 | 
				
			||||||
                    parent_item = item.parent()
 | 
					                    parent_item = item.parent()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
                    # if we're in the cache section and thus the next
 | 
					 | 
				
			||||||
                    # selection is a cache item, switch and show it
 | 
					 | 
				
			||||||
                    # immediately since it should be very fast.
 | 
					 | 
				
			||||||
                    if parent_item and parent_item.text() == 'cache':
 | 
					                    if parent_item and parent_item.text() == 'cache':
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					                        # if it's a cache item, switch and show it immediately
 | 
				
			||||||
                        await search.chart_current_item(clear_to_cache=False)
 | 
					                        await search.chart_current_item(clear_to_cache=False)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            elif not ctl:
 | 
					            elif not ctl:
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -21,29 +21,15 @@ Qt main window singletons and stuff.
 | 
				
			||||||
import os
 | 
					import os
 | 
				
			||||||
import signal
 | 
					import signal
 | 
				
			||||||
import time
 | 
					import time
 | 
				
			||||||
from typing import (
 | 
					from typing import Callable, Optional, Union
 | 
				
			||||||
    Callable,
 | 
					 | 
				
			||||||
    Optional,
 | 
					 | 
				
			||||||
    Union,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
import uuid
 | 
					import uuid
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					from pyqtgraph import QtGui
 | 
				
			||||||
from PyQt5 import QtCore
 | 
					from PyQt5 import QtCore
 | 
				
			||||||
from PyQt5.QtWidgets import (
 | 
					from PyQt5.QtWidgets import QLabel, QStatusBar
 | 
				
			||||||
    QWidget,
 | 
					 | 
				
			||||||
    QMainWindow,
 | 
					 | 
				
			||||||
    QApplication,
 | 
					 | 
				
			||||||
    QLabel,
 | 
					 | 
				
			||||||
    QStatusBar,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
from PyQt5.QtGui import (
 | 
					 | 
				
			||||||
    QScreen,
 | 
					 | 
				
			||||||
    QCloseEvent,
 | 
					 | 
				
			||||||
)
 | 
					 | 
				
			||||||
from ..log import get_logger
 | 
					from ..log import get_logger
 | 
				
			||||||
from ._style import _font_small, hcolor
 | 
					from ._style import _font_small, hcolor
 | 
				
			||||||
from ._chart import GodWidget
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
log = get_logger(__name__)
 | 
					log = get_logger(__name__)
 | 
				
			||||||
| 
						 | 
					@ -162,13 +148,12 @@ class MultiStatus:
 | 
				
			||||||
            self.bar.clearMessage()
 | 
					            self.bar.clearMessage()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
class MainWindow(QMainWindow):
 | 
					class MainWindow(QtGui.QMainWindow):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    # XXX: for tiling wms this should scale
 | 
					    # XXX: for tiling wms this should scale
 | 
				
			||||||
    # with the alloted window size.
 | 
					    # with the alloted window size.
 | 
				
			||||||
    # TODO: detect for tiling and if untrue set some size?
 | 
					    # TODO: detect for tiling and if untrue set some size?
 | 
				
			||||||
    # size = (300, 500)
 | 
					    size = (300, 500)
 | 
				
			||||||
    godwidget: GodWidget
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
    title = 'piker chart (ur symbol is loading bby)'
 | 
					    title = 'piker chart (ur symbol is loading bby)'
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -177,20 +162,17 @@ class MainWindow(QMainWindow):
 | 
				
			||||||
        # self.setMinimumSize(*self.size)
 | 
					        # self.setMinimumSize(*self.size)
 | 
				
			||||||
        self.setWindowTitle(self.title)
 | 
					        self.setWindowTitle(self.title)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # set by runtime after `trio` is engaged.
 | 
					 | 
				
			||||||
        self.godwidget: Optional[GodWidget] = None
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        self._status_bar: QStatusBar = None
 | 
					        self._status_bar: QStatusBar = None
 | 
				
			||||||
        self._status_label: QLabel = None
 | 
					        self._status_label: QLabel = None
 | 
				
			||||||
        self._size: Optional[tuple[int, int]] = None
 | 
					        self._size: Optional[tuple[int, int]] = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    @property
 | 
					    @property
 | 
				
			||||||
    def mode_label(self) -> QLabel:
 | 
					    def mode_label(self) -> QtGui.QLabel:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # init mode label
 | 
					        # init mode label
 | 
				
			||||||
        if not self._status_label:
 | 
					        if not self._status_label:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
            self._status_label = label = QLabel()
 | 
					            self._status_label = label = QtGui.QLabel()
 | 
				
			||||||
            label.setStyleSheet(
 | 
					            label.setStyleSheet(
 | 
				
			||||||
                f"""QLabel {{
 | 
					                f"""QLabel {{
 | 
				
			||||||
                    color : {hcolor('gunmetal')};
 | 
					                    color : {hcolor('gunmetal')};
 | 
				
			||||||
| 
						 | 
					@ -212,7 +194,8 @@ class MainWindow(QMainWindow):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def closeEvent(
 | 
					    def closeEvent(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
        event: QCloseEvent,
 | 
					
 | 
				
			||||||
 | 
					        event: QtGui.QCloseEvent,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
        '''Cancel the root actor asap.
 | 
					        '''Cancel the root actor asap.
 | 
				
			||||||
| 
						 | 
					@ -252,8 +235,8 @@ class MainWindow(QMainWindow):
 | 
				
			||||||
    def on_focus_change(
 | 
					    def on_focus_change(
 | 
				
			||||||
        self,
 | 
					        self,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        last: QWidget,
 | 
					        last: QtGui.QWidget,
 | 
				
			||||||
        current: QWidget,
 | 
					        current: QtGui.QWidget,
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    ) -> None:
 | 
					    ) -> None:
 | 
				
			||||||
 | 
					
 | 
				
			||||||
| 
						 | 
					@ -264,12 +247,11 @@ class MainWindow(QMainWindow):
 | 
				
			||||||
            name = getattr(current, 'mode_name', '')
 | 
					            name = getattr(current, 'mode_name', '')
 | 
				
			||||||
            self.set_mode_name(name)
 | 
					            self.set_mode_name(name)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def current_screen(self) -> QScreen:
 | 
					    def current_screen(self) -> QtGui.QScreen:
 | 
				
			||||||
        '''
 | 
					        """Get a frickin screen (if we can, gawd).
 | 
				
			||||||
        Get a frickin screen (if we can, gawd).
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        '''
 | 
					        """
 | 
				
			||||||
        app = QApplication.instance()
 | 
					        app = QtGui.QApplication.instance()
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        for _ in range(3):
 | 
					        for _ in range(3):
 | 
				
			||||||
            screen = app.screenAt(self.pos())
 | 
					            screen = app.screenAt(self.pos())
 | 
				
			||||||
| 
						 | 
					@ -302,7 +284,7 @@ class MainWindow(QMainWindow):
 | 
				
			||||||
        '''
 | 
					        '''
 | 
				
			||||||
        # https://stackoverflow.com/a/18975846
 | 
					        # https://stackoverflow.com/a/18975846
 | 
				
			||||||
        if not size and not self._size:
 | 
					        if not size and not self._size:
 | 
				
			||||||
            # app = QApplication.instance()
 | 
					            app = QtGui.QApplication.instance()
 | 
				
			||||||
            geo = self.current_screen().geometry()
 | 
					            geo = self.current_screen().geometry()
 | 
				
			||||||
            h, w = geo.height(), geo.width()
 | 
					            h, w = geo.height(), geo.width()
 | 
				
			||||||
            # use approx 1/3 of the area of the screen by default
 | 
					            # use approx 1/3 of the area of the screen by default
 | 
				
			||||||
| 
						 | 
					@ -310,36 +292,9 @@ class MainWindow(QMainWindow):
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        self.resize(*size or self._size)
 | 
					        self.resize(*size or self._size)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
    def resizeEvent(self, event: QtCore.QEvent) -> None:
 | 
					 | 
				
			||||||
        if (
 | 
					 | 
				
			||||||
            # event.spontaneous()
 | 
					 | 
				
			||||||
            event.oldSize().height == event.size().height
 | 
					 | 
				
			||||||
        ):
 | 
					 | 
				
			||||||
            event.ignore()
 | 
					 | 
				
			||||||
            return
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # XXX: uncomment for debugging..
 | 
					 | 
				
			||||||
        # attrs = {}
 | 
					 | 
				
			||||||
        # for key in dir(event):
 | 
					 | 
				
			||||||
        #     if key == '__dir__':
 | 
					 | 
				
			||||||
        #         continue
 | 
					 | 
				
			||||||
        #     attr = getattr(event, key)
 | 
					 | 
				
			||||||
        #     try:
 | 
					 | 
				
			||||||
        #         attrs[key] = attr()
 | 
					 | 
				
			||||||
        #     except TypeError:
 | 
					 | 
				
			||||||
        #         attrs[key] = attr
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
        # from pprint import pformat
 | 
					 | 
				
			||||||
        # print(
 | 
					 | 
				
			||||||
        #     f'{pformat(attrs)}\n'
 | 
					 | 
				
			||||||
        #     f'WINDOW RESIZE: {self.size()}\n\n'
 | 
					 | 
				
			||||||
        # )
 | 
					 | 
				
			||||||
        self.godwidget.on_win_resize(event)
 | 
					 | 
				
			||||||
        event.accept()
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
# singleton app per actor
 | 
					# singleton app per actor
 | 
				
			||||||
_qt_win: QMainWindow = None
 | 
					_qt_win: QtGui.QMainWindow = None
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
def main_window() -> MainWindow:
 | 
					def main_window() -> MainWindow:
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							| 
						 | 
					@ -6,7 +6,8 @@
 | 
				
			||||||
# `pyqtgraph` peeps keep breaking, fixing, improving so might as well
 | 
					# `pyqtgraph` peeps keep breaking, fixing, improving so might as well
 | 
				
			||||||
# pin this to a dev branch that we have more control over especially
 | 
					# pin this to a dev branch that we have more control over especially
 | 
				
			||||||
# as more graphics stuff gets hashed out.
 | 
					# as more graphics stuff gets hashed out.
 | 
				
			||||||
-e git+https://github.com/pikers/pyqtgraph.git@master#egg=pyqtgraph
 | 
					-e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# our async client for ``marketstore`` (the tsdb)
 | 
					# our async client for ``marketstore`` (the tsdb)
 | 
				
			||||||
-e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
 | 
					-e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
 | 
				
			||||||
| 
						 | 
					@ -17,7 +18,4 @@
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# ``asyncvnc`` for sending interactions to ib-gw inside docker
 | 
					# ``asyncvnc`` for sending interactions to ib-gw inside docker
 | 
				
			||||||
-e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc
 | 
					-e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc
 | 
				
			||||||
 | 
					 | 
				
			||||||
# ``cryptofeed`` for connecting to various crypto exchanges + custom fixes
 | 
					 | 
				
			||||||
-e git+https://github.com/pikers/cryptofeed.git@date_parsing#egg=cryptofeed
 | 
					 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
							
								
								
									
										5
									
								
								setup.py
								
								
								
								
							
							
						
						
									
										5
									
								
								setup.py
								
								
								
								
							| 
						 | 
					@ -41,24 +41,23 @@ setup(
 | 
				
			||||||
    },
 | 
					    },
 | 
				
			||||||
    install_requires=[
 | 
					    install_requires=[
 | 
				
			||||||
        'toml',
 | 
					        'toml',
 | 
				
			||||||
        'tomli',  # fastest pure py reader
 | 
					 | 
				
			||||||
        'click',
 | 
					        'click',
 | 
				
			||||||
        'colorlog',
 | 
					        'colorlog',
 | 
				
			||||||
        'attrs',
 | 
					        'attrs',
 | 
				
			||||||
        'pygments',
 | 
					        'pygments',
 | 
				
			||||||
        'colorama',  # numba traceback coloring
 | 
					        'colorama',  # numba traceback coloring
 | 
				
			||||||
        'msgspec',  # performant IPC messaging and structs
 | 
					        'pydantic',  # structured data
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # async
 | 
					        # async
 | 
				
			||||||
        'trio',
 | 
					        'trio',
 | 
				
			||||||
        'trio-websocket',
 | 
					        'trio-websocket',
 | 
				
			||||||
 | 
					        'msgspec',  # performant IPC messaging
 | 
				
			||||||
        'async_generator',
 | 
					        'async_generator',
 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # from github currently (see requirements.txt)
 | 
					        # from github currently (see requirements.txt)
 | 
				
			||||||
        # 'trimeter',  # not released yet..
 | 
					        # 'trimeter',  # not released yet..
 | 
				
			||||||
        # 'tractor',
 | 
					        # 'tractor',
 | 
				
			||||||
        # asyncvnc,
 | 
					        # asyncvnc,
 | 
				
			||||||
        # 'cryptofeed',
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
        # brokers
 | 
					        # brokers
 | 
				
			||||||
        'asks==2.4.8',
 | 
					        'asks==2.4.8',
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
| 
						 | 
					@ -8,6 +8,7 @@ from trio.testing import trio_test
 | 
				
			||||||
from piker.brokers import questrade as qt
 | 
					from piker.brokers import questrade as qt
 | 
				
			||||||
import pytest
 | 
					import pytest
 | 
				
			||||||
import tractor
 | 
					import tractor
 | 
				
			||||||
 | 
					from tractor.testing import tractor_test
 | 
				
			||||||
 | 
					
 | 
				
			||||||
import piker
 | 
					import piker
 | 
				
			||||||
from piker.brokers import get_brokermod
 | 
					from piker.brokers import get_brokermod
 | 
				
			||||||
| 
						 | 
					@ -22,12 +23,6 @@ pytestmark = pytest.mark.skipif(
 | 
				
			||||||
    reason="questrade tests can only be run locally with an API key",
 | 
					    reason="questrade tests can only be run locally with an API key",
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# TODO: this module was removed from tractor into it's
 | 
					 | 
				
			||||||
# tests/conftest.py, we need to rewrite the below tests
 | 
					 | 
				
			||||||
# to use the `open_pikerd_runtime()` to make these work again
 | 
					 | 
				
			||||||
# (if we're not just gonna junk em).
 | 
					 | 
				
			||||||
# from tractor.testing import tractor_test
 | 
					 | 
				
			||||||
 | 
					 | 
				
			||||||
 | 
					
 | 
				
			||||||
# stock quote
 | 
					# stock quote
 | 
				
			||||||
_ex_quotes = {
 | 
					_ex_quotes = {
 | 
				
			||||||
| 
						 | 
					@ -111,7 +106,7 @@ def match_packet(symbols, quotes, feed_type='stock'):
 | 
				
			||||||
    assert not quotes
 | 
					    assert not quotes
 | 
				
			||||||
 | 
					
 | 
				
			||||||
 | 
					
 | 
				
			||||||
# @tractor_test
 | 
					@tractor_test
 | 
				
			||||||
async def test_concurrent_tokens_refresh(us_symbols, loglevel):
 | 
					async def test_concurrent_tokens_refresh(us_symbols, loglevel):
 | 
				
			||||||
    """Verify that concurrent requests from mulitple tasks work alongside
 | 
					    """Verify that concurrent requests from mulitple tasks work alongside
 | 
				
			||||||
    random token refreshing which simulates an access token expiry + refresh
 | 
					    random token refreshing which simulates an access token expiry + refresh
 | 
				
			||||||
| 
						 | 
					@ -342,7 +337,7 @@ async def stream_stocks(feed, symbols):
 | 
				
			||||||
        'options_and_options',
 | 
					        'options_and_options',
 | 
				
			||||||
    ],
 | 
					    ],
 | 
				
			||||||
)
 | 
					)
 | 
				
			||||||
# @tractor_test
 | 
					@tractor_test
 | 
				
			||||||
async def test_quote_streaming(tmx_symbols, loglevel, stream_what):
 | 
					async def test_quote_streaming(tmx_symbols, loglevel, stream_what):
 | 
				
			||||||
    """Set up option streaming using the broker daemon.
 | 
					    """Set up option streaming using the broker daemon.
 | 
				
			||||||
    """
 | 
					    """
 | 
				
			||||||
| 
						 | 
					
 | 
				
			||||||
		Loading…
	
		Reference in New Issue