Compare commits
	
		
			344 Commits 
		
	
	
		
			310_plus
			...
			kraken_fil
		
	
	| Author | SHA1 | Date | 
|---|---|---|
|  | 4a9c16d298 | |
|  | b9d5b904f4 | |
|  | 0aef762d9a | |
|  | c724117c1a | |
|  | cc3bb85c66 | |
|  | 20817313b1 | |
|  | 23d0b8a7ac | |
|  | 087a34f061 | |
|  | 653f5c824b | |
|  | f9217570ab | |
|  | 7f224f0342 | |
|  | 75a5f3795a | |
|  | de9f215c83 | |
|  | 848e345364 | |
|  | 38b190e598 | |
|  | 3a9bc8058f | |
|  | 739a231afc | |
|  | 7dfa4c3cde | |
|  | 7b653fe4f4 | |
|  | 77a687bced | |
|  | d5c1cdd91d | |
|  | 46d3fe88ca | |
|  | 5c8c5d8fbf | |
|  | 71412310c4 | |
|  | 0c323fdc0b | |
|  | 02f53d0c13 | |
|  | 8792c97de6 | |
|  | 980815d075 | |
|  | 4cedfedc21 | |
|  | fe3d0c6fdd | |
|  | 9200e8da57 | |
|  | 430d065da6 | |
|  | ecd93cb05a | |
|  | 4facd161a9 | |
|  | c5447fda06 | |
|  | 0447612b34 | |
|  | b5499b8225 | |
|  | 00aabddfe8 | |
|  | 43fb720877 | |
|  | 9626dbd7ac | |
|  | f286c79a03 | |
|  | accb0eee6c | |
|  | e97dd1cbdb | |
|  | 34fb497eb4 | |
|  | 6669ba6590 | |
|  | cb8099bb8c | |
|  | 80a1a58bfc | |
|  | d60f222bb7 | |
|  | 2c2e43d8ac | |
|  | 212b3d620d | |
|  | 92090b01b8 | |
|  | 9073fbc317 | |
|  | f55f56a29f | |
|  | 28e025d02e | |
|  | e558e5837e | |
|  | a0b415095a | |
|  | 6df181c233 | |
|  | 7acc4e3208 | |
|  | 10ea242143 | |
|  | eda6ecd529 | |
|  | cf5b0bf9c6 | |
|  | b9dba48306 | |
|  | 4d2e23b5ce | |
|  | 973bf87e67 | |
|  | 5861839783 | |
|  | 06845e5504 | |
|  | 43bdd4d022 | |
|  | bafd2cb44f | |
|  | be8fd32e7d | |
|  | ee8c00684b | |
|  | 7379dc03af | |
|  | a602c47d47 | |
|  | 317610e00a | |
|  | c4af706d51 | |
|  | 665bb183f7 | |
|  | f6ba95a6c7 | |
|  | e2cd8c4aef | |
|  | c8bff81220 | |
|  | 2aec1c5f1d | |
|  | bec32956a8 | |
|  | 91fdc7c5c7 | |
|  | b59ed74bc1 | |
|  | 16012f6f02 | |
|  | 2b61672723 | |
|  | 176b230a46 | |
|  | 7fa9dbf869 | |
|  | 87ed9abefa | |
|  | 2548aae73d | |
|  | 1cfa04927d | |
|  | e34ea94f9f | |
|  | 1510383738 | |
|  | 016b669d63 | |
|  | 682a0191ef | |
|  | 9e36dbe47f | |
|  | 8bef67642e | |
|  | 52febac6ae | |
|  | f202699c25 | |
|  | 0fb07670d2 | |
|  | 73d2e7716f | |
|  | 999ae5a1c6 | |
|  | 23ba0e5e69 | |
|  | 941a2196b3 | |
|  | 0cf4e07b84 | |
|  | 7bec989eed | |
|  | 6856ca207f | |
|  | 2e5616850c | |
|  | a83bd9c608 | |
|  | 9651ca84bf | |
|  | 109b35f6eb | |
|  | e28c1748fc | |
|  | 72889b4d1f | |
|  | ae001c3dd7 | |
|  | 2309e7ab05 | |
|  | 46c51b55f7 | |
|  | a9185e7d6f | |
|  | 3a0987e0be | |
|  | d280a592b1 | |
|  | ef5829a6b7 | |
|  | 30bcfdcc83 | |
|  | 1a291939c3 | |
|  | 69e501764a | |
|  | 7f3f7f0372 | |
|  | 1cbf45b4c4 | |
|  | 227a80469e | |
|  | dc8072c6db | |
|  | 808dbb12e6 | |
|  | 44e21b1de9 | |
|  | b3058b8c78 | |
|  | db564d7977 | |
|  | e6a3e8b65a | |
|  | d43ba47ebe | |
|  | 168c9863cb | |
|  | 0fb31586fd | |
|  | 8b609f531b | |
|  | d502274eb9 | |
|  | b1419c850d | |
|  | aa7f24b6db | |
|  | 319e68c855 | |
|  | 64f920d7e5 | |
|  | 3b79743c7b | |
|  | 54008a1976 | |
|  | b96b7a8b9c | |
|  | 0fca1b3e1a | |
|  | 2386270cad | |
|  | 5b135fad61 | |
|  | abb6854e74 | |
|  | 22f9b2552c | |
|  | 57f2478dc7 | |
|  | 5dc9a61ec4 | |
|  | b0d3d9bb01 | |
|  | caecbaa231 | |
|  | a20a8d95d5 | |
|  | ba93f96c71 | |
|  | 804e9afdde | |
|  | 89bcaed15e | |
|  | bb2f8e4304 | |
|  | 8ab8268edc | |
|  | bbcc55b24c | |
|  | 9fa9c27e4d | |
|  | d9b4c4a413 | |
|  | 84cab1327d | |
|  | df4cec930b | |
|  | ab08dc582d | |
|  | f79d9865a0 | |
|  | 00378c330c | |
|  | 180b97b180 | |
|  | f0b3a4d5c0 | |
|  | e2e66324cc | |
|  | d950c78b81 | |
|  | 7dbcbfdcd5 | |
|  | 279c899de5 | |
|  | db5aacdb9c | |
|  | c7b84ab500 | |
|  | 9967adb371 | |
|  | 30ff793a22 | |
|  | 666587991a | |
|  | 01005e40a8 | |
|  | d81e629c29 | |
|  | 2766fad719 | |
|  | ae71168216 | |
|  | a0c238daa7 | |
|  | 7cbdc6a246 | |
|  | 2ff8be71aa | |
|  | ddffaa952d | |
|  | 5520e9ef21 | |
|  | 958e542f7d | |
|  | 927bbc7258 | |
|  | 45bef0cea9 | |
|  | a3d46f713e | |
|  | 5684120c11 | |
|  | ddb195ed2c | |
|  | 6747831677 | |
|  | 9326379b04 | |
|  | 09d9a7ea2b | |
|  | 45871d5846 | |
|  | bf7a49c19b | |
|  | 0a7fce087c | |
|  | d3130ca04c | |
|  | e30a3c5b54 | |
|  | 2393965e83 | |
|  | fb39da19f4 | |
|  | a27431c34f | |
|  | 070b9f3dc1 | |
|  | f2dba44169 | |
|  | 0ef5da0881 | |
|  | 0580b204a3 | |
|  | 6ce699ae1f | |
|  | 3aa72abacf | |
|  | 04004525c1 | |
|  | a7f0adf1cf | |
|  | cef511092d | |
|  | 4e5df973a9 | |
|  | 6a1a62d8c0 | |
|  | e0491cf2e7 | |
|  | 90bc9b9730 | |
|  | f449672c68 | |
|  | fd22f45178 | |
|  | 37f634a2ed | |
|  | dfee9dd97e | |
|  | 2a99f7a4d7 | |
|  | b44e2d9ed9 | |
|  | 795d4d76f4 | |
|  | c26acb1fa8 | |
|  | 11b6699a54 | |
|  | f9bdd643cf | |
|  | 2baea21c7d | |
|  | bea0111753 | |
|  | c870665be0 | |
|  | 4ff1090284 | |
|  | f22461a844 | |
|  | 458c7211ee | |
|  | 5cc4b19a7c | |
|  | f5236f658b | |
|  | a360b66cc0 | |
|  | 4bcb791161 | |
|  | 4c7c78c815 | |
|  | 019867b413 | |
|  | f356fb0a68 | |
|  | 756249ff70 | |
|  | 419ebebe72 | |
|  | a229996ebe | |
|  | af01e89612 | |
|  | 609034c634 | |
|  | 95dd0e6bd6 | |
|  | 479ad1bb15 | |
|  | d506235a8b | |
|  | 7846446a44 | |
|  | 214f864dcf | |
|  | 4c0f2099aa | |
|  | aea7bec2c3 | |
|  | 47777e4192 | |
|  | f6888057c3 | |
|  | f65f56ec75 | |
|  | 5d39b04552 | |
|  | 735fbc6259 | |
|  | fcd7e0f3f3 | |
|  | 9106d13dfe | |
|  | d3caad6e11 | |
|  | f87a2a810a | |
|  | 208e2e9e97 | |
|  | 90cc6eb317 | |
|  | b118becc84 | |
|  | 7442d68ecf | |
|  | 076c167d6e | |
|  | 64d8cd448f | |
|  | ec6a28a8b1 | |
|  | cc15d02488 | |
|  | d5bc43e8dd | |
|  | 287a2c8396 | |
|  | 453ebdfe30 | |
|  | 2b1fb90e03 | |
|  | 695ba5288d | |
|  | d6c32bba86 | |
|  | fa89207583 | |
|  | 557562e25c | |
|  | c6efa2641b | |
|  | 8a7e391b4e | |
|  | aec48a1dd5 | |
|  | 87f301500d | |
|  | 566a54ffb6 | |
|  | f9c4b3cc96 | |
|  | a12e6800ff | |
|  | cc68501c7a | |
|  | 7ebf8a8dc0 | |
|  | 4475823e48 | |
|  | 3713288b48 | |
|  | 4fdfb81876 | |
|  | f32b4d37cb | |
|  | 2063b9d8bb | |
|  | fe14605034 | |
|  | 68b32208de | |
|  | f1fe369bbf | |
|  | 16b2937d23 | |
|  | bfad676b7c | |
|  | c617a06905 | |
|  | ff74f4302a | |
|  | 21153a0e1e | |
|  | b6f344f34a | |
|  | ecdc747ced | |
|  | 5147cd7be0 | |
|  | 3dcb72d429 | |
|  | fbee33b00d | |
|  | 3991d8f911 | |
|  | 7b2e8f1ba5 | |
|  | cbcbb2b243 | |
|  | cd3bfb1ea4 | |
|  | 82b718d5a3 | |
|  | 05a1a4e3d8 | |
|  | 412138a75b | |
|  | c1b63f4757 | |
|  | 5d774bef90 | |
|  | de77c7d209 | |
|  | ce1eb11b59 | |
|  | b629ce177d | |
|  | 73fa320917 | |
|  | dd05ed1371 | |
|  | 2a641ab8b4 | |
|  | f8f7ca350c | |
|  | 88b4ccc768 | |
|  | eb2bad5138 | |
|  | f768576060 | |
|  | add0e92335 | |
|  | 1eb7e109e6 | |
|  | 725909a94c | |
|  | 050aa7594c | |
|  | 450009ff9c | |
|  | b2d5892010 | |
|  | 5a3b465ac0 | |
|  | be7afdaa89 | |
|  | 1c561207f5 | |
|  | ed2c962bb9 | |
|  | 147ceca016 | |
|  | 03a7940f83 | |
|  | dd2a9f74f1 | |
|  | 49c720af3c | |
|  | c620517543 | |
|  | a425c29ef1 | |
|  | 783914c7fe | |
|  | 920a394539 | |
|  | e977597cd0 | |
|  | 7a33ba64f1 | |
|  | 191b94b67c | |
|  | 4ad7b073c3 | |
|  | d92ff9c7a0 | 
|  | @ -50,3 +50,8 @@ prefer_data_account = [ | ||||||
| paper = "XX0000000" | paper = "XX0000000" | ||||||
| margin = "X0000000" | margin = "X0000000" | ||||||
| ira = "X0000000" | ira = "X0000000" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | [deribit] | ||||||
|  | key_id = 'XXXXXXXX' | ||||||
|  | key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx' | ||||||
|  |  | ||||||
|  | @ -3,11 +3,12 @@ | ||||||
| version: "3.5" | version: "3.5" | ||||||
| 
 | 
 | ||||||
| services: | services: | ||||||
|   ib-gateway: |   ib_gw_paper: | ||||||
|     # other image tags available: |     # other image tags available: | ||||||
|     # https://github.com/waytrade/ib-gateway-docker#supported-tags |     # https://github.com/waytrade/ib-gateway-docker#supported-tags | ||||||
|     image: waytrade/ib-gateway:981.3j |     # image: waytrade/ib-gateway:981.3j | ||||||
|     restart: always |     image: waytrade/ib-gateway:1012.2i | ||||||
|  |     restart: always  # restart whenev there's a crash or user clicsk | ||||||
|     network_mode: 'host' |     network_mode: 'host' | ||||||
| 
 | 
 | ||||||
|     volumes: |     volumes: | ||||||
|  | @ -39,14 +40,12 @@ services: | ||||||
|     # this compose file which looks something like: |     # this compose file which looks something like: | ||||||
|     # TWS_USERID='myuser' |     # TWS_USERID='myuser' | ||||||
|     # TWS_PASSWORD='guest' |     # TWS_PASSWORD='guest' | ||||||
|     # TRADING_MODE=paper (or live) |  | ||||||
|     # VNC_SERVER_PASSWORD='diggity' |  | ||||||
| 
 |  | ||||||
|     environment: |     environment: | ||||||
|       TWS_USERID: ${TWS_USERID} |       TWS_USERID: ${TWS_USERID} | ||||||
|       TWS_PASSWORD: ${TWS_PASSWORD} |       TWS_PASSWORD: ${TWS_PASSWORD} | ||||||
|       TRADING_MODE: ${TRADING_MODE:-paper} |       TRADING_MODE: 'paper' | ||||||
|       VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-} |       VNC_SERVER_PASSWORD: 'doggy' | ||||||
|  |       VNC_SERVER_PORT: '3003' | ||||||
| 
 | 
 | ||||||
|     # ports: |     # ports: | ||||||
|     #   - target: 4002 |     #   - target: 4002 | ||||||
|  | @ -62,3 +61,40 @@ services: | ||||||
|       # - "127.0.0.1:4001:4001" |       # - "127.0.0.1:4001:4001" | ||||||
|       # - "127.0.0.1:4002:4002" |       # - "127.0.0.1:4002:4002" | ||||||
|       # - "127.0.0.1:5900:5900" |       # - "127.0.0.1:5900:5900" | ||||||
|  | 
 | ||||||
|  |   ib_gw_live: | ||||||
|  |     image: waytrade/ib-gateway:1012.2i | ||||||
|  |     restart: always | ||||||
|  |     network_mode: 'host' | ||||||
|  | 
 | ||||||
|  |     volumes: | ||||||
|  |       - type: bind | ||||||
|  |         source: ./jts_live.ini | ||||||
|  |         target: /root/jts/jts.ini | ||||||
|  |         # don't let ibc clobber this file for | ||||||
|  |         # the main reason of not having a stupid | ||||||
|  |         # timezone set.. | ||||||
|  |         read_only: true | ||||||
|  | 
 | ||||||
|  |       # force our own ibc config | ||||||
|  |       - type: bind | ||||||
|  |         source: ./ibc.ini | ||||||
|  |         target: /root/ibc/config.ini | ||||||
|  | 
 | ||||||
|  |       # force our noop script - socat isn't needed in host mode. | ||||||
|  |       - type: bind | ||||||
|  |         source: ./fork_ports_delayed.sh | ||||||
|  |         target: /root/scripts/fork_ports_delayed.sh | ||||||
|  | 
 | ||||||
|  |       # force our noop script - socat isn't needed in host mode. | ||||||
|  |       - type: bind | ||||||
|  |         source: ./run_x11_vnc.sh | ||||||
|  |         target: /root/scripts/run_x11_vnc.sh | ||||||
|  |         read_only: true | ||||||
|  | 
 | ||||||
|  |     # NOTE: to fill these out, define an `.env` file in the same dir as | ||||||
|  |     # this compose file which looks something like: | ||||||
|  |     environment: | ||||||
|  |       TRADING_MODE: 'live' | ||||||
|  |       VNC_SERVER_PASSWORD: 'doggy' | ||||||
|  |       VNC_SERVER_PORT: '3004' | ||||||
|  |  | ||||||
|  | @ -188,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes | ||||||
| # | # | ||||||
| # The default value is 60. | # The default value is 60. | ||||||
| 
 | 
 | ||||||
| LoginDialogDisplayTimeout = 60 | LoginDialogDisplayTimeout=20 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -292,7 +292,7 @@ ExistingSessionDetectedAction=primary | ||||||
| # be set dynamically at run-time: most users will never need it, | # be set dynamically at run-time: most users will never need it, | ||||||
| # so don't use it unless you know you need it. | # so don't use it unless you know you need it. | ||||||
| 
 | 
 | ||||||
| OverrideTwsApiPort=4002 | ; OverrideTwsApiPort=4002 | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # Read-only Login | # Read-only Login | ||||||
|  |  | ||||||
|  | @ -0,0 +1,33 @@ | ||||||
|  | [IBGateway] | ||||||
|  | ApiOnly=true | ||||||
|  | LocalServerPort=4001 | ||||||
|  | # NOTE: must be set if using IBC's "reject" mode | ||||||
|  | TrustedIPs=127.0.0.1 | ||||||
|  | ; RemoteHostOrderRouting=ndc1.ibllc.com | ||||||
|  | ; WriteDebug=true | ||||||
|  | ; RemotePortOrderRouting=4001 | ||||||
|  | ; useRemoteSettings=false | ||||||
|  | ; tradingMode=p | ||||||
|  | ; Steps=8 | ||||||
|  | ; colorPalletName=dark | ||||||
|  | 
 | ||||||
|  | # window geo, this may be useful for sending `xdotool` commands? | ||||||
|  | ; MainWindow.Width=1986 | ||||||
|  | ; screenHeight=3960 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | [Logon] | ||||||
|  | Locale=en | ||||||
|  | # most markets are oriented around this zone | ||||||
|  | # so might as well hard code it. | ||||||
|  | TimeZone=America/New_York | ||||||
|  | UseSSL=true | ||||||
|  | displayedproxymsg=1 | ||||||
|  | os_titlebar=true | ||||||
|  | s3store=true | ||||||
|  | useRemoteSettings=false | ||||||
|  | 
 | ||||||
|  | [Communication] | ||||||
|  | ctciAutoEncrypt=true | ||||||
|  | Region=usr | ||||||
|  | ; Peer=cdc1.ibllc.com:4001 | ||||||
|  | @ -1,16 +1,35 @@ | ||||||
| #!/bin/sh | #!/bin/sh | ||||||
|  | # start vnc server and listen for connections | ||||||
|  | # on port specced in `$VNC_SERVER_PORT` | ||||||
| 
 | 
 | ||||||
| # start VNC server |  | ||||||
| x11vnc \ | x11vnc \ | ||||||
|     -ncache_cr \ |     -listen 127.0.0.1 \ | ||||||
|     -listen localhost \ |     -allow 127.0.0.1 \ | ||||||
|  |     -rfbport "${VNC_SERVER_PORT}" \ | ||||||
|     -display :1 \ |     -display :1 \ | ||||||
|     -forever \ |     -forever \ | ||||||
|     -shared \ |     -shared \ | ||||||
|     -logappend /var/log/x11vnc.log \ |  | ||||||
|     -bg \ |     -bg \ | ||||||
|  |     -nowf \ | ||||||
|  |     -noxdamage \ | ||||||
|  |     -noxfixes \ | ||||||
|  |     -no6 \ | ||||||
|     -noipv6 \ |     -noipv6 \ | ||||||
|     -autoport 3003 \ | 
 | ||||||
|     # can't use this because of ``asyncvnc`` issue: | 
 | ||||||
|  |     # -nowcr \ | ||||||
|  |     # TODO: can't use this because of ``asyncvnc`` issue: | ||||||
|     # https://github.com/barneygale/asyncvnc/issues/1 |     # https://github.com/barneygale/asyncvnc/issues/1 | ||||||
|     # -passwd 'ibcansmbz' |     # -passwd 'ibcansmbz' | ||||||
|  | 
 | ||||||
|  |     # XXX: optional graphics caching flags that seem to rekt the overlay | ||||||
|  |     # of the 2 gw windows? When running a single gateway | ||||||
|  |     # this seems to maybe optimize some memory usage? | ||||||
|  |     # -ncache_cr \ | ||||||
|  |     # -ncache \ | ||||||
|  | 
 | ||||||
|  |     # NOTE: this will prevent logs from going to the console. | ||||||
|  |     # -logappend /var/log/x11vnc.log \ | ||||||
|  | 
 | ||||||
|  |     # where to start allocating ports | ||||||
|  |     # -autoport "${VNC_SERVER_PORT}" \ | ||||||
|  |  | ||||||
|  | @ -22,10 +22,10 @@ from typing import Optional, Union, Callable, Any | ||||||
| from contextlib import asynccontextmanager as acm | from contextlib import asynccontextmanager as acm | ||||||
| from collections import defaultdict | from collections import defaultdict | ||||||
| 
 | 
 | ||||||
| from pydantic import BaseModel | from msgspec import Struct | ||||||
|  | import tractor | ||||||
| import trio | import trio | ||||||
| from trio_typing import TaskStatus | from trio_typing import TaskStatus | ||||||
| import tractor |  | ||||||
| 
 | 
 | ||||||
| from .log import get_logger, get_console_log | from .log import get_logger, get_console_log | ||||||
| from .brokers import get_brokermod | from .brokers import get_brokermod | ||||||
|  | @ -47,16 +47,13 @@ _root_modules = [ | ||||||
| ] | ] | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class Services(BaseModel): | class Services(Struct): | ||||||
| 
 | 
 | ||||||
|     actor_n: tractor._supervise.ActorNursery |     actor_n: tractor._supervise.ActorNursery | ||||||
|     service_n: trio.Nursery |     service_n: trio.Nursery | ||||||
|     debug_mode: bool  # tractor sub-actor debug mode flag |     debug_mode: bool  # tractor sub-actor debug mode flag | ||||||
|     service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {} |     service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {} | ||||||
| 
 | 
 | ||||||
|     class Config: |  | ||||||
|         arbitrary_types_allowed = True |  | ||||||
| 
 |  | ||||||
|     async def start_service_task( |     async def start_service_task( | ||||||
|         self, |         self, | ||||||
|         name: str, |         name: str, | ||||||
|  | @ -220,7 +217,7 @@ async def open_piker_runtime( | ||||||
|             # TODO: eventually we should be able to avoid |             # TODO: eventually we should be able to avoid | ||||||
|             # having the root have more then permissions to |             # having the root have more then permissions to | ||||||
|             # spawn other specialized daemons I think? |             # spawn other specialized daemons I think? | ||||||
|             enable_modules=_root_modules, |             enable_modules=_root_modules + enable_modules, | ||||||
|         ) as _, |         ) as _, | ||||||
|     ): |     ): | ||||||
|         yield tractor.current_actor() |         yield tractor.current_actor() | ||||||
|  |  | ||||||
|  | @ -33,14 +33,13 @@ import asks | ||||||
| from fuzzywuzzy import process as fuzzy | from fuzzywuzzy import process as fuzzy | ||||||
| import numpy as np | import numpy as np | ||||||
| import tractor | import tractor | ||||||
| from pydantic.dataclasses import dataclass |  | ||||||
| from pydantic import BaseModel |  | ||||||
| import wsproto | import wsproto | ||||||
| 
 | 
 | ||||||
| from .._cacheables import open_cached_client | from .._cacheables import open_cached_client | ||||||
| from ._util import resproc, SymbolNotFound | from ._util import resproc, SymbolNotFound | ||||||
| from ..log import get_logger, get_console_log | from ..log import get_logger, get_console_log | ||||||
| from ..data import ShmArray | from ..data import ShmArray | ||||||
|  | from ..data.types import Struct | ||||||
| from ..data._web_bs import open_autorecon_ws, NoBsWs | from ..data._web_bs import open_autorecon_ws, NoBsWs | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
|  | @ -79,12 +78,14 @@ _show_wap_in_history = False | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # https://binance-docs.github.io/apidocs/spot/en/#exchange-information | # https://binance-docs.github.io/apidocs/spot/en/#exchange-information | ||||||
| class Pair(BaseModel): | class Pair(Struct, frozen=True): | ||||||
|     symbol: str |     symbol: str | ||||||
|     status: str |     status: str | ||||||
| 
 | 
 | ||||||
|     baseAsset: str |     baseAsset: str | ||||||
|     baseAssetPrecision: int |     baseAssetPrecision: int | ||||||
|  |     cancelReplaceAllowed: bool | ||||||
|  |     allowTrailingStop: bool | ||||||
|     quoteAsset: str |     quoteAsset: str | ||||||
|     quotePrecision: int |     quotePrecision: int | ||||||
|     quoteAssetPrecision: int |     quoteAssetPrecision: int | ||||||
|  | @ -104,14 +105,14 @@ class Pair(BaseModel): | ||||||
|     permissions: list[str] |     permissions: list[str] | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @dataclass | class OHLC(Struct): | ||||||
| class OHLC: |     ''' | ||||||
|     """Description of the flattened OHLC quote format. |     Description of the flattened OHLC quote format. | ||||||
| 
 | 
 | ||||||
|     For schema details see: |     For schema details see: | ||||||
|     https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams |     https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams | ||||||
| 
 | 
 | ||||||
|     """ |     ''' | ||||||
|     time: int |     time: int | ||||||
| 
 | 
 | ||||||
|     open: float |     open: float | ||||||
|  | @ -260,6 +261,7 @@ class Client: | ||||||
|         for i, bar in enumerate(bars): |         for i, bar in enumerate(bars): | ||||||
| 
 | 
 | ||||||
|             bar = OHLC(*bar) |             bar = OHLC(*bar) | ||||||
|  |             bar.typecast() | ||||||
| 
 | 
 | ||||||
|             row = [] |             row = [] | ||||||
|             for j, (name, ftype) in enumerate(_ohlc_dtype[1:]): |             for j, (name, ftype) in enumerate(_ohlc_dtype[1:]): | ||||||
|  | @ -287,7 +289,7 @@ async def get_client() -> Client: | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # validation type | # validation type | ||||||
| class AggTrade(BaseModel): | class AggTrade(Struct): | ||||||
|     e: str  # Event type |     e: str  # Event type | ||||||
|     E: int  # Event time |     E: int  # Event time | ||||||
|     s: str  # Symbol |     s: str  # Symbol | ||||||
|  | @ -341,7 +343,9 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]: | ||||||
| 
 | 
 | ||||||
|         elif msg.get('e') == 'aggTrade': |         elif msg.get('e') == 'aggTrade': | ||||||
| 
 | 
 | ||||||
|             # validate |             # NOTE: this is purely for a definition, ``msgspec.Struct`` | ||||||
|  |             # does not runtime-validate until you decode/encode. | ||||||
|  |             # see: https://jcristharif.com/msgspec/structs.html#type-validation | ||||||
|             msg = AggTrade(**msg) |             msg = AggTrade(**msg) | ||||||
| 
 | 
 | ||||||
|             # TODO: type out and require this quote format |             # TODO: type out and require this quote format | ||||||
|  | @ -352,8 +356,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]: | ||||||
|                 'brokerd_ts': time.time(), |                 'brokerd_ts': time.time(), | ||||||
|                 'ticks': [{ |                 'ticks': [{ | ||||||
|                     'type': 'trade', |                     'type': 'trade', | ||||||
|                     'price': msg.p, |                     'price': float(msg.p), | ||||||
|                     'size': msg.q, |                     'size': float(msg.q), | ||||||
|                     'broker_ts': msg.T, |                     'broker_ts': msg.T, | ||||||
|                 }], |                 }], | ||||||
|             } |             } | ||||||
|  | @ -448,7 +452,7 @@ async def stream_quotes( | ||||||
|             d = cache[sym.upper()] |             d = cache[sym.upper()] | ||||||
|             syminfo = Pair(**d)  # validation |             syminfo = Pair(**d)  # validation | ||||||
| 
 | 
 | ||||||
|             si = sym_infos[sym] = syminfo.dict() |             si = sym_infos[sym] = syminfo.to_dict() | ||||||
| 
 | 
 | ||||||
|             # XXX: after manually inspecting the response format we |             # XXX: after manually inspecting the response format we | ||||||
|             # just directly pick out the info we need |             # just directly pick out the info we need | ||||||
|  |  | ||||||
|  | @ -39,6 +39,148 @@ _config_dir = click.get_app_dir('piker') | ||||||
| _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json') | _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | OK = '\033[92m' | ||||||
|  | WARNING = '\033[93m' | ||||||
|  | FAIL = '\033[91m' | ||||||
|  | ENDC = '\033[0m' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def print_ok(s: str, **kwargs): | ||||||
|  |     print(OK + s + ENDC, **kwargs) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def print_error(s: str, **kwargs): | ||||||
|  |     print(FAIL + s + ENDC, **kwargs) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | def get_method(client, meth_name: str): | ||||||
|  |     print(f'checking client for method \'{meth_name}\'...', end='', flush=True) | ||||||
|  |     method = getattr(client, meth_name, None) | ||||||
|  |     assert method | ||||||
|  |     print_ok('found!.') | ||||||
|  |     return method | ||||||
|  | 
 | ||||||
|  | async def run_method(client, meth_name: str, **kwargs): | ||||||
|  |     method = get_method(client, meth_name) | ||||||
|  |     print('running...', end='', flush=True) | ||||||
|  |     result = await method(**kwargs) | ||||||
|  |     print_ok(f'done! result: {type(result)}') | ||||||
|  |     return result | ||||||
|  | 
 | ||||||
|  | async def run_test(broker_name: str): | ||||||
|  |     brokermod = get_brokermod(broker_name) | ||||||
|  |     total = 0 | ||||||
|  |     passed = 0 | ||||||
|  |     failed = 0 | ||||||
|  | 
 | ||||||
|  |     print(f'getting client...', end='', flush=True) | ||||||
|  |     if not hasattr(brokermod, 'get_client'): | ||||||
|  |         print_error('fail! no \'get_client\' context manager found.') | ||||||
|  |         return | ||||||
|  | 
 | ||||||
|  |     async with brokermod.get_client(is_brokercheck=True) as client: | ||||||
|  |         print_ok(f'done! inside client context.') | ||||||
|  | 
 | ||||||
|  |         # check for methods present on brokermod | ||||||
|  |         method_list = [ | ||||||
|  |             'backfill_bars', | ||||||
|  |             'get_client', | ||||||
|  |             'trades_dialogue', | ||||||
|  |             'open_history_client', | ||||||
|  |             'open_symbol_search', | ||||||
|  |             'stream_quotes', | ||||||
|  | 
 | ||||||
|  |         ] | ||||||
|  | 
 | ||||||
|  |         for method in method_list: | ||||||
|  |             print( | ||||||
|  |                 f'checking brokermod for method \'{method}\'...', | ||||||
|  |                 end='', flush=True) | ||||||
|  |             if not hasattr(brokermod, method): | ||||||
|  |                 print_error(f'fail! method \'{method}\' not found.') | ||||||
|  |                 failed += 1 | ||||||
|  |             else: | ||||||
|  |                 print_ok('done!') | ||||||
|  |                 passed += 1 | ||||||
|  | 
 | ||||||
|  |             total += 1 | ||||||
|  | 
 | ||||||
|  |         # check for methods present con brokermod.Client and their | ||||||
|  |         # results | ||||||
|  | 
 | ||||||
|  |         # for private methods only check is present | ||||||
|  |         method_list = [ | ||||||
|  |             'get_balances', | ||||||
|  |             'get_assets', | ||||||
|  |             'get_trades', | ||||||
|  |             'get_xfers', | ||||||
|  |             'submit_limit', | ||||||
|  |             'submit_cancel', | ||||||
|  |             'search_symbols', | ||||||
|  |         ] | ||||||
|  | 
 | ||||||
|  |         for method_name in method_list: | ||||||
|  |             try: | ||||||
|  |                 get_method(client, method_name) | ||||||
|  |                 passed += 1 | ||||||
|  | 
 | ||||||
|  |             except AssertionError: | ||||||
|  |                 print_error(f'fail! method \'{method_name}\' not found.') | ||||||
|  |                 failed += 1 | ||||||
|  | 
 | ||||||
|  |             total += 1 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |         # check for methods present con brokermod.Client and their | ||||||
|  |         # results | ||||||
|  | 
 | ||||||
|  |         syms = await run_method(client, 'symbol_info') | ||||||
|  |         total += 1 | ||||||
|  | 
 | ||||||
|  |         if len(syms) == 0: | ||||||
|  |             raise BaseException('Empty Symbol list?') | ||||||
|  | 
 | ||||||
|  |         passed += 1 | ||||||
|  | 
 | ||||||
|  |         first_sym = tuple(syms.keys())[0] | ||||||
|  | 
 | ||||||
|  |         method_list = [ | ||||||
|  |             ('cache_symbols', {}), | ||||||
|  |             ('search_symbols', {'pattern': first_sym[:-1]}), | ||||||
|  |             ('bars', {'symbol': first_sym}) | ||||||
|  |         ] | ||||||
|  | 
 | ||||||
|  |         for method_name, method_kwargs in method_list: | ||||||
|  |             try: | ||||||
|  |                 await run_method(client, method_name, **method_kwargs) | ||||||
|  |                 passed += 1 | ||||||
|  | 
 | ||||||
|  |             except AssertionError: | ||||||
|  |                 print_error(f'fail! method \'{method_name}\' not found.') | ||||||
|  |                 failed += 1 | ||||||
|  | 
 | ||||||
|  |             total += 1 | ||||||
|  | 
 | ||||||
|  |         print(f'total: {total}, passed: {passed}, failed: {failed}') | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @cli.command() | ||||||
|  | @click.argument('broker', nargs=1, required=True) | ||||||
|  | @click.pass_obj | ||||||
|  | def brokercheck(config, broker): | ||||||
|  |     ''' | ||||||
|  |     Test broker apis for completeness. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     async def bcheck_main(): | ||||||
|  |         async with maybe_spawn_brokerd(broker) as portal: | ||||||
|  |             await portal.run(run_test, broker) | ||||||
|  |             await portal.cancel_actor() | ||||||
|  | 
 | ||||||
|  |     trio.run(run_test, broker) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @cli.command() | @cli.command() | ||||||
| @click.option('--keys', '-k', multiple=True, | @click.option('--keys', '-k', multiple=True, | ||||||
|               help='Return results only for these keys') |               help='Return results only for these keys') | ||||||
|  | @ -193,6 +335,8 @@ def contracts(ctx, loglevel, broker, symbol, ids): | ||||||
|     brokermod = get_brokermod(broker) |     brokermod = get_brokermod(broker) | ||||||
|     get_console_log(loglevel) |     get_console_log(loglevel) | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|     contracts = trio.run(partial(core.contracts, brokermod, symbol)) |     contracts = trio.run(partial(core.contracts, brokermod, symbol)) | ||||||
|     if not ids: |     if not ids: | ||||||
|         # just print out expiry dates which can be used with |         # just print out expiry dates which can be used with | ||||||
|  |  | ||||||
|  | @ -0,0 +1,70 @@ | ||||||
|  | ``deribit`` backend | ||||||
|  | ------------------ | ||||||
|  | pretty good liquidity crypto derivatives, uses custom json rpc over ws for | ||||||
|  | client methods, then `cryptofeed` for data streams. | ||||||
|  | 
 | ||||||
|  | status | ||||||
|  | ****** | ||||||
|  | - supports option charts | ||||||
|  | - no order support yet  | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | config | ||||||
|  | ****** | ||||||
|  | In order to get order mode support your ``brokers.toml`` | ||||||
|  | needs to have something like the following: | ||||||
|  | 
 | ||||||
|  | .. code:: toml | ||||||
|  | 
 | ||||||
|  |     [deribit] | ||||||
|  |     key_id = 'XXXXXXXX' | ||||||
|  |     key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx' | ||||||
|  | 
 | ||||||
|  | To obtain an api id and secret you need to create an account, which can be a | ||||||
|  | real market account over at: | ||||||
|  | 
 | ||||||
|  |     - deribit.com  (requires KYC for deposit address) | ||||||
|  | 
 | ||||||
|  | Or a testnet account over at: | ||||||
|  | 
 | ||||||
|  |     - test.deribit.com | ||||||
|  | 
 | ||||||
|  | For testnet once the account is created here is how you deposit fake crypto to | ||||||
|  | try it out: | ||||||
|  | 
 | ||||||
|  | 1) Go to Wallet: | ||||||
|  | 
 | ||||||
|  | .. figure:: assets/0_wallet.png | ||||||
|  |     :align: center | ||||||
|  |     :target: assets/0_wallet.png | ||||||
|  |     :alt: wallet page | ||||||
|  | 
 | ||||||
|  | 2) Then click on the elipsis menu and select deposit | ||||||
|  | 
 | ||||||
|  | .. figure:: assets/1_wallet_select_deposit.png | ||||||
|  |     :align: center | ||||||
|  |     :target: assets/1_wallet_select_deposit.png | ||||||
|  |     :alt: wallet deposit page | ||||||
|  | 
 | ||||||
|  | 3) This will take you to the deposit address page | ||||||
|  | 
 | ||||||
|  | .. figure:: assets/2_gen_deposit_addr.png | ||||||
|  |     :align: center | ||||||
|  |     :target: assets/2_gen_deposit_addr.png | ||||||
|  |     :alt: generate deposit address page | ||||||
|  | 
 | ||||||
|  | 4) After clicking generate you should see the address, copy it and go to the  | ||||||
|  | `coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send fake | ||||||
|  | coins to that address. | ||||||
|  | 
 | ||||||
|  | .. figure:: assets/3_deposit_address.png | ||||||
|  |     :align: center | ||||||
|  |     :target: assets/3_deposit_address.png | ||||||
|  |     :alt: generated address | ||||||
|  | 
 | ||||||
|  | 5) Back in the deposit address page you should see the deposit in your history | ||||||
|  | 
 | ||||||
|  | .. figure:: assets/4_wallet_deposit_history.png | ||||||
|  |     :align: center | ||||||
|  |     :target: assets/4_wallet_deposit_history.png | ||||||
|  |     :alt: wallet deposit history | ||||||
|  | @ -0,0 +1,65 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | Deribit backend. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | 
 | ||||||
|  | from piker.log import get_logger | ||||||
|  | 
 | ||||||
|  | log = get_logger(__name__) | ||||||
|  | 
 | ||||||
|  | from .api import ( | ||||||
|  |     get_client, | ||||||
|  | ) | ||||||
|  | from .feed import ( | ||||||
|  |     open_history_client, | ||||||
|  |     open_symbol_search, | ||||||
|  |     stream_quotes, | ||||||
|  |     backfill_bars | ||||||
|  | ) | ||||||
|  | # from .broker import ( | ||||||
|  |     # trades_dialogue, | ||||||
|  |     # norm_trade_records, | ||||||
|  | # ) | ||||||
|  | 
 | ||||||
# public broker-backend API surface re-exported from the submodules;
# order-entry names stay commented out until ``broker.py`` lands
__all__ = [
    'get_client',
#    'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
#    'norm_trade_records',
]


# tractor RPC enable arg: submodules whose functions may be invoked
# remotely by the ``brokerd`` actor machinery
__enable_modules__: list[str] = [
    'api',
    'feed',
#   'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True
|  | @ -0,0 +1,667 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | Deribit backend. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | import json | ||||||
|  | import time | ||||||
|  | import asyncio | ||||||
|  | 
 | ||||||
|  | from contextlib import asynccontextmanager as acm, AsyncExitStack | ||||||
|  | from functools import partial | ||||||
|  | from datetime import datetime | ||||||
|  | from typing import Any, Optional, Iterable, Callable | ||||||
|  | 
 | ||||||
|  | import pendulum | ||||||
|  | import asks | ||||||
|  | import trio | ||||||
|  | from trio_typing import Nursery, TaskStatus | ||||||
|  | from fuzzywuzzy import process as fuzzy | ||||||
|  | import numpy as np | ||||||
|  | 
 | ||||||
|  | from piker.data.types import Struct | ||||||
|  | from piker.data._web_bs import ( | ||||||
|  |     NoBsWs, | ||||||
|  |     open_autorecon_ws, | ||||||
|  |     open_jsonrpc_session | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | from .._util import resproc | ||||||
|  | 
 | ||||||
|  | from piker import config | ||||||
|  | from piker.log import get_logger | ||||||
|  | 
 | ||||||
|  | from tractor.trionics import ( | ||||||
|  |     broadcast_receiver, | ||||||
|  |     BroadcastReceiver, | ||||||
|  |     maybe_open_context | ||||||
|  | ) | ||||||
|  | from tractor import to_asyncio | ||||||
|  | 
 | ||||||
|  | from cryptofeed import FeedHandler | ||||||
|  | 
 | ||||||
from cryptofeed.defines import (
    CALL,
    DERIBIT,
    FILLS,
    L1_BOOK,
    OPTION,
    ORDER_INFO,
    PUT,
    TRADES,
)
|  | from cryptofeed.symbols import Symbol | ||||||
|  | 
 | ||||||
|  | log = get_logger(__name__) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
# passed to ``tractor.ActorNursery.start_actor()`` so the backend actor
# runs with an embedded asyncio loop (required by cryptofeed)
_spawn_kwargs = {
    'infect_asyncio': True,
}


_url = 'https://www.deribit.com'
_ws_url = 'wss://www.deribit.com/ws/api/v2'
# NOTE(review): ``get_client()`` below connects to the testnet url
# unconditionally — confirm mainnet support before live use.
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'


# Broker specific ohlc schema (rest)
_ohlc_dtype = [
    ('index', int),
    # NOTE(review): ``Client.bars()`` writes float epoch *seconds* into
    # this field; an int dtype truncates sub-second precision — confirm
    # that is intended.
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),  # will be zeroed by sampler if not filled
]
|  | 
 | ||||||
|  | 
 | ||||||
class JSONRPCResult(Struct):
    '''
    Envelope of a deribit JSON-RPC 2.0 response.

    FIX: required fields (``id``, ``usIn``, ...) were declared *after*
    defaulted ones (``jsonrpc``, ``result``, ``error``) which struct
    types reject (defaults must come last); reordered accordingly.
    Construction is by field name (decoded from the wire) so this is
    caller-compatible.
    '''
    id: int
    usIn: int    # server receive timestamp (microseconds)
    usOut: int   # server response timestamp (microseconds)
    usDiff: int  # server-side processing time (microseconds)
    testnet: bool
    jsonrpc: str = '2.0'
    # per the JSON-RPC spec exactly one of these is populated
    result: Optional[dict] = None
    error: Optional[dict] = None
|  | 
 | ||||||
|  | 
 | ||||||
class KLinesResult(Struct):
    '''
    Payload of ``public/get_tradingview_chart_data``: parallel
    per-bar arrays, one entry per candle.

    '''
    close: list[float]
    cost: list[float]   # per-bar traded cost/notional
    high: list[float]
    low: list[float]
    open: list[float]
    status: str         # presumably 'ok' or 'no_data' — confirm vs API docs
    ticks: list[int]    # assumes bar open times in epoch ms — TODO confirm
    volume: list[float]
|  | 
 | ||||||
class Trade(Struct):
    '''
    A single trade event as returned by
    ``public/get_last_trades_by_instrument``.

    FIX: the optional combo fields had trailing commas which made the
    defaults the tuples ``(0,)`` / ``('',)`` instead of ``0`` / ``''``;
    also the required ``amount`` field was declared after defaulted
    fields (struct types require defaults last). Decoding is by field
    name so reordering is caller-compatible.
    '''
    trade_seq: int
    trade_id: str
    timestamp: int  # epoch ms
    tick_direction: int
    price: float
    mark_price: float
    iv: float  # implied vol; only meaningful for option instruments
    instrument_name: str
    index_price: float
    direction: str  # 'buy' | 'sell'
    amount: float
    combo_trade_id: Optional[int] = 0
    combo_id: Optional[str] = ''
|  | 
 | ||||||
class LastTradesResult(Struct):
    '''
    Payload of ``public/get_last_trades_by_instrument``.

    '''
    trades: list[Trade]
    has_more: bool  # true when further pages are available
|  | 
 | ||||||
|  | 
 | ||||||
def deribit_timestamp(when) -> int:
    '''
    Convert a (pendulum/stdlib) ``datetime`` to the integer
    epoch-milliseconds format deribit expects.

    FIX: ``datetime.timestamp()`` already returns fractional seconds
    including microseconds, so the previous extra
    ``+ when.microsecond / 1000`` term double-counted sub-second time
    (up to ~1ms of error).
    '''
    return int(when.timestamp() * 1000)
|  | 
 | ||||||
|  | 
 | ||||||
def str_to_cb_sym(name: str) -> Symbol:
    '''
    Parse a dash-delimited cryptofeed symbol string into an option
    ``cryptofeed.symbols.Symbol``.

    '''
    base, strike_price, expiry_date, option_type = name.split('-')

    # options are quoted in the underlying currency
    quote = base

    if option_type not in ('put', 'call'):
        raise Exception("Couldn't parse option type")

    option_type = PUT if option_type == 'put' else CALL

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date,
        expiry_normalize=False)
|  | 
 | ||||||
|  | 
 | ||||||
def piker_sym_to_cb_sym(name: str) -> Symbol:
    '''
    Convert piker's ``base-expiry-strike-type`` option symbol (any
    case) into a cryptofeed ``Symbol``.

    '''
    base, expiry_date, strike_price, option_type = name.upper().split('-')

    # options are quoted in the underlying currency
    quote = base

    if option_type not in ('P', 'C'):
        raise Exception("Couldn't parse option type")

    option_type = PUT if option_type == 'P' else CALL

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date.upper())
|  | 
 | ||||||
|  | 
 | ||||||
def cb_sym_to_deribit_inst(sym: Symbol):
    '''
    Render a cryptofeed ``Symbol`` back into deribit's native
    instrument name, eg. ``BTC-30JUN22-30000-C``.

    '''
    # cryptofeed normalizes months to single futures month codes
    month_codes = 'FGHJKMNQUVXZ'

    # deribit wants three-letter month names
    month_names = [
        'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
        'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC',
    ]

    # expiry layout is 'YYMDD': 2-digit year, month code, day
    exp = sym.expiry_date
    year = exp[:2]
    month = month_names[month_codes.index(exp[2:3])]
    day = exp[3:]

    otype = 'C' if sym.option_type == CALL else 'P'

    return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'
|  | 
 | ||||||
|  | 
 | ||||||
def get_config() -> dict[str, Any]:
    '''
    Load the piker broker config and inject the logging params
    cryptofeed's ``FeedHandler`` expects.

    Returns the *whole* conf dict, not just the ``deribit`` section.

    '''
    conf, path = config.load()

    # cryptofeed reads this sub-dict to configure its own logging;
    # we disable it entirely.
    conf['log'] = {'disabled': True}

    if conf.get('deribit') is None:
        log.warning(f'No config section found for deribit in {path}')

    return conf
|  | 
 | ||||||
|  | 
 | ||||||
class Client:
    '''
    Thin client for deribit's JSON-RPC api.

    All requests go through the ``json_rpc`` callable injected at
    construction (see ``open_jsonrpc_session()``) so this type carries
    no transport state of its own.

    '''
    def __init__(self, json_rpc: Callable) -> None:
        # instrument info cache, lazily filled by ``cache_symbols()``
        self._pairs: dict[str, Any] = None

        # NOTE: named ``conf`` (not ``config``) to avoid shadowing the
        # ``piker.config`` module imported at file scope.
        conf = get_config().get('deribit', {})

        if ('key_id' in conf) and ('key_secret' in conf):
            self._key_id = conf['key_id']
            self._key_secret = conf['key_secret']
        else:
            # no credentials configured -> public endpoints only
            self._key_id = None
            self._key_secret = None

        self.json_rpc = json_rpc

    @property
    def currencies(self):
        # settlement currencies we query balances/positions for
        return ['btc', 'eth', 'sol', 'usd']

    async def get_balances(self, kind: str = 'option') -> dict[str, float]:
        '''
        Return the set of positions for this account keyed by
        currency (requires auth).

        '''
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_positions', params={
                    'currency': currency.upper(),
                    'kind': kind})

            balances[currency] = resp.result

        return balances

    async def get_assets(self) -> dict[str, float]:
        '''
        Return the cash balance per currency for this account
        (requires auth).

        '''
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_account_summary', params={
                    'currency': currency.upper()})

            balances[currency] = resp.result['balance']

        return balances

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,  # 'buy' | 'sell'; becomes the rpc method name
        size: float
    ) -> dict:
        '''
        Place a limit order and return the rpc result payload.

        '''
        params = {
            'instrument_name': symbol.upper(),
            'amount': size,
            'type': 'limit',
            'price': price,
        }
        resp = await self.json_rpc(
            f'private/{action}', params)

        return resp.result

    async def submit_cancel(self, oid: str):
        '''
        Send a cancel request for order id ``oid``.

        '''
        resp = await self.json_rpc(
            'private/cancel', {'order_id': oid})
        return resp.result

    async def symbol_info(
        self,
        instrument: Optional[str] = None,
        currency: str = 'btc',  # BTC, ETH, SOL, USDC
        kind: str = 'option',
        expired: bool = False
    ) -> dict[str, Any]:
        '''
        Get instrument metadata keyed by lower-cased instrument name;
        if ``instrument`` is provided only that single entry is
        returned.

        '''
        # serve from cache once populated (see ``cache_symbols()``)
        if self._pairs:
            return self._pairs

        # will retrieve all symbols by default
        params = {
            'currency': currency.upper(),
            'kind': kind,
            'expired': str(expired).lower()
        }

        resp = await self.json_rpc('public/get_instruments', params)
        results = resp.result

        instruments = {
            item['instrument_name'].lower(): item
            for item in results
        }

        if instrument is not None:
            return instruments[instrument]
        else:
            return instruments

    async def cache_symbols(
        self,
    ) -> dict:
        # populate (once) and return the instrument info cache
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = 30,
    ) -> dict[str, Any]:
        '''
        Fuzzy-search instrument names against ``pattern``.

        '''
        data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=35,
            limit=limit
        )
        # repack in dict form
        return {item[0]['instrument_name'].lower(): item[0]
                for item in matches}

    async def bars(
        self,
        symbol: str,
        start_dt: Optional[datetime] = None,
        end_dt: Optional[datetime] = None,
        limit: int = 1000,
        as_np: bool = True,
    ) -> dict:
        '''
        Retrieve up to ``limit`` 1m OHLCV bars for ``symbol`` ending at
        ``end_dt`` (defaults to now).

        '''
        instrument = symbol

        if end_dt is None:
            end_dt = pendulum.now('UTC')

        if start_dt is None:
            start_dt = end_dt.start_of(
                'minute').subtract(minutes=limit)

        start_time = deribit_timestamp(start_dt)
        end_time = deribit_timestamp(end_dt)

        # https://docs.deribit.com/#public-get_tradingview_chart_data
        resp = await self.json_rpc(
            'public/get_tradingview_chart_data',
            params={
                'instrument_name': instrument.upper(),
                'start_timestamp': start_time,
                'end_timestamp': end_time,
                'resolution': '1'
            })

        result = KLinesResult(**resp.result)
        new_bars = []
        for i in range(len(result.close)):
            row = [
                # bar open time in float epoch seconds
                (start_time + (i * (60 * 1000))) / 1000.0,
                result.open[i],
                result.high[i],
                result.low[i],
                result.close[i],
                result.volume[i],
                0  # bar_wap, filled by the sampler
            ]

            new_bars.append((i,) + tuple(row))

        # FIX: the ``as_np=False`` branch referenced an undefined
        # ``klines`` name (NameError); return the assembled row list.
        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else new_bars
        return array

    async def last_trades(
        self,
        instrument: str,
        count: int = 10
    ):
        '''
        Fetch the most recent ``count`` public trades for
        ``instrument``.

        '''
        resp = await self.json_rpc(
            'public/get_last_trades_by_instrument',
            params={
                'instrument_name': instrument,
                'count': count
            })

        return LastTradesResult(**resp.result)
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def get_client(
    is_brokercheck: bool = False
) -> Client:
    '''
    Spawn a ``Client`` wired to a jsonrpc websocket session, with a
    background token-refresh task when api credentials are configured.

    NOTE(review): always connects to ``_testnet_ws_url`` — confirm
    mainnet support before production use.
    '''
    async with (
        trio.open_nursery() as n,
        open_jsonrpc_session(
            _testnet_ws_url, dtype=JSONRPCResult) as json_rpc
    ):
        client = Client(json_rpc)

        _refresh_token: Optional[str] = None
        _access_token: Optional[str] = None

        async def _auth_loop(
            task_status: TaskStatus = trio.TASK_STATUS_IGNORED
        ):
            '''Background task that acquires a first access token and
            then keeps it refreshed while the nursery lives.

            https://docs.deribit.com/?python#authentication-2
            '''
            renew_time = 10  # refresh when < this many seconds remain
            access_scope = 'trade:read_write'
            # initializing expiry to "now" forces an immediate first auth
            _expiry_time = time.time()
            got_access = False
            nonlocal _refresh_token
            nonlocal _access_token

            while True:
                # FIX: compare *remaining* token lifetime against
                # ``renew_time``; the original tested
                # ``time.time() - _expiry_time < renew_time`` which goes
                # (and stays) strongly negative after the first auth, so
                # the loop re-authenticated continuously and the sleep
                # branch was unreachable.
                if _expiry_time - time.time() < renew_time:

                    if _refresh_token is not None:
                        # already authed: rotate via the refresh token so
                        # the secret never goes over the wire again
                        params = {
                            'grant_type': 'refresh_token',
                            'refresh_token': _refresh_token,
                            'scope': access_scope
                        }

                    else:
                        # first auth: exchange the client credentials
                        params = {
                            'grant_type': 'client_credentials',
                            'client_id': client._key_id,
                            'client_secret': client._key_secret,
                            'scope': access_scope
                        }

                    resp = await json_rpc('public/auth', params)
                    result = resp.result

                    _expiry_time = time.time() + result['expires_in']
                    _refresh_token = result['refresh_token']

                    if 'access_token' in result:
                        _access_token = result['access_token']

                    if not got_access:
                        # first successful auth: signal task readiness
                        got_access = True
                        task_status.started()

                else:
                    await trio.sleep(renew_time / 2)

        # if we have client creds launch auth loop
        if client._key_id is not None:
            await n.start(_auth_loop)

        await client.cache_symbols()
        yield client
        n.cancel_scope.cancel()
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def open_feed_handler():
    '''
    Yield a fresh cryptofeed ``FeedHandler``, stopping its (asyncio
    side) machinery on context exit.

    '''
    handler = FeedHandler(config=get_config())
    yield handler
    await to_asyncio.run_task(handler.stop_async)
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def maybe_open_feed_handler() -> trio.abc.ReceiveStream:
    '''
    Attach to the process-wide (cached) ``FeedHandler``, creating it
    on first use.

    '''
    async with maybe_open_context(
        acm_func=open_feed_handler,
        key='feedhandler',
    ) as (_cache_hit, handler):
        yield handler
|  | 
 | ||||||
|  | 
 | ||||||
async def aio_price_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    '''
    Asyncio-side task: register trade and L1-book callbacks on the
    cryptofeed handler and relay every event to the trio side through
    ``to_trio``. Run via ``tractor.to_asyncio.open_channel_from()``.

    '''
    async def _trade(data: dict, receipt_timestamp):
        # repack a cryptofeed trade event into piker's quote-dict shape
        to_trio.send_nowait(('trade', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'last': data,
            'broker_ts': time.time(),
            'data': data.to_dict(),
            'receipt': receipt_timestamp
        }))

    async def _l1(data: dict, receipt_timestamp):
        # emit best bid/ask (and their sizes) as piker tick events
        to_trio.send_nowait(('l1', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'ticks': [
                {'type': 'bid',
                    'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'bsize',
                    'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'ask',
                    'price': float(data.ask_price), 'size': float(data.ask_size)},
                {'type': 'asize',
                    'price': float(data.ask_price), 'size': float(data.ask_size)}
            ]
        }))

    fh.add_feed(
        DERIBIT,
        channels=[TRADES, L1_BOOK],
        symbols=[piker_sym_to_cb_sym(instrument)],
        callbacks={
            TRADES: _trade,
            L1_BOOK: _l1
        })

    # the handler may be shared/cached: only start it once
    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    # park forever; cancellation arrives from the trio side
    await asyncio.sleep(float('inf'))
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    '''
    Open a live trade/L1 quote stream for ``instrument`` backed by an
    asyncio-side cryptofeed relay task.

    '''
    async with maybe_open_feed_handler() as fh:
        relay = partial(aio_price_feed_relay, fh, instrument)
        async with to_asyncio.open_channel_from(relay) as (_first, chan):
            yield chan
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def maybe_open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    '''
    Open (or attach to an already running) price feed for
    ``instrument``.

    '''
    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_price_feed,
        kwargs={'instrument': instrument},
        key=f'{instrument}-price',
    ) as (cache_hit, feed):
        # on a cache hit another consumer owns the underlying stream,
        # so hand back an independent broadcast copy instead
        yield broadcast_receiver(feed, 10) if cache_hit else feed
|  | 
 | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
async def aio_order_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    '''
    Asyncio-side task: subscribe to private fill and order-info
    channels for ``instrument`` and relay events to the trio side.
    Run via ``tractor.to_asyncio.open_channel_from()``.

    '''
    # TODO: order support is unimplemented; these ``breakpoint()``
    # placeholders will trap the asyncio loop if an event ever fires —
    # replace with real relaying before enabling the broker module.
    async def _fill(data: dict, receipt_timestamp):
        breakpoint()

    async def _order_info(data: dict, receipt_timestamp):
        breakpoint()

    fh.add_feed(
        DERIBIT,
        channels=[FILLS, ORDER_INFO],
        symbols=[instrument.upper()],
        callbacks={
            FILLS: _fill,
            ORDER_INFO: _order_info,
        })

    # the handler may be shared/cached: only start it once
    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    # park forever; cancellation arrives from the trio side
    await asyncio.sleep(float('inf'))
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def open_order_feed(
    instrument: list[str]
) -> trio.abc.ReceiveStream:
    '''
    Open a private order/fill event stream for ``instrument`` backed
    by an asyncio-side cryptofeed relay task.

    '''
    async with maybe_open_feed_handler() as fh:
        relay = partial(aio_order_feed_relay, fh, instrument)
        async with to_asyncio.open_channel_from(relay) as (_first, chan):
            yield chan
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def maybe_open_order_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    '''
    Open (or attach to an already running) order/fill feed for
    ``instrument``.

    '''
    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_order_feed,
        kwargs={
            # FIX: dropped the stray ``'fh': fh`` kwarg — ``fh`` was
            # undefined in this scope (NameError on first call) and
            # ``open_order_feed`` acquires its own feed handler.
            'instrument': instrument,
        },
        key=f'{instrument}-order',
    ) as (cache_hit, feed):
        if cache_hit:
            # another consumer owns the stream: fan out a copy
            yield broadcast_receiver(feed, 10)
        else:
            yield feed
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 169 KiB | 
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 106 KiB | 
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 59 KiB | 
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 70 KiB | 
										
											Binary file not shown.
										
									
								
							| After Width: | Height: | Size: 132 KiB | 
|  | @ -0,0 +1,200 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | Deribit backend. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | from contextlib import asynccontextmanager as acm | ||||||
|  | from datetime import datetime | ||||||
|  | from typing import Any, Optional, Callable | ||||||
|  | import time | ||||||
|  | 
 | ||||||
|  | import trio | ||||||
|  | from trio_typing import TaskStatus | ||||||
|  | import pendulum | ||||||
|  | from fuzzywuzzy import process as fuzzy | ||||||
|  | import numpy as np | ||||||
|  | import tractor | ||||||
|  | 
 | ||||||
|  | from piker._cacheables import open_cached_client | ||||||
|  | from piker.log import get_logger, get_console_log | ||||||
|  | from piker.data import ShmArray | ||||||
|  | from piker.brokers._util import ( | ||||||
|  |     BrokerError, | ||||||
|  |     DataUnavailable, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | from cryptofeed import FeedHandler | ||||||
|  | 
 | ||||||
|  | from cryptofeed.defines import ( | ||||||
|  |     DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT | ||||||
|  | ) | ||||||
|  | from cryptofeed.symbols import Symbol | ||||||
|  | 
 | ||||||
|  | from .api import ( | ||||||
|  |     Client, Trade, | ||||||
|  |     get_config, | ||||||
|  |     str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst, | ||||||
|  |     maybe_open_price_feed | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | _spawn_kwargs = { | ||||||
|  |     'infect_asyncio': True, | ||||||
|  | } | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | log = get_logger(__name__) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @acm | ||||||
|  | async def open_history_client( | ||||||
|  |     instrument: str, | ||||||
|  | ) -> tuple[Callable, int]: | ||||||
|  | 
 | ||||||
|  |     # TODO implement history getter for the new storage layer. | ||||||
|  |     async with open_cached_client('deribit') as client: | ||||||
|  | 
 | ||||||
|  |         async def get_ohlc( | ||||||
|  |             end_dt: Optional[datetime] = None, | ||||||
|  |             start_dt: Optional[datetime] = None, | ||||||
|  | 
 | ||||||
|  |         ) -> tuple[ | ||||||
|  |             np.ndarray, | ||||||
|  |             datetime,  # start | ||||||
|  |             datetime,  # end | ||||||
|  |         ]: | ||||||
|  | 
 | ||||||
|  |             array = await client.bars( | ||||||
|  |                 instrument, | ||||||
|  |                 start_dt=start_dt, | ||||||
|  |                 end_dt=end_dt, | ||||||
|  |             ) | ||||||
|  |             if len(array) == 0: | ||||||
|  |                 raise DataUnavailable | ||||||
|  | 
 | ||||||
|  |             start_dt = pendulum.from_timestamp(array[0]['time']) | ||||||
|  |             end_dt = pendulum.from_timestamp(array[-1]['time']) | ||||||
|  | 
 | ||||||
|  |             return array, start_dt, end_dt | ||||||
|  | 
 | ||||||
|  |         yield get_ohlc, {'erlangs': 3, 'rate': 3} | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def backfill_bars( | ||||||
|  |     symbol: str, | ||||||
|  |     shm: ShmArray,  # type: ignore # noqa | ||||||
|  |     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED, | ||||||
|  | ) -> None: | ||||||
|  |     """Fill historical bars into shared mem / storage afap. | ||||||
|  |     """ | ||||||
|  |     instrument = symbol | ||||||
|  |     with trio.CancelScope() as cs: | ||||||
|  |         async with open_cached_client('deribit') as client: | ||||||
|  |             bars = await client.bars(instrument) | ||||||
|  |             shm.push(bars) | ||||||
|  |             task_status.started(cs) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def stream_quotes( | ||||||
|  | 
 | ||||||
|  |     send_chan: trio.abc.SendChannel, | ||||||
|  |     symbols: list[str], | ||||||
|  |     feed_is_live: trio.Event, | ||||||
|  |     loglevel: str = None, | ||||||
|  | 
 | ||||||
|  |     # startup sync | ||||||
|  |     task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED, | ||||||
|  | 
 | ||||||
|  | ) -> None: | ||||||
|  |     # XXX: required to propagate ``tractor`` loglevel to piker logging | ||||||
|  |     get_console_log(loglevel or tractor.current_actor().loglevel) | ||||||
|  | 
 | ||||||
|  |     sym = symbols[0] | ||||||
|  | 
 | ||||||
|  |     async with ( | ||||||
|  |         open_cached_client('deribit') as client, | ||||||
|  |         send_chan as send_chan | ||||||
|  |     ): | ||||||
|  | 
 | ||||||
|  |         init_msgs = { | ||||||
|  |             # pass back token, and bool, signalling if we're the writer | ||||||
|  |             # and that history has been written | ||||||
|  |             sym: { | ||||||
|  |                 'symbol_info': { | ||||||
|  |                     'asset_type': 'option', | ||||||
|  |                     'price_tick_size': 0.0005 | ||||||
|  |                 }, | ||||||
|  |                 'shm_write_opts': {'sum_tick_vml': False}, | ||||||
|  |                 'fqsn': sym, | ||||||
|  |             }, | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |         nsym = piker_sym_to_cb_sym(sym) | ||||||
|  | 
 | ||||||
|  |         async with maybe_open_price_feed(sym) as stream: | ||||||
|  | 
 | ||||||
|  |             cache = await client.cache_symbols() | ||||||
|  | 
 | ||||||
|  |             last_trades = (await client.last_trades( | ||||||
|  |                 cb_sym_to_deribit_inst(nsym), count=1)).trades | ||||||
|  | 
 | ||||||
|  |             if len(last_trades) == 0: | ||||||
|  |                 last_trade = None | ||||||
|  |                 async for typ, quote in stream: | ||||||
|  |                     if typ == 'trade': | ||||||
|  |                         last_trade = Trade(**(quote['data'])) | ||||||
|  |                         break | ||||||
|  | 
 | ||||||
|  |             else: | ||||||
|  |                 last_trade = Trade(**(last_trades[0])) | ||||||
|  | 
 | ||||||
|  |             first_quote = { | ||||||
|  |                 'symbol': sym, | ||||||
|  |                 'last': last_trade.price, | ||||||
|  |                 'brokerd_ts': last_trade.timestamp, | ||||||
|  |                 'ticks': [{ | ||||||
|  |                     'type': 'trade', | ||||||
|  |                     'price': last_trade.price, | ||||||
|  |                     'size': last_trade.amount, | ||||||
|  |                     'broker_ts': last_trade.timestamp | ||||||
|  |                 }] | ||||||
|  |             } | ||||||
|  |             task_status.started((init_msgs,  first_quote)) | ||||||
|  | 
 | ||||||
|  |             feed_is_live.set() | ||||||
|  | 
 | ||||||
|  |             async for typ, quote in stream: | ||||||
|  |                 topic = quote['symbol'] | ||||||
|  |                 await send_chan.send({topic: quote}) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @tractor.context | ||||||
|  | async def open_symbol_search( | ||||||
|  |     ctx: tractor.Context, | ||||||
|  | ) -> Client: | ||||||
|  |     async with open_cached_client('deribit') as client: | ||||||
|  | 
 | ||||||
|  |         # load all symbols locally for fast search | ||||||
|  |         cache = await client.cache_symbols() | ||||||
|  |         await ctx.started() | ||||||
|  | 
 | ||||||
|  |         async with ctx.open_stream() as stream: | ||||||
|  | 
 | ||||||
|  |             async for pattern in stream: | ||||||
|  |                 # repack in dict form | ||||||
|  |                 await stream.send( | ||||||
|  |                     await client.search_symbols(pattern)) | ||||||
|  | @ -0,0 +1,134 @@ | ||||||
|  | ``ib`` backend | ||||||
|  | -------------- | ||||||
|  | more or less the "everything broker" for traditional and international | ||||||
|  | markets. they are the "go to" provider for automatic retail trading | ||||||
|  | and we interface to their APIs using the `ib_insync` project. | ||||||
|  | 
 | ||||||
|  | status | ||||||
|  | ****** | ||||||
|  | current support is *production grade* and both real-time data and order | ||||||
|  | management should be correct and fast. this backend is used by core devs | ||||||
|  | for live trading. | ||||||
|  | 
 | ||||||
|  | currently there is not yet full support for: | ||||||
|  | - options charting and trading | ||||||
|  | - paxos based crypto rt feeds and trading | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | config | ||||||
|  | ****** | ||||||
|  | In order to get order mode support your ``brokers.toml`` | ||||||
|  | needs to have something like the following: | ||||||
|  | 
 | ||||||
|  | .. code:: toml | ||||||
|  | 
 | ||||||
|  |    [ib] | ||||||
|  |    hosts = [ | ||||||
|  |     "127.0.0.1", | ||||||
|  |    ] | ||||||
|  |    # TODO: when we eventually spawn gateways in our | ||||||
|  |    # container, we can just dynamically allocate these | ||||||
|  |    # using IBC. | ||||||
|  |    ports = [ | ||||||
|  |        4002, | ||||||
|  |        4003, | ||||||
|  |        4006, | ||||||
|  |        4001, | ||||||
|  |        7497, | ||||||
|  |    ] | ||||||
|  | 
 | ||||||
|  |    # XXX: for a paper account the flex web query service | ||||||
|  |    # is not supported so you have to manually download | ||||||
|  |    # and XML report and put it in a location that can be | ||||||
|  |    # accessed by the ``brokerd.ib`` backend code for parsing. | ||||||
|  |    flex_token = '1111111111111111' | ||||||
|  |    flex_trades_query_id = '6969696'  # live accounts only? | ||||||
|  | 
 | ||||||
|  |    # 3rd party web-api token | ||||||
|  |    # (XXX: not sure if this works yet) | ||||||
|  |    trade_log_token = '111111111111111' | ||||||
|  | 
 | ||||||
|  |    # when clients are being scanned this determines | ||||||
|  |    # which clients are preferred to be used for data feeds | ||||||
|  |    # based on account names which are detected as active | ||||||
|  |    # on each client. | ||||||
|  |    prefer_data_account = [ | ||||||
|  |        # this has to be first in order to make data work with dual paper + live | ||||||
|  |        'main', | ||||||
|  |        'algopaper', | ||||||
|  |    ] | ||||||
|  | 
 | ||||||
|  |    [ib.accounts] | ||||||
|  |    main = 'U69696969' | ||||||
|  |    algopaper = 'DU9696969' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | If everything works correctly you should see any current positions | ||||||
|  | loaded in the pps pane on chart load and you should also be able to | ||||||
|  | check your trade records in the file:: | ||||||
|  | 
 | ||||||
|  |     <pikerk_conf_dir>/ledgers/trades_ib_algopaper.toml | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | An example ledger file will have entries written verbatim from the | ||||||
|  | trade events schema: | ||||||
|  | 
 | ||||||
|  | .. code:: toml | ||||||
|  | 
 | ||||||
|  |     ["0000e1a7.630f5e5a.01.01"] | ||||||
|  |     secType = "FUT" | ||||||
|  |     conId = 515416577 | ||||||
|  |     symbol = "MNQ" | ||||||
|  |     lastTradeDateOrContractMonth = "20221216" | ||||||
|  |     strike = 0.0 | ||||||
|  |     right = "" | ||||||
|  |     multiplier = "2" | ||||||
|  |     exchange = "GLOBEX" | ||||||
|  |     primaryExchange = "" | ||||||
|  |     currency = "USD" | ||||||
|  |     localSymbol = "MNQZ2" | ||||||
|  |     tradingClass = "MNQ" | ||||||
|  |     includeExpired = false | ||||||
|  |     secIdType = "" | ||||||
|  |     secId = "" | ||||||
|  |     comboLegsDescrip = "" | ||||||
|  |     comboLegs = [] | ||||||
|  |     execId = "0000e1a7.630f5e5a.01.01" | ||||||
|  |     time = 1661972086.0 | ||||||
|  |     acctNumber = "DU69696969" | ||||||
|  |     side = "BOT" | ||||||
|  |     shares = 1.0 | ||||||
|  |     price = 12372.75 | ||||||
|  |     permId = 441472655 | ||||||
|  |     clientId = 6116 | ||||||
|  |     orderId = 985 | ||||||
|  |     liquidation = 0 | ||||||
|  |     cumQty = 1.0 | ||||||
|  |     avgPrice = 12372.75 | ||||||
|  |     orderRef = "" | ||||||
|  |     evRule = "" | ||||||
|  |     evMultiplier = 0.0 | ||||||
|  |     modelCode = "" | ||||||
|  |     lastLiquidity = 1 | ||||||
|  |     broker_time = 1661972086.0 | ||||||
|  |     name = "ib" | ||||||
|  |     commission = 0.57 | ||||||
|  |     realizedPNL = 243.41 | ||||||
|  |     yield_ = 0.0 | ||||||
|  |     yieldRedemptionDate = 0 | ||||||
|  |     listingExchange = "GLOBEX" | ||||||
|  |     date = "2022-08-31T18:54:46+00:00" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | your ``pps.toml`` file will have position entries like, | ||||||
|  | 
 | ||||||
|  | .. code:: toml | ||||||
|  | 
 | ||||||
|  |     [ib.algopaper."mnq.globex.20221216"] | ||||||
|  |     size = -1.0 | ||||||
|  |     ppu = 12423.630576923071 | ||||||
|  |     bsuid = 515416577 | ||||||
|  |     expiry = "2022-12-16T00:00:00+00:00" | ||||||
|  |     clears = [ | ||||||
|  |      { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" }, | ||||||
|  |     ] | ||||||
|  | @ -20,15 +20,10 @@ Interactive Brokers API backend. | ||||||
| Sub-modules within break into the core functionalities: | Sub-modules within break into the core functionalities: | ||||||
| 
 | 
 | ||||||
| - ``broker.py`` part for orders / trading endpoints | - ``broker.py`` part for orders / trading endpoints | ||||||
| - ``data.py`` for real-time data feed endpoints | - ``feed.py`` for real-time data feed endpoints | ||||||
| 
 | - ``api.py`` for the core API machinery which is ``trio``-ized | ||||||
| - ``client.py`` for the core API machinery which is ``trio``-ized |  | ||||||
|   wrapping around ``ib_insync``. |   wrapping around ``ib_insync``. | ||||||
| 
 | 
 | ||||||
| - ``report.py`` for the hackery to build manual pp calcs |  | ||||||
|   to avoid ib's absolute bullshit FIFO style position |  | ||||||
|   tracking.. |  | ||||||
| 
 |  | ||||||
| """ | """ | ||||||
| from .api import ( | from .api import ( | ||||||
|     get_client, |     get_client, | ||||||
|  | @ -38,7 +33,10 @@ from .feed import ( | ||||||
|     open_symbol_search, |     open_symbol_search, | ||||||
|     stream_quotes, |     stream_quotes, | ||||||
| ) | ) | ||||||
| from .broker import trades_dialogue | from .broker import ( | ||||||
|  |     trades_dialogue, | ||||||
|  |     norm_trade_records, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| __all__ = [ | __all__ = [ | ||||||
|     'get_client', |     'get_client', | ||||||
|  |  | ||||||
|  | @ -29,6 +29,7 @@ import itertools | ||||||
| from math import isnan | from math import isnan | ||||||
| from typing import ( | from typing import ( | ||||||
|     Any, |     Any, | ||||||
|  |     Optional, | ||||||
|     Union, |     Union, | ||||||
| ) | ) | ||||||
| import asyncio | import asyncio | ||||||
|  | @ -38,16 +39,28 @@ import time | ||||||
| from types import SimpleNamespace | from types import SimpleNamespace | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | from bidict import bidict | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from tractor import to_asyncio | from tractor import to_asyncio | ||||||
| from ib_insync.wrapper import RequestError | import ib_insync as ibis | ||||||
| from ib_insync.contract import Contract, ContractDetails | from ib_insync.contract import ( | ||||||
|  |     Contract, | ||||||
|  |     ContractDetails, | ||||||
|  |     Option, | ||||||
|  | ) | ||||||
| from ib_insync.order import Order | from ib_insync.order import Order | ||||||
| from ib_insync.ticker import Ticker | from ib_insync.ticker import Ticker | ||||||
| from ib_insync.objects import Position | from ib_insync.objects import ( | ||||||
| import ib_insync as ibis |     Position, | ||||||
| from ib_insync.wrapper import Wrapper |     Fill, | ||||||
|  |     Execution, | ||||||
|  |     CommissionReport, | ||||||
|  | ) | ||||||
|  | from ib_insync.wrapper import ( | ||||||
|  |     Wrapper, | ||||||
|  |     RequestError, | ||||||
|  | ) | ||||||
| from ib_insync.client import Client as ib_Client | from ib_insync.client import Client as ib_Client | ||||||
| import numpy as np | import numpy as np | ||||||
| 
 | 
 | ||||||
|  | @ -155,60 +168,93 @@ class NonShittyIB(ibis.IB): | ||||||
|         self.client.apiEnd += self.disconnectedEvent |         self.client.apiEnd += self.disconnectedEvent | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # map of symbols to contract ids |  | ||||||
| _adhoc_cmdty_data_map = { |  | ||||||
|     # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 |  | ||||||
| 
 |  | ||||||
|     # NOTE: some cmdtys/metals don't have trade data like gold/usd: |  | ||||||
|     # https://groups.io/g/twsapi/message/44174 |  | ||||||
|     'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}), |  | ||||||
| } |  | ||||||
| 
 |  | ||||||
| _futes_venues = ( | _futes_venues = ( | ||||||
|     'GLOBEX', |     'GLOBEX', | ||||||
|     'NYMEX', |     'NYMEX', | ||||||
|     'CME', |     'CME', | ||||||
|     'CMECRYPTO', |     'CMECRYPTO', | ||||||
|  |     'COMEX', | ||||||
|  |     'CMDTY',  # special name case.. | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| _adhoc_futes_set = { | _adhoc_futes_set = { | ||||||
| 
 | 
 | ||||||
|     # equities |     # equities | ||||||
|     'nq.globex', |     'nq.globex', | ||||||
|     'mnq.globex', |     'mnq.globex',  # micro | ||||||
| 
 | 
 | ||||||
|     'es.globex', |     'es.globex', | ||||||
|     'mes.globex', |     'mes.globex',  # micro | ||||||
| 
 | 
 | ||||||
|     # cypto$ |     # cypto$ | ||||||
|     'brr.cmecrypto', |     'brr.cmecrypto', | ||||||
|     'ethusdrr.cmecrypto', |     'ethusdrr.cmecrypto', | ||||||
| 
 | 
 | ||||||
|     # agriculture |     # agriculture | ||||||
|     'he.globex',  # lean hogs |     'he.nymex',  # lean hogs | ||||||
|     'le.globex',  # live cattle (geezers) |     'le.nymex',  # live cattle (geezers) | ||||||
|     'gf.globex',  # feeder cattle (younguns) |     'gf.nymex',  # feeder cattle (younguns) | ||||||
| 
 | 
 | ||||||
|     # raw |     # raw | ||||||
|     'lb.globex',  # random len lumber |     'lb.nymex',  # random len lumber | ||||||
| 
 | 
 | ||||||
|     # metals |     # metals | ||||||
|     'xauusd.cmdty',  # gold spot |     'xauusd.cmdty',  # gold spot | ||||||
|     'gc.nymex', |     'gc.nymex', | ||||||
|     'mgc.nymex', |     'mgc.nymex',  # micro | ||||||
|  | 
 | ||||||
|  |     # oil & gas | ||||||
|  |     'cl.nymex', | ||||||
| 
 | 
 | ||||||
|     'xagusd.cmdty',  # silver spot |     'xagusd.cmdty',  # silver spot | ||||||
|     'ni.nymex',  # silver futes |     'ni.nymex',  # silver futes | ||||||
|     'qi.comex',  # mini-silver futes |     'qi.comex',  # mini-silver futes | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
|  | # taken from list here: | ||||||
|  | # https://www.interactivebrokers.com/en/trading/products-spot-currencies.php | ||||||
|  | _adhoc_fiat_set = set(( | ||||||
|  |     'USD, AED, AUD, CAD,' | ||||||
|  |     'CHF, CNH, CZK, DKK,' | ||||||
|  |     'EUR, GBP, HKD, HUF,' | ||||||
|  |     'ILS, JPY, MXN, NOK,' | ||||||
|  |     'NZD, PLN, RUB, SAR,' | ||||||
|  |     'SEK, SGD, TRY, ZAR' | ||||||
|  |     ).split(' ,') | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # map of symbols to contract ids | ||||||
|  | _adhoc_symbol_map = { | ||||||
|  |     # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 | ||||||
|  | 
 | ||||||
|  |     # NOTE: some cmdtys/metals don't have trade data like gold/usd: | ||||||
|  |     # https://groups.io/g/twsapi/message/44174 | ||||||
|  |     'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}), | ||||||
|  | } | ||||||
|  | for qsn in _adhoc_futes_set: | ||||||
|  |     sym, venue = qsn.split('.') | ||||||
|  |     assert venue.upper() in _futes_venues, f'{venue}' | ||||||
|  |     _adhoc_symbol_map[sym.upper()] = ( | ||||||
|  |         {'exchange': venue}, | ||||||
|  |         {}, | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| # exchanges we don't support at the moment due to not knowing | # exchanges we don't support at the moment due to not knowing | ||||||
| # how to do symbol-contract lookup correctly likely due | # how to do symbol-contract lookup correctly likely due | ||||||
| # to not having the data feeds subscribed. | # to not having the data feeds subscribed. | ||||||
| _exch_skip_list = { | _exch_skip_list = { | ||||||
|  | 
 | ||||||
|     'ASX',  # aussie stocks |     'ASX',  # aussie stocks | ||||||
|     'MEXI',  # mexican stocks |     'MEXI',  # mexican stocks | ||||||
|     'VALUE',  # no idea | 
 | ||||||
|  |     # no idea | ||||||
|  |     'VALUE', | ||||||
|  |     'FUNDSERV', | ||||||
|  |     'SWB2', | ||||||
|  |     'PSE', | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
| # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 | # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924 | ||||||
|  | @ -261,27 +307,29 @@ class Client: | ||||||
| 
 | 
 | ||||||
|         # NOTE: the ib.client here is "throttled" to 45 rps by default |         # NOTE: the ib.client here is "throttled" to 45 rps by default | ||||||
| 
 | 
 | ||||||
|     async def trades( |     async def trades(self) -> dict[str, Any]: | ||||||
|         self, |         ''' | ||||||
|         # api_only: bool = False, |         Return list of trade-fills from current session in ``dict``. | ||||||
| 
 | 
 | ||||||
|     ) -> dict[str, Any]: |         ''' | ||||||
| 
 |         fills: list[Fill] = self.ib.fills() | ||||||
|         # orders = await self.ib.reqCompletedOrdersAsync( |         norm_fills: list[dict] = [] | ||||||
|         #     apiOnly=api_only |  | ||||||
|         # ) |  | ||||||
|         fills = await self.ib.reqExecutionsAsync() |  | ||||||
|         norm_fills = [] |  | ||||||
|         for fill in fills: |         for fill in fills: | ||||||
|             fill = fill._asdict()  # namedtuple |             fill = fill._asdict()  # namedtuple | ||||||
|             for key, val in fill.copy().items(): |             for key, val in fill.items(): | ||||||
|                 if isinstance(val, Contract): |                 match val: | ||||||
|  |                     case Contract() | Execution() | CommissionReport(): | ||||||
|                         fill[key] = asdict(val) |                         fill[key] = asdict(val) | ||||||
| 
 | 
 | ||||||
|             norm_fills.append(fill) |             norm_fills.append(fill) | ||||||
| 
 | 
 | ||||||
|         return norm_fills |         return norm_fills | ||||||
| 
 | 
 | ||||||
|  |     async def orders(self) -> list[Order]: | ||||||
|  |         return await self.ib.reqAllOpenOrdersAsync( | ||||||
|  |             apiOnly=False, | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|     async def bars( |     async def bars( | ||||||
|         self, |         self, | ||||||
|         fqsn: str, |         fqsn: str, | ||||||
|  | @ -309,7 +357,7 @@ class Client: | ||||||
| 
 | 
 | ||||||
|         _enters += 1 |         _enters += 1 | ||||||
| 
 | 
 | ||||||
|         contract = await self.find_contract(fqsn) |         contract = (await self.find_contracts(fqsn))[0] | ||||||
|         bars_kwargs.update(getattr(contract, 'bars_kwargs', {})) |         bars_kwargs.update(getattr(contract, 'bars_kwargs', {})) | ||||||
| 
 | 
 | ||||||
|         # _min = min(2000*100, count) |         # _min = min(2000*100, count) | ||||||
|  | @ -364,7 +412,15 @@ class Client: | ||||||
|                 futs.append(self.ib.reqContractDetailsAsync(con)) |                 futs.append(self.ib.reqContractDetailsAsync(con)) | ||||||
| 
 | 
 | ||||||
|         # batch request all details |         # batch request all details | ||||||
|  |         try: | ||||||
|             results = await asyncio.gather(*futs) |             results = await asyncio.gather(*futs) | ||||||
|  |         except RequestError as err: | ||||||
|  |             msg = err.message | ||||||
|  |             if ( | ||||||
|  |                 'No security definition' in msg | ||||||
|  |             ): | ||||||
|  |                 log.warning(f'{msg}: {contracts}') | ||||||
|  |                 return {} | ||||||
| 
 | 
 | ||||||
|         # one set per future result |         # one set per future result | ||||||
|         details = {} |         details = {} | ||||||
|  | @ -373,20 +429,11 @@ class Client: | ||||||
|             # XXX: if there is more then one entry in the details list |             # XXX: if there is more then one entry in the details list | ||||||
|             # then the contract is so called "ambiguous". |             # then the contract is so called "ambiguous". | ||||||
|             for d in details_set: |             for d in details_set: | ||||||
|                 con = d.contract |  | ||||||
| 
 | 
 | ||||||
|                 key = '.'.join([ |                 # nested dataclass we probably don't need and that won't | ||||||
|                     con.symbol, |                 # IPC serialize.. | ||||||
|                     con.primaryExchange or con.exchange, |  | ||||||
|                 ]) |  | ||||||
|                 expiry = con.lastTradeDateOrContractMonth |  | ||||||
|                 if expiry: |  | ||||||
|                     key += f'.{expiry}' |  | ||||||
| 
 |  | ||||||
|                 # nested dataclass we probably don't need and that |  | ||||||
|                 # won't IPC serialize.. |  | ||||||
|                 d.secIdList = '' |                 d.secIdList = '' | ||||||
| 
 |                 key, calc_price = con2fqsn(d.contract) | ||||||
|                 details[key] = d |                 details[key] = d | ||||||
| 
 | 
 | ||||||
|         return details |         return details | ||||||
|  | @ -416,7 +463,7 @@ class Client: | ||||||
|         self, |         self, | ||||||
|         pattern: str, |         pattern: str, | ||||||
|         # how many contracts to search "up to" |         # how many contracts to search "up to" | ||||||
|         upto: int = 3, |         upto: int = 6, | ||||||
|         asdicts: bool = True, |         asdicts: bool = True, | ||||||
| 
 | 
 | ||||||
|     ) -> dict[str, ContractDetails]: |     ) -> dict[str, ContractDetails]: | ||||||
|  | @ -427,7 +474,6 @@ class Client: | ||||||
|             pattern, |             pattern, | ||||||
|             upto=upto, |             upto=upto, | ||||||
|         ) |         ) | ||||||
| 
 |  | ||||||
|         for key, deats in results.copy().items(): |         for key, deats in results.copy().items(): | ||||||
| 
 | 
 | ||||||
|             tract = deats.contract |             tract = deats.contract | ||||||
|  | @ -437,21 +483,44 @@ class Client: | ||||||
|             if sectype == 'IND': |             if sectype == 'IND': | ||||||
|                 results[f'{sym}.IND'] = tract |                 results[f'{sym}.IND'] = tract | ||||||
|                 results.pop(key) |                 results.pop(key) | ||||||
|                 exch = tract.exchange |                 # exch = tract.exchange | ||||||
| 
 | 
 | ||||||
|                 if exch in _futes_venues: |                 # XXX: add back one of these to get the weird deadlock | ||||||
|  |                 # on the debugger from root without the latest | ||||||
|  |                 # maybe_wait_for_debugger() fix in the `open_context()` | ||||||
|  |                 # exit. | ||||||
|  |                 # assert 0 | ||||||
|  |                 # if con.exchange not in _exch_skip_list: | ||||||
|  | 
 | ||||||
|  |                 exch = tract.exchange | ||||||
|  |                 if exch not in _exch_skip_list: | ||||||
|                     # try get all possible contracts for symbol as per, |                     # try get all possible contracts for symbol as per, | ||||||
|                     # https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut |                     # https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut | ||||||
|                     con = ibis.Future( |                     con = ibis.Future( | ||||||
|                         symbol=sym, |                         symbol=sym, | ||||||
|                         exchange=exch, |                         exchange=exch, | ||||||
|                     ) |                     ) | ||||||
|                     try: |                     # TODO: make this work, think it's something to do | ||||||
|  |                     # with the qualify flag. | ||||||
|  |                     # cons = await self.find_contracts( | ||||||
|  |                     #     contract=con, | ||||||
|  |                     #     err_on_qualify=False, | ||||||
|  |                     # ) | ||||||
|  |                     # if cons: | ||||||
|                     all_deats = await self.con_deats([con]) |                     all_deats = await self.con_deats([con]) | ||||||
|                     results |= all_deats |                     results |= all_deats | ||||||
| 
 | 
 | ||||||
|                     except RequestError as err: |             # forex pairs | ||||||
|                         log.warning(err.message) |             elif sectype == 'CASH': | ||||||
|  |                 dst, src = tract.localSymbol.split('.') | ||||||
|  |                 pair_key = "/".join([dst, src]) | ||||||
|  |                 exch = tract.exchange.lower() | ||||||
|  |                 results[f'{pair_key}.{exch}'] = tract | ||||||
|  |                 results.pop(key) | ||||||
|  | 
 | ||||||
|  |                 # XXX: again seems to trigger the weird tractor | ||||||
|  |                 # bug with the debugger.. | ||||||
|  |                 # assert 0 | ||||||
| 
 | 
 | ||||||
|         return results |         return results | ||||||
| 
 | 
 | ||||||
|  | @ -483,13 +552,19 @@ class Client: | ||||||
| 
 | 
 | ||||||
|         return con |         return con | ||||||
| 
 | 
 | ||||||
|     async def find_contract( |     async def get_con( | ||||||
|  |         self, | ||||||
|  |         conid: int, | ||||||
|  |     ) -> Contract: | ||||||
|  |         return await self.ib.qualifyContractsAsync( | ||||||
|  |             ibis.Contract(conId=conid) | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     def parse_patt2fqsn( | ||||||
|         self, |         self, | ||||||
|         pattern: str, |         pattern: str, | ||||||
|         currency: str = 'USD', |  | ||||||
|         **kwargs, |  | ||||||
| 
 | 
 | ||||||
|     ) -> Contract: |     ) -> tuple[str, str, str, str]: | ||||||
| 
 | 
 | ||||||
|         # TODO: we can't use this currently because |         # TODO: we can't use this currently because | ||||||
|         # ``wrapper.starTicker()`` currently cashes ticker instances |         # ``wrapper.starTicker()`` currently cashes ticker instances | ||||||
|  | @ -502,12 +577,30 @@ class Client: | ||||||
|         # XXX UPDATE: we can probably do the tick/trades scraping |         # XXX UPDATE: we can probably do the tick/trades scraping | ||||||
|         # inside our eventkit handler instead to bypass this entirely? |         # inside our eventkit handler instead to bypass this entirely? | ||||||
| 
 | 
 | ||||||
|  |         currency = '' | ||||||
|  | 
 | ||||||
|  |         # fqsn parsing stage | ||||||
|  |         # ------------------ | ||||||
|         if '.ib' in pattern: |         if '.ib' in pattern: | ||||||
|             from ..data._source import unpack_fqsn |             from ..data._source import unpack_fqsn | ||||||
|             broker, symbol, expiry = unpack_fqsn(pattern) |             _, symbol, expiry = unpack_fqsn(pattern) | ||||||
|  | 
 | ||||||
|         else: |         else: | ||||||
|             symbol = pattern |             symbol = pattern | ||||||
|  |             expiry = '' | ||||||
| 
 | 
 | ||||||
|  |         # another hack for forex pairs lul. | ||||||
|  |         if ( | ||||||
|  |             '.idealpro' in symbol | ||||||
|  |             # or '/' in symbol | ||||||
|  |         ): | ||||||
|  |             exch = 'IDEALPRO' | ||||||
|  |             symbol = symbol.removesuffix('.idealpro') | ||||||
|  |             if '/' in symbol: | ||||||
|  |                 symbol, currency = symbol.split('/') | ||||||
|  | 
 | ||||||
|  |         else: | ||||||
|  |             # TODO: yes, a cache.. | ||||||
|             # try: |             # try: | ||||||
|             #     # give the cache a go |             #     # give the cache a go | ||||||
|             #     return self._contracts[symbol] |             #     return self._contracts[symbol] | ||||||
|  | @ -518,45 +611,80 @@ class Client: | ||||||
|                 symbol, _, expiry = symbol.rpartition('.') |                 symbol, _, expiry = symbol.rpartition('.') | ||||||
| 
 | 
 | ||||||
|             # use heuristics to figure out contract "type" |             # use heuristics to figure out contract "type" | ||||||
|         sym, exch = symbol.upper().rsplit('.', maxsplit=1) |             symbol, exch = symbol.upper().rsplit('.', maxsplit=1) | ||||||
| 
 | 
 | ||||||
|         qualify: bool = True |         return symbol, currency, exch, expiry | ||||||
|  | 
 | ||||||
|  |     async def find_contracts( | ||||||
|  |         self, | ||||||
|  |         pattern: Optional[str] = None, | ||||||
|  |         contract: Optional[Contract] = None, | ||||||
|  |         qualify: bool = True, | ||||||
|  |         err_on_qualify: bool = True, | ||||||
|  | 
 | ||||||
|  |     ) -> Contract: | ||||||
|  | 
 | ||||||
|  |         if pattern is not None: | ||||||
|  |             symbol, currency, exch, expiry = self.parse_patt2fqsn( | ||||||
|  |                 pattern, | ||||||
|  |             ) | ||||||
|  |             sectype = '' | ||||||
|  | 
 | ||||||
|  |         else: | ||||||
|  |             assert contract | ||||||
|  |             symbol = contract.symbol | ||||||
|  |             sectype = contract.secType | ||||||
|  |             exch = contract.exchange or contract.primaryExchange | ||||||
|  |             expiry = contract.lastTradeDateOrContractMonth | ||||||
|  |             currency = contract.currency | ||||||
|  | 
 | ||||||
|  |         # contract searching stage | ||||||
|  |         # ------------------------ | ||||||
| 
 | 
 | ||||||
|         # futes |         # futes | ||||||
|         if exch in _futes_venues: |         if exch in _futes_venues: | ||||||
|             if expiry: |             if expiry: | ||||||
|                 # get the "front" contract |                 # get the "front" contract | ||||||
|                 contract = await self.get_fute( |                 con = await self.get_fute( | ||||||
|                     symbol=sym, |                     symbol=symbol, | ||||||
|                     exchange=exch, |                     exchange=exch, | ||||||
|                     expiry=expiry, |                     expiry=expiry, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|             else: |             else: | ||||||
|                 # get the "front" contract |                 # get the "front" contract | ||||||
|                 contract = await self.get_fute( |                 con = await self.get_fute( | ||||||
|                     symbol=sym, |                     symbol=symbol, | ||||||
|                     exchange=exch, |                     exchange=exch, | ||||||
|                     front=True, |                     front=True, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|             qualify = False |         elif ( | ||||||
| 
 |             exch in ('IDEALPRO') | ||||||
|         elif exch in ('FOREX'): |             or sectype == 'CASH' | ||||||
|             currency = '' |         ): | ||||||
|             symbol, currency = sym.split('/') |             # if '/' in symbol: | ||||||
|  |             #     currency = '' | ||||||
|  |             #     symbol, currency = symbol.split('/') | ||||||
|             con = ibis.Forex( |             con = ibis.Forex( | ||||||
|                 symbol=symbol, |                 pair=''.join((symbol, currency)), | ||||||
|                 currency=currency, |                 currency=currency, | ||||||
|             ) |             ) | ||||||
|             con.bars_kwargs = {'whatToShow': 'MIDPOINT'} |             con.bars_kwargs = {'whatToShow': 'MIDPOINT'} | ||||||
| 
 | 
 | ||||||
|         # commodities |         # commodities | ||||||
|         elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY |         elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY | ||||||
|             con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym] |             con_kwargs, bars_kwargs = _adhoc_symbol_map[symbol] | ||||||
|             con = ibis.Commodity(**con_kwargs) |             con = ibis.Commodity(**con_kwargs) | ||||||
|             con.bars_kwargs = bars_kwargs |             con.bars_kwargs = bars_kwargs | ||||||
| 
 | 
 | ||||||
|  |         # crypto$ | ||||||
|  |         elif exch == 'PAXOS':  # btc.paxos | ||||||
|  |             con = ibis.Crypto( | ||||||
|  |                 symbol=symbol, | ||||||
|  |                 currency=currency, | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|         # stonks |         # stonks | ||||||
|         else: |         else: | ||||||
|             # TODO: metadata system for all these exchange rules.. |             # TODO: metadata system for all these exchange rules.. | ||||||
|  | @ -569,33 +697,50 @@ class Client: | ||||||
|                 exch = 'SMART' |                 exch = 'SMART' | ||||||
| 
 | 
 | ||||||
|             else: |             else: | ||||||
|                 exch = 'SMART' |                 # XXX: order is super important here since | ||||||
|  |                 # a primary == 'SMART' won't ever work. | ||||||
|                 primaryExchange = exch |                 primaryExchange = exch | ||||||
|  |                 exch = 'SMART' | ||||||
| 
 | 
 | ||||||
|             con = ibis.Stock( |             con = ibis.Stock( | ||||||
|                 symbol=sym, |                 symbol=symbol, | ||||||
|                 exchange=exch, |                 exchange=exch, | ||||||
|                 primaryExchange=primaryExchange, |                 primaryExchange=primaryExchange, | ||||||
|                 currency=currency, |                 currency=currency, | ||||||
|             ) |             ) | ||||||
|         try: |  | ||||||
|             exch = 'SMART' if not exch else exch |             exch = 'SMART' if not exch else exch | ||||||
|             if qualify: |  | ||||||
|                 contract = (await self.ib.qualifyContractsAsync(con))[0] |  | ||||||
|             else: |  | ||||||
|                 assert contract |  | ||||||
| 
 | 
 | ||||||
|         except IndexError: |         contracts = [con] | ||||||
|  |         if qualify: | ||||||
|  |             try: | ||||||
|  |                 contracts = await self.ib.qualifyContractsAsync(con) | ||||||
|  |             except RequestError as err: | ||||||
|  |                 msg = err.message | ||||||
|  |                 if ( | ||||||
|  |                     'No security definition' in msg | ||||||
|  |                     and not err_on_qualify | ||||||
|  |                 ): | ||||||
|  |                     log.warning( | ||||||
|  |                         f'Could not find def for {con}') | ||||||
|  |                     return None | ||||||
|  | 
 | ||||||
|  |                 else: | ||||||
|  |                     raise | ||||||
|  |             if not contracts: | ||||||
|                 raise ValueError(f"No contract could be found {con}") |                 raise ValueError(f"No contract could be found {con}") | ||||||
| 
 | 
 | ||||||
|         self._contracts[pattern] = contract |         # pack all contracts into cache | ||||||
|  |         for tract in contracts: | ||||||
|  |             exch: str = tract.primaryExchange or tract.exchange or exch | ||||||
|  |             pattern = f'{symbol}.{exch}' | ||||||
|  |             expiry = tract.lastTradeDateOrContractMonth | ||||||
|  |             # add an entry with expiry suffix if available | ||||||
|  |             if expiry: | ||||||
|  |                 pattern += f'.{expiry}' | ||||||
| 
 | 
 | ||||||
|         # add an aditional entry with expiry suffix if available |             self._contracts[pattern.lower()] = tract | ||||||
|         conexp = contract.lastTradeDateOrContractMonth |  | ||||||
|         if conexp: |  | ||||||
|             self._contracts[pattern + f'.{conexp}'] = contract |  | ||||||
| 
 | 
 | ||||||
|         return contract |         return contracts | ||||||
| 
 | 
 | ||||||
|     async def get_head_time( |     async def get_head_time( | ||||||
|         self, |         self, | ||||||
|  | @ -614,9 +759,10 @@ class Client: | ||||||
|     async def get_sym_details( |     async def get_sym_details( | ||||||
|         self, |         self, | ||||||
|         symbol: str, |         symbol: str, | ||||||
|  | 
 | ||||||
|     ) -> tuple[Contract, Ticker, ContractDetails]: |     ) -> tuple[Contract, Ticker, ContractDetails]: | ||||||
| 
 | 
 | ||||||
|         contract = await self.find_contract(symbol) |         contract = (await self.find_contracts(symbol))[0] | ||||||
|         ticker: Ticker = self.ib.reqMktData( |         ticker: Ticker = self.ib.reqMktData( | ||||||
|             contract, |             contract, | ||||||
|             snapshot=True, |             snapshot=True, | ||||||
|  | @ -804,6 +950,73 @@ class Client: | ||||||
|         return self.ib.positions(account=account) |         return self.ib.positions(account=account) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | def con2fqsn( | ||||||
|  |     con: Contract, | ||||||
|  |     _cache: dict[int, (str, bool)] = {} | ||||||
|  | 
 | ||||||
|  | ) -> tuple[str, bool]: | ||||||
|  |     ''' | ||||||
|  |     Convert contracts to fqsn-style strings to be used both in symbol-search | ||||||
|  |     matching and as feed tokens passed to the front end data deed layer. | ||||||
|  | 
 | ||||||
|  |     Previously seen contracts are cached by id. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     # should be real volume for this contract by default | ||||||
|  |     calc_price = False | ||||||
|  |     if con.conId: | ||||||
|  |         try: | ||||||
|  |             return _cache[con.conId] | ||||||
|  |         except KeyError: | ||||||
|  |             pass | ||||||
|  | 
 | ||||||
|  |     suffix = con.primaryExchange or con.exchange | ||||||
|  |     symbol = con.symbol | ||||||
|  |     expiry = con.lastTradeDateOrContractMonth or '' | ||||||
|  | 
 | ||||||
|  |     match con: | ||||||
|  |         case Option(): | ||||||
|  |             # TODO: option symbol parsing and sane display: | ||||||
|  |             symbol = con.localSymbol.replace(' ', '') | ||||||
|  | 
 | ||||||
|  |         case ibis.Commodity(): | ||||||
|  |             # commodities and forex don't have an exchange name and | ||||||
|  |             # no real volume so we have to calculate the price | ||||||
|  |             suffix = con.secType | ||||||
|  | 
 | ||||||
|  |             # no real volume on this tract | ||||||
|  |             calc_price = True | ||||||
|  | 
 | ||||||
|  |         case ibis.Forex() | ibis.Contract(secType='CASH'): | ||||||
|  |             dst, src = con.localSymbol.split('.') | ||||||
|  |             symbol = ''.join([dst, src]) | ||||||
|  |             suffix = con.exchange | ||||||
|  | 
 | ||||||
|  |             # no real volume on forex feeds.. | ||||||
|  |             calc_price = True | ||||||
|  | 
 | ||||||
|  |     if not suffix: | ||||||
|  |         entry = _adhoc_symbol_map.get( | ||||||
|  |             con.symbol or con.localSymbol | ||||||
|  |         ) | ||||||
|  |         if entry: | ||||||
|  |             meta, kwargs = entry | ||||||
|  |             cid = meta.get('conId') | ||||||
|  |             if cid: | ||||||
|  |                 assert con.conId == meta['conId'] | ||||||
|  |             suffix = meta['exchange'] | ||||||
|  | 
 | ||||||
|  |     # append a `.<suffix>` to the returned symbol | ||||||
|  |     # key for derivatives that normally is the expiry | ||||||
|  |     # date key. | ||||||
|  |     if expiry: | ||||||
|  |         suffix += f'.{expiry}' | ||||||
|  | 
 | ||||||
|  |     fqsn_key = '.'.join((symbol, suffix)).lower() | ||||||
|  |     _cache[con.conId] = fqsn_key, calc_price | ||||||
|  |     return fqsn_key, calc_price | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| # per-actor API ep caching | # per-actor API ep caching | ||||||
| _client_cache: dict[tuple[str, int], Client] = {} | _client_cache: dict[tuple[str, int], Client] = {} | ||||||
| _scan_ignore: set[tuple[str, int]] = set() | _scan_ignore: set[tuple[str, int]] = set() | ||||||
|  | @ -811,10 +1024,23 @@ _scan_ignore: set[tuple[str, int]] = set() | ||||||
| 
 | 
 | ||||||
| def get_config() -> dict[str, Any]: | def get_config() -> dict[str, Any]: | ||||||
| 
 | 
 | ||||||
|     conf, path = config.load() |     conf, path = config.load('brokers') | ||||||
| 
 |  | ||||||
|     section = conf.get('ib') |     section = conf.get('ib') | ||||||
| 
 | 
 | ||||||
|  |     accounts = section.get('accounts') | ||||||
|  |     if not accounts: | ||||||
|  |         raise ValueError( | ||||||
|  |             'brokers.toml -> `ib.accounts` must be defined\n' | ||||||
|  |             f'location: {path}' | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     names = list(accounts.keys()) | ||||||
|  |     accts = section['accounts'] = bidict(accounts) | ||||||
|  |     log.info( | ||||||
|  |         f'brokers.toml defines {len(accts)} accounts: ' | ||||||
|  |         f'{pformat(names)}' | ||||||
|  |     ) | ||||||
|  | 
 | ||||||
|     if section is None: |     if section is None: | ||||||
|         log.warning(f'No config section found for ib in {path}') |         log.warning(f'No config section found for ib in {path}') | ||||||
|         return {} |         return {} | ||||||
|  | @ -908,6 +1134,12 @@ async def load_aio_clients( | ||||||
|                     # careful. |                     # careful. | ||||||
|                     timeout=connect_timeout, |                     timeout=connect_timeout, | ||||||
|                 ) |                 ) | ||||||
|  |                 # create and cache client | ||||||
|  |                 client = Client(ib) | ||||||
|  | 
 | ||||||
|  |                 # update all actor-global caches | ||||||
|  |                 log.info(f"Caching client for {sockaddr}") | ||||||
|  |                 _client_cache[sockaddr] = client | ||||||
|                 break |                 break | ||||||
| 
 | 
 | ||||||
|             except ( |             except ( | ||||||
|  | @ -931,21 +1163,9 @@ async def load_aio_clients( | ||||||
|                 log.warning( |                 log.warning( | ||||||
|                     f'Failed to connect on {port} for {i} time, retrying...') |                     f'Failed to connect on {port} for {i} time, retrying...') | ||||||
| 
 | 
 | ||||||
|         # create and cache client |  | ||||||
|         client = Client(ib) |  | ||||||
| 
 |  | ||||||
|         # Pre-collect all accounts available for this |         # Pre-collect all accounts available for this | ||||||
|         # connection and map account names to this client |         # connection and map account names to this client | ||||||
|         # instance. |         # instance. | ||||||
|         pps = ib.positions() |  | ||||||
|         if pps: |  | ||||||
|             for pp in pps: |  | ||||||
|                 accounts_found[ |  | ||||||
|                     accounts_def.inverse[pp.account] |  | ||||||
|                 ] = client |  | ||||||
| 
 |  | ||||||
|         # if there are accounts without positions we should still |  | ||||||
|         # register them for this client |  | ||||||
|         for value in ib.accountValues(): |         for value in ib.accountValues(): | ||||||
|             acct_number = value.account |             acct_number = value.account | ||||||
| 
 | 
 | ||||||
|  | @ -966,10 +1186,6 @@ async def load_aio_clients( | ||||||
|             f'{pformat(accounts_found)}' |             f'{pformat(accounts_found)}' | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # update all actor-global caches |  | ||||||
|         log.info(f"Caching client for {sockaddr}") |  | ||||||
|         _client_cache[sockaddr] = client |  | ||||||
| 
 |  | ||||||
|         # XXX: why aren't we just updating this directy above |         # XXX: why aren't we just updating this directy above | ||||||
|         # instead of using the intermediary `accounts_found`? |         # instead of using the intermediary `accounts_found`? | ||||||
|         _accounts2clients.update(accounts_found) |         _accounts2clients.update(accounts_found) | ||||||
|  | @ -990,7 +1206,7 @@ async def load_aio_clients( | ||||||
|         for acct, client in _accounts2clients.items(): |         for acct, client in _accounts2clients.items(): | ||||||
|             log.info(f'Disconnecting {acct}@{client}') |             log.info(f'Disconnecting {acct}@{client}') | ||||||
|             client.ib.disconnect() |             client.ib.disconnect() | ||||||
|             _client_cache.pop((host, port)) |             _client_cache.pop((host, port), None) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def load_clients_for_trio( | async def load_clients_for_trio( | ||||||
|  | @ -1019,9 +1235,6 @@ async def load_clients_for_trio( | ||||||
|             await asyncio.sleep(float('inf')) |             await asyncio.sleep(float('inf')) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _proxies: dict[str, MethodProxy] = {} |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| @acm | @acm | ||||||
| async def open_client_proxies() -> tuple[ | async def open_client_proxies() -> tuple[ | ||||||
|     dict[str, MethodProxy], |     dict[str, MethodProxy], | ||||||
|  | @ -1029,7 +1242,6 @@ async def open_client_proxies() -> tuple[ | ||||||
| ]: | ]: | ||||||
|     async with ( |     async with ( | ||||||
|         tractor.trionics.maybe_open_context( |         tractor.trionics.maybe_open_context( | ||||||
|             # acm_func=open_client_proxies, |  | ||||||
|             acm_func=tractor.to_asyncio.open_channel_from, |             acm_func=tractor.to_asyncio.open_channel_from, | ||||||
|             kwargs={'target': load_clients_for_trio}, |             kwargs={'target': load_clients_for_trio}, | ||||||
| 
 | 
 | ||||||
|  | @ -1044,13 +1256,14 @@ async def open_client_proxies() -> tuple[ | ||||||
|         if cache_hit: |         if cache_hit: | ||||||
|             log.info(f'Re-using cached clients: {clients}') |             log.info(f'Re-using cached clients: {clients}') | ||||||
| 
 | 
 | ||||||
|  |         proxies = {} | ||||||
|         for acct_name, client in clients.items(): |         for acct_name, client in clients.items(): | ||||||
|             proxy = await stack.enter_async_context( |             proxy = await stack.enter_async_context( | ||||||
|                 open_client_proxy(client), |                 open_client_proxy(client), | ||||||
|             ) |             ) | ||||||
|             _proxies[acct_name] = proxy |             proxies[acct_name] = proxy | ||||||
| 
 | 
 | ||||||
|         yield _proxies, clients |         yield proxies, clients | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def get_preferred_data_client( | def get_preferred_data_client( | ||||||
|  | @ -1199,11 +1412,13 @@ async def open_client_proxy( | ||||||
|     event_table = {} |     event_table = {} | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with ( | ||||||
|  | 
 | ||||||
|         to_asyncio.open_channel_from( |         to_asyncio.open_channel_from( | ||||||
|             open_aio_client_method_relay, |             open_aio_client_method_relay, | ||||||
|             client=client, |             client=client, | ||||||
|             event_consumers=event_table, |             event_consumers=event_table, | ||||||
|         ) as (first, chan), |         ) as (first, chan), | ||||||
|  | 
 | ||||||
|         trio.open_nursery() as relay_n, |         trio.open_nursery() as relay_n, | ||||||
|     ): |     ): | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -41,7 +41,8 @@ from trio_typing import TaskStatus | ||||||
| from piker.data._sharedmem import ShmArray | from piker.data._sharedmem import ShmArray | ||||||
| from .._util import SymbolNotFound, NoData | from .._util import SymbolNotFound, NoData | ||||||
| from .api import ( | from .api import ( | ||||||
|     _adhoc_futes_set, |     # _adhoc_futes_set, | ||||||
|  |     con2fqsn, | ||||||
|     log, |     log, | ||||||
|     load_aio_clients, |     load_aio_clients, | ||||||
|     ibis, |     ibis, | ||||||
|  | @ -207,8 +208,6 @@ async def get_bars( | ||||||
| 
 | 
 | ||||||
|         except RequestError as err: |         except RequestError as err: | ||||||
|             msg = err.message |             msg = err.message | ||||||
|             # why do we always need to rebind this? |  | ||||||
|             # _err = err |  | ||||||
| 
 | 
 | ||||||
|             if 'No market data permissions for' in msg: |             if 'No market data permissions for' in msg: | ||||||
|                 # TODO: signalling for no permissions searches |                 # TODO: signalling for no permissions searches | ||||||
|  | @ -217,8 +216,8 @@ async def get_bars( | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|             elif ( |             elif ( | ||||||
|                 err.code == 162 |                 err.code == 162 and | ||||||
|                 and 'HMDS query returned no data' in err.message |                 'HMDS query returned no data' in err.message | ||||||
|             ): |             ): | ||||||
|                 # XXX: this is now done in the storage mgmt layer |                 # XXX: this is now done in the storage mgmt layer | ||||||
|                 # and we shouldn't implicitly decrement the frame dt |                 # and we shouldn't implicitly decrement the frame dt | ||||||
|  | @ -237,6 +236,14 @@ async def get_bars( | ||||||
|                     frame_size=2000, |                     frame_size=2000, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|  |             # elif ( | ||||||
|  |             #     err.code == 162 and | ||||||
|  |             #     'Trading TWS session is connected from a different IP | ||||||
|  |             #     address' in err.message | ||||||
|  |             # ): | ||||||
|  |             #     log.warning("ignoring ip address warning") | ||||||
|  |             #     continue | ||||||
|  | 
 | ||||||
|             elif _pacing in msg: |             elif _pacing in msg: | ||||||
| 
 | 
 | ||||||
|                 log.warning( |                 log.warning( | ||||||
|  | @ -294,7 +301,13 @@ async def get_bars( | ||||||
|                 else: |                 else: | ||||||
| 
 | 
 | ||||||
|                     log.warning('Sending CONNECTION RESET') |                     log.warning('Sending CONNECTION RESET') | ||||||
|                     await data_reset_hack(reset_type='connection') |                     res = await data_reset_hack(reset_type='connection') | ||||||
|  |                     if not res: | ||||||
|  |                         log.warning( | ||||||
|  |                             'NO VNC DETECTED!\n' | ||||||
|  |                             'Manually press ctrl-alt-f on your IB java app' | ||||||
|  |                         ) | ||||||
|  |                         # break | ||||||
| 
 | 
 | ||||||
|                     with trio.move_on_after(timeout) as cs: |                     with trio.move_on_after(timeout) as cs: | ||||||
|                         for name, ev in [ |                         for name, ev in [ | ||||||
|  | @ -413,6 +426,7 @@ asset_type_map = { | ||||||
|     'WAR': 'warrant', |     'WAR': 'warrant', | ||||||
|     'IOPT': 'warran', |     'IOPT': 'warran', | ||||||
|     'BAG': 'bag', |     'BAG': 'bag', | ||||||
|  |     'CRYPTO': 'crypto',  # bc it's diff then fiat? | ||||||
|     # 'NEWS': 'news', |     # 'NEWS': 'news', | ||||||
| } | } | ||||||
| 
 | 
 | ||||||
|  | @ -553,38 +567,17 @@ async def open_aio_quote_stream( | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # TODO: cython/mypyc/numba this! | # TODO: cython/mypyc/numba this! | ||||||
|  | # or we can at least cache a majority of the values | ||||||
|  | # except for the ones we expect to change?.. | ||||||
| def normalize( | def normalize( | ||||||
|     ticker: Ticker, |     ticker: Ticker, | ||||||
|     calc_price: bool = False |     calc_price: bool = False | ||||||
| 
 | 
 | ||||||
| ) -> dict: | ) -> dict: | ||||||
| 
 | 
 | ||||||
|     # should be real volume for this contract by default |  | ||||||
|     calc_price = False |  | ||||||
| 
 |  | ||||||
|     # check for special contract types |     # check for special contract types | ||||||
|     con = ticker.contract |     con = ticker.contract | ||||||
|     if type(con) in ( |     fqsn, calc_price = con2fqsn(con) | ||||||
|         ibis.Commodity, |  | ||||||
|         ibis.Forex, |  | ||||||
|     ): |  | ||||||
|         # commodities and forex don't have an exchange name and |  | ||||||
|         # no real volume so we have to calculate the price |  | ||||||
|         suffix = con.secType |  | ||||||
|         # no real volume on this tract |  | ||||||
|         calc_price = True |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         suffix = con.primaryExchange |  | ||||||
|         if not suffix: |  | ||||||
|             suffix = con.exchange |  | ||||||
| 
 |  | ||||||
|         # append a `.<suffix>` to the returned symbol |  | ||||||
|         # key for derivatives that normally is the expiry |  | ||||||
|         # date key. |  | ||||||
|         expiry = con.lastTradeDateOrContractMonth |  | ||||||
|         if expiry: |  | ||||||
|             suffix += f'.{expiry}' |  | ||||||
| 
 | 
 | ||||||
|     # convert named tuples to dicts so we send usable keys |     # convert named tuples to dicts so we send usable keys | ||||||
|     new_ticks = [] |     new_ticks = [] | ||||||
|  | @ -616,9 +609,7 @@ def normalize( | ||||||
| 
 | 
 | ||||||
|     # generate fqsn with possible specialized suffix |     # generate fqsn with possible specialized suffix | ||||||
|     # for derivatives, note the lowercase. |     # for derivatives, note the lowercase. | ||||||
|     data['symbol'] = data['fqsn'] = '.'.join( |     data['symbol'] = data['fqsn'] = fqsn | ||||||
|         (con.symbol, suffix) |  | ||||||
|     ).lower() |  | ||||||
| 
 | 
 | ||||||
|     # convert named tuples to dicts for transport |     # convert named tuples to dicts for transport | ||||||
|     tbts = data.get('tickByTicks') |     tbts = data.get('tickByTicks') | ||||||
|  | @ -683,6 +674,13 @@ async def stream_quotes( | ||||||
|             # TODO: more consistent field translation |             # TODO: more consistent field translation | ||||||
|             atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']] |             atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']] | ||||||
| 
 | 
 | ||||||
|  |             if atype in { | ||||||
|  |                 'forex', | ||||||
|  |                 'index', | ||||||
|  |                 'commodity', | ||||||
|  |             }: | ||||||
|  |                 syminfo['no_vlm'] = True | ||||||
|  | 
 | ||||||
|             # for stocks it seems TWS reports too small a tick size |             # for stocks it seems TWS reports too small a tick size | ||||||
|             # such that you can't submit orders with that granularity? |             # such that you can't submit orders with that granularity? | ||||||
|             min_tick = 0.01 if atype == 'stock' else 0 |             min_tick = 0.01 if atype == 'stock' else 0 | ||||||
|  | @ -709,9 +707,9 @@ async def stream_quotes( | ||||||
|                 }, |                 }, | ||||||
| 
 | 
 | ||||||
|             } |             } | ||||||
|             return init_msgs |             return init_msgs, syminfo | ||||||
| 
 | 
 | ||||||
|         init_msgs = mk_init_msgs() |         init_msgs, syminfo = mk_init_msgs() | ||||||
| 
 | 
 | ||||||
|         # TODO: we should instead spawn a task that waits on a feed to start |         # TODO: we should instead spawn a task that waits on a feed to start | ||||||
|         # and let it wait indefinitely..instead of this hard coded stuff. |         # and let it wait indefinitely..instead of this hard coded stuff. | ||||||
|  | @ -720,7 +718,14 @@ async def stream_quotes( | ||||||
| 
 | 
 | ||||||
|         # it might be outside regular trading hours so see if we can at |         # it might be outside regular trading hours so see if we can at | ||||||
|         # least grab history. |         # least grab history. | ||||||
|         if isnan(first_ticker.last): |         if ( | ||||||
|  |             isnan(first_ticker.last) | ||||||
|  |             and type(first_ticker.contract) not in ( | ||||||
|  |                 ibis.Commodity, | ||||||
|  |                 ibis.Forex, | ||||||
|  |                 ibis.Crypto, | ||||||
|  |             ) | ||||||
|  |         ): | ||||||
|             task_status.started((init_msgs, first_quote)) |             task_status.started((init_msgs, first_quote)) | ||||||
| 
 | 
 | ||||||
|             # it's not really live but this will unblock |             # it's not really live but this will unblock | ||||||
|  | @ -743,10 +748,16 @@ async def stream_quotes( | ||||||
|             task_status.started((init_msgs, first_quote)) |             task_status.started((init_msgs, first_quote)) | ||||||
| 
 | 
 | ||||||
|             async with aclosing(stream): |             async with aclosing(stream): | ||||||
|                 if type(first_ticker.contract) not in ( |                 if syminfo.get('no_vlm', False): | ||||||
|                     ibis.Commodity, | 
 | ||||||
|                     ibis.Forex |                     # generally speaking these feeds don't | ||||||
|                 ): |                     # include vlm data. | ||||||
|  |                     atype = syminfo['asset_type'] | ||||||
|  |                     log.info( | ||||||
|  |                         f'Non-vlm asset {sym}@{atype}, skipping quote poll...' | ||||||
|  |                     ) | ||||||
|  | 
 | ||||||
|  |                 else: | ||||||
|                     # wait for real volume on feed (trading might be closed) |                     # wait for real volume on feed (trading might be closed) | ||||||
|                     while True: |                     while True: | ||||||
|                         ticker = await stream.receive() |                         ticker = await stream.receive() | ||||||
|  | @ -805,6 +816,9 @@ async def data_reset_hack( | ||||||
|           successful. |           successful. | ||||||
|         - other OS support? |         - other OS support? | ||||||
|         - integration with ``ib-gw`` run in docker + Xorg? |         - integration with ``ib-gw`` run in docker + Xorg? | ||||||
|  |         - is it possible to offer a local server that can be accessed by | ||||||
|  |           a client? Would be sure be handy for running native java blobs | ||||||
|  |           that need to be wrangle. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
| 
 | 
 | ||||||
|  | @ -835,7 +849,10 @@ async def data_reset_hack( | ||||||
|             client.mouse.click() |             client.mouse.click() | ||||||
|             client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked |             client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked | ||||||
| 
 | 
 | ||||||
|  |     try: | ||||||
|         await tractor.to_asyncio.run_task(vnc_click_hack) |         await tractor.to_asyncio.run_task(vnc_click_hack) | ||||||
|  |     except OSError: | ||||||
|  |         return False | ||||||
| 
 | 
 | ||||||
|     # we don't really need the ``xdotool`` approach any more B) |     # we don't really need the ``xdotool`` approach any more B) | ||||||
|     return True |     return True | ||||||
|  | @ -850,15 +867,31 @@ async def open_symbol_search( | ||||||
|     # TODO: load user defined symbol set locally for fast search? |     # TODO: load user defined symbol set locally for fast search? | ||||||
|     await ctx.started({}) |     await ctx.started({}) | ||||||
| 
 | 
 | ||||||
|     async with open_data_client() as proxy: |     async with ( | ||||||
|  |         open_client_proxies() as (proxies, clients), | ||||||
|  |         open_data_client() as data_proxy, | ||||||
|  |     ): | ||||||
|         async with ctx.open_stream() as stream: |         async with ctx.open_stream() as stream: | ||||||
| 
 | 
 | ||||||
|             last = time.time() |             # select a non-history client for symbol search to lighten | ||||||
|  |             # the load in the main data node. | ||||||
|  |             proxy = data_proxy | ||||||
|  |             for name, proxy in proxies.items(): | ||||||
|  |                 if proxy is data_proxy: | ||||||
|  |                     continue | ||||||
|  |                 break | ||||||
| 
 | 
 | ||||||
|  |             ib_client = proxy._aio_ns.ib | ||||||
|  |             log.info(f'Using {ib_client} for symbol search') | ||||||
|  | 
 | ||||||
|  |             last = time.time() | ||||||
|             async for pattern in stream: |             async for pattern in stream: | ||||||
|                 log.debug(f'received {pattern}') |                 log.info(f'received {pattern}') | ||||||
|                 now = time.time() |                 now = time.time() | ||||||
| 
 | 
 | ||||||
|  |                 # this causes tractor hang... | ||||||
|  |                 # assert 0 | ||||||
|  | 
 | ||||||
|                 assert pattern, 'IB can not accept blank search pattern' |                 assert pattern, 'IB can not accept blank search pattern' | ||||||
| 
 | 
 | ||||||
|                 # throttle search requests to no faster than 1Hz |                 # throttle search requests to no faster than 1Hz | ||||||
|  | @ -886,7 +919,7 @@ async def open_symbol_search( | ||||||
| 
 | 
 | ||||||
|                     continue |                     continue | ||||||
| 
 | 
 | ||||||
|                 log.debug(f'searching for {pattern}') |                 log.info(f'searching for {pattern}') | ||||||
| 
 | 
 | ||||||
|                 last = time.time() |                 last = time.time() | ||||||
| 
 | 
 | ||||||
|  | @ -897,6 +930,8 @@ async def open_symbol_search( | ||||||
|                 async def stash_results(target: Awaitable[list]): |                 async def stash_results(target: Awaitable[list]): | ||||||
|                     stock_results.extend(await target) |                     stock_results.extend(await target) | ||||||
| 
 | 
 | ||||||
|  |                 for i in range(10): | ||||||
|  |                     with trio.move_on_after(3) as cs: | ||||||
|                         async with trio.open_nursery() as sn: |                         async with trio.open_nursery() as sn: | ||||||
|                             sn.start_soon( |                             sn.start_soon( | ||||||
|                                 stash_results, |                                 stash_results, | ||||||
|  | @ -909,17 +944,26 @@ async def open_symbol_search( | ||||||
|                             # trigger async request |                             # trigger async request | ||||||
|                             await trio.sleep(0) |                             await trio.sleep(0) | ||||||
| 
 | 
 | ||||||
|                     # match against our ad-hoc set immediately |                     if cs.cancelled_caught: | ||||||
|                     adhoc_matches = fuzzy.extractBests( |                         log.warning( | ||||||
|                         pattern, |                             f'Search timeout? {proxy._aio_ns.ib.client}' | ||||||
|                         list(_adhoc_futes_set), |  | ||||||
|                         score_cutoff=90, |  | ||||||
|                         ) |                         ) | ||||||
|                     log.info(f'fuzzy matched adhocs: {adhoc_matches}') |                         continue | ||||||
|                     adhoc_match_results = {} |                     else: | ||||||
|                     if adhoc_matches: |                         break | ||||||
|                         # TODO: do we need to pull contract details? | 
 | ||||||
|                         adhoc_match_results = {i[0]: {} for i in adhoc_matches} |                     # # match against our ad-hoc set immediately | ||||||
|  |                     # adhoc_matches = fuzzy.extractBests( | ||||||
|  |                     #     pattern, | ||||||
|  |                     #     list(_adhoc_futes_set), | ||||||
|  |                     #     score_cutoff=90, | ||||||
|  |                     # ) | ||||||
|  |                     # log.info(f'fuzzy matched adhocs: {adhoc_matches}') | ||||||
|  |                     # adhoc_match_results = {} | ||||||
|  |                     # if adhoc_matches: | ||||||
|  |                     #     # TODO: do we need to pull contract details? | ||||||
|  |                     #     adhoc_match_results = {i[0]: {} for i in | ||||||
|  |                     #     adhoc_matches} | ||||||
| 
 | 
 | ||||||
|                 log.debug(f'fuzzy matching stocks {stock_results}') |                 log.debug(f'fuzzy matching stocks {stock_results}') | ||||||
|                 stock_matches = fuzzy.extractBests( |                 stock_matches = fuzzy.extractBests( | ||||||
|  | @ -928,7 +972,8 @@ async def open_symbol_search( | ||||||
|                     score_cutoff=50, |                     score_cutoff=50, | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|                 matches = adhoc_match_results | { |                 # matches = adhoc_match_results | { | ||||||
|  |                 matches = { | ||||||
|                     item[0]: {} for item in stock_matches |                     item[0]: {} for item in stock_matches | ||||||
|                 } |                 } | ||||||
|                 # TODO: we used to deliver contract details |                 # TODO: we used to deliver contract details | ||||||
|  |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -0,0 +1,64 @@ | ||||||
|  | ``kraken`` backend | ||||||
|  | ------------------ | ||||||
|  | though they don't have the most liquidity of all the cexes, they sure are | ||||||
|  | accommodating to those of us who appreciate a little ``xmr``. | ||||||
|  | 
 | ||||||
|  | status | ||||||
|  | ****** | ||||||
|  | current support is *production grade* and both real-time data and order | ||||||
|  | management should be correct and fast. this backend is used by core devs | ||||||
|  | for live trading. | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | config | ||||||
|  | ****** | ||||||
|  | In order to get order mode support your ``brokers.toml`` | ||||||
|  | needs to have something like the following: | ||||||
|  | 
 | ||||||
|  | .. code:: toml | ||||||
|  | 
 | ||||||
|  |    [kraken] | ||||||
|  |    accounts.spot = 'spot' | ||||||
|  |    key_descr = "spot" | ||||||
|  |    api_key = "69696969696969696696969696969696969696969696969696969696" | ||||||
|  |    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | If everything works correctly you should see any current positions | ||||||
|  | loaded in the pps pane on chart load and you should also be able to | ||||||
|  | check your trade records in the file:: | ||||||
|  | 
 | ||||||
|  |     <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | An example ledger file will have entries written verbatim from the | ||||||
|  | trade events schema: | ||||||
|  | 
 | ||||||
|  | .. code:: toml | ||||||
|  | 
 | ||||||
|  |     [TFJBKK-SMBZS-VJ4UWS] | ||||||
|  |     ordertxid = "SMBZSA-7CNQU-3HWLNJ" | ||||||
|  |     postxid = "SMBZSE-M7IF5-CFI7LT" | ||||||
|  |     pair = "XXMRZEUR" | ||||||
|  |     time = 1655691993.4133966 | ||||||
|  |     type = "buy" | ||||||
|  |     ordertype = "limit" | ||||||
|  |     price = "103.97000000" | ||||||
|  |     cost = "499.99999977" | ||||||
|  |     fee = "0.80000000" | ||||||
|  |     vol = "4.80907954" | ||||||
|  |     margin = "0.00000000" | ||||||
|  |     misc = "" | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | your ``pps.toml`` file will have position entries like, | ||||||
|  | 
 | ||||||
|  | .. code:: toml | ||||||
|  | 
 | ||||||
|  |    [kraken.spot."xmreur.kraken"] | ||||||
|  |    size = 4.80907954 | ||||||
|  |    ppu = 103.97000000 | ||||||
|  |    bsuid = "XXMRZEUR" | ||||||
|  |    clears = [ | ||||||
|  |     { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" }, | ||||||
|  |    ] | ||||||
|  | @ -0,0 +1,61 @@ | ||||||
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Kraken backend.

Sub-modules within break into the core functionalities:

- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
  wrapping around the kraken web (REST) API.

'''

from piker.log import get_logger

# NOTE: ``log`` must be bound *before* the sub-module imports below
# since ``api.py`` (and presumably the others) does ``from . import
# log`` at import time.
log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import (
    trades_dialogue,
    norm_trade_records,
)

# public backend API re-exported at the package top level
__all__ = [
    'get_client',
    'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]
|  | @ -0,0 +1,540 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | Kraken web API wrapping. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | from contextlib import asynccontextmanager as acm | ||||||
|  | from datetime import datetime | ||||||
|  | import itertools | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     Optional, | ||||||
|  |     Union, | ||||||
|  | ) | ||||||
|  | import time | ||||||
|  | 
 | ||||||
|  | from bidict import bidict | ||||||
|  | import pendulum | ||||||
|  | import asks | ||||||
|  | from fuzzywuzzy import process as fuzzy | ||||||
|  | import numpy as np | ||||||
|  | import urllib.parse | ||||||
|  | import hashlib | ||||||
|  | import hmac | ||||||
|  | import base64 | ||||||
|  | import trio | ||||||
|  | 
 | ||||||
|  | from piker import config | ||||||
|  | from piker.brokers._util import ( | ||||||
|  |     resproc, | ||||||
|  |     SymbolNotFound, | ||||||
|  |     BrokerError, | ||||||
|  |     DataThrottle, | ||||||
|  | ) | ||||||
|  | from piker.pp import Transaction | ||||||
|  | from . import log | ||||||
|  | 
 | ||||||
# <uri>/<version>/
_url = 'https://api.kraken.com/0'


# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('count', int),
    ('bar_wap', float),  # kraken's "vwap" column, renamed for piker
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

# flag read by chart consumers to draw the wap curve from history
# (presumably; confirm against the UI side).
_show_wap_in_history = True

# map piker's generic symbol-info field names to kraken's
# 'AssetPairs' response keys.
_symbol_info_translation: dict[str, str] = {
    'tick_decimals': 'pair_decimals',
}
|  | 
 | ||||||
|  | 
 | ||||||
def get_config() -> dict[str, Any]:
    '''
    Load and return the ``[kraken]`` section from the user's broker
    config file, or an empty ``dict`` (with a warning) when no such
    section exists.

    '''
    conf, path = config.load()
    section = conf.get('kraken')

    if section is not None:
        return section

    log.warning(f'No config section found for kraken in {path}')
    return {}
|  | 
 | ||||||
|  | 
 | ||||||
def get_kraken_signature(
    urlpath: str,
    data: dict[str, Any],
    secret: str
) -> str:
    '''
    Compute the ``API-Sign`` header value for a private endpoint call:
    HMAC-SHA512 of ``urlpath + SHA256(nonce + urlencoded-postdata)``
    keyed with the base64-decoded API ``secret``, then base64-encoded.

    '''
    post_body = urllib.parse.urlencode(data)
    sha_digest = hashlib.sha256(
        (str(data['nonce']) + post_body).encode()
    ).digest()

    mac = hmac.new(
        base64.b64decode(secret),
        urlpath.encode() + sha_digest,
        hashlib.sha512,
    )
    return base64.b64encode(mac.digest()).decode()
|  | 
 | ||||||
|  | 
 | ||||||
class InvalidKey(ValueError):
    '''
    Client-side exception for kraken's ``EAPI:Invalid key`` error.

    This error is returned when the API key used for the call is
    either expired or disabled, please review the API key in your
    Settings -> API tab of account management or generate a new one
    and update your application.

    '''
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Client: | ||||||
|  | 
 | ||||||
|  |     # global symbol normalization table | ||||||
|  |     _ntable: dict[str, str] = {} | ||||||
|  |     _atable: bidict[str, str] = bidict() | ||||||
|  | 
 | ||||||
|  |     def __init__( | ||||||
|  |         self, | ||||||
|  |         config: dict[str, str], | ||||||
|  |         name: str = '', | ||||||
|  |         api_key: str = '', | ||||||
|  |         secret: str = '' | ||||||
|  |     ) -> None: | ||||||
|  |         self._sesh = asks.Session(connections=4) | ||||||
|  |         self._sesh.base_location = _url | ||||||
|  |         self._sesh.headers.update({ | ||||||
|  |             'User-Agent': | ||||||
|  |                 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)' | ||||||
|  |         }) | ||||||
|  |         self.conf: dict[str, str] = config | ||||||
|  |         self._pairs: list[str] = [] | ||||||
|  |         self._name = name | ||||||
|  |         self._api_key = api_key | ||||||
|  |         self._secret = secret | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def pairs(self) -> dict[str, Any]: | ||||||
|  |         if self._pairs is None: | ||||||
|  |             raise RuntimeError( | ||||||
|  |                 "Make sure to run `cache_symbols()` on startup!" | ||||||
|  |             ) | ||||||
|  |             # retreive and cache all symbols | ||||||
|  | 
 | ||||||
|  |         return self._pairs | ||||||
|  | 
 | ||||||
|  |     async def _public( | ||||||
|  |         self, | ||||||
|  |         method: str, | ||||||
|  |         data: dict, | ||||||
|  |     ) -> dict[str, Any]: | ||||||
|  |         resp = await self._sesh.post( | ||||||
|  |             path=f'/public/{method}', | ||||||
|  |             json=data, | ||||||
|  |             timeout=float('inf') | ||||||
|  |         ) | ||||||
|  |         return resproc(resp, log) | ||||||
|  | 
 | ||||||
|  |     async def _private( | ||||||
|  |         self, | ||||||
|  |         method: str, | ||||||
|  |         data: dict, | ||||||
|  |         uri_path: str | ||||||
|  |     ) -> dict[str, Any]: | ||||||
|  |         headers = { | ||||||
|  |             'Content-Type': | ||||||
|  |                 'application/x-www-form-urlencoded', | ||||||
|  |             'API-Key': | ||||||
|  |                 self._api_key, | ||||||
|  |             'API-Sign': | ||||||
|  |                 get_kraken_signature(uri_path, data, self._secret) | ||||||
|  |         } | ||||||
|  |         resp = await self._sesh.post( | ||||||
|  |             path=f'/private/{method}', | ||||||
|  |             data=data, | ||||||
|  |             headers=headers, | ||||||
|  |             timeout=float('inf') | ||||||
|  |         ) | ||||||
|  |         return resproc(resp, log) | ||||||
|  | 
 | ||||||
|  |     async def endpoint( | ||||||
|  |         self, | ||||||
|  |         method: str, | ||||||
|  |         data: dict[str, Any] | ||||||
|  | 
 | ||||||
|  |     ) -> dict[str, Any]: | ||||||
|  |         uri_path = f'/0/private/{method}' | ||||||
|  |         data['nonce'] = str(int(1000*time.time())) | ||||||
|  |         return await self._private(method, data, uri_path) | ||||||
|  | 
 | ||||||
|  |     async def get_balances( | ||||||
|  |         self, | ||||||
|  |     ) -> dict[str, float]: | ||||||
|  |         ''' | ||||||
|  |         Return the set of asset balances for this account | ||||||
|  |         by symbol. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         resp = await self.endpoint( | ||||||
|  |             'Balance', | ||||||
|  |             {}, | ||||||
|  |         ) | ||||||
|  |         by_bsuid = resp['result'] | ||||||
|  |         return { | ||||||
|  |             self._atable[sym].lower(): float(bal) | ||||||
|  |             for sym, bal in by_bsuid.items() | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |     async def get_assets(self) -> dict[str, dict]: | ||||||
|  |         resp = await self._public('Assets', {}) | ||||||
|  |         return resp['result'] | ||||||
|  | 
 | ||||||
|  |     async def cache_assets(self) -> None: | ||||||
|  |         assets = self.assets = await self.get_assets() | ||||||
|  |         for bsuid, info in assets.items(): | ||||||
|  |             self._atable[bsuid] = info['altname'] | ||||||
|  | 
 | ||||||
|  |     async def get_trades( | ||||||
|  |         self, | ||||||
|  |         fetch_limit: int = 10, | ||||||
|  | 
 | ||||||
|  |     ) -> dict[str, Any]: | ||||||
|  |         ''' | ||||||
|  |         Get the trades (aka cleared orders) history from the rest endpoint: | ||||||
|  |         https://docs.kraken.com/rest/#operation/getTradeHistory | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         ofs = 0 | ||||||
|  |         trades_by_id: dict[str, Any] = {} | ||||||
|  | 
 | ||||||
|  |         for i in itertools.count(): | ||||||
|  |             if i >= fetch_limit: | ||||||
|  |                 break | ||||||
|  | 
 | ||||||
|  |             # increment 'ofs' pagination offset | ||||||
|  |             ofs = i*50 | ||||||
|  | 
 | ||||||
|  |             resp = await self.endpoint( | ||||||
|  |                 'TradesHistory', | ||||||
|  |                 {'ofs': ofs}, | ||||||
|  |             ) | ||||||
|  |             by_id = resp['result']['trades'] | ||||||
|  |             trades_by_id.update(by_id) | ||||||
|  | 
 | ||||||
|  |             # we can get up to 50 results per query | ||||||
|  |             if ( | ||||||
|  |                 len(by_id) < 50 | ||||||
|  |             ): | ||||||
|  |                 err = resp.get('error') | ||||||
|  |                 if err: | ||||||
|  |                     raise BrokerError(err) | ||||||
|  | 
 | ||||||
|  |                 # we know we received the max amount of | ||||||
|  |                 # trade results so there may be more history. | ||||||
|  |                 # catch the end of the trades | ||||||
|  |                 count = resp['result']['count'] | ||||||
|  |                 break | ||||||
|  | 
 | ||||||
|  |         # santity check on update | ||||||
|  |         assert count == len(trades_by_id.values()) | ||||||
|  |         return trades_by_id | ||||||
|  | 
 | ||||||
|  |     async def get_xfers( | ||||||
|  |         self, | ||||||
|  |         asset: str, | ||||||
|  |         src_asset: str = '', | ||||||
|  | 
 | ||||||
|  |     ) -> dict[str, Transaction]: | ||||||
|  |         ''' | ||||||
|  |         Get asset balance transfer transactions. | ||||||
|  | 
 | ||||||
|  |         Currently only withdrawals are supported. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         xfers: list[dict] = (await self.endpoint( | ||||||
|  |             'WithdrawStatus', | ||||||
|  |             {'asset': asset}, | ||||||
|  |         ))['result'] | ||||||
|  | 
 | ||||||
|  |         # eg. resp schema: | ||||||
|  |         # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset': | ||||||
|  |         #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid': | ||||||
|  |         #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44', | ||||||
|  |         #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z', | ||||||
|  |         #     'amount': '0.00300726', 'fee': '0.00001000', 'time': | ||||||
|  |         #     1658347714, 'status': 'Success'}]} | ||||||
|  | 
 | ||||||
|  |         trans: dict[str, Transaction] = {} | ||||||
|  |         for entry in xfers: | ||||||
|  |             # look up the normalized name | ||||||
|  |             asset = self._atable[entry['asset']].lower() | ||||||
|  | 
 | ||||||
|  |             # XXX: this is in the asset units (likely) so it isn't | ||||||
|  |             # quite the same as a commisions cost necessarily..) | ||||||
|  |             cost = float(entry['fee']) | ||||||
|  | 
 | ||||||
|  |             tran = Transaction( | ||||||
|  |                 fqsn=asset + '.kraken', | ||||||
|  |                 tid=entry['txid'], | ||||||
|  |                 dt=pendulum.from_timestamp(entry['time']), | ||||||
|  |                 bsuid=f'{asset}{src_asset}', | ||||||
|  |                 size=-1*( | ||||||
|  |                     float(entry['amount']) | ||||||
|  |                     + | ||||||
|  |                     cost | ||||||
|  |                 ), | ||||||
|  |                 # since this will be treated as a "sell" it | ||||||
|  |                 # shouldn't be needed to compute the be price. | ||||||
|  |                 price='NaN', | ||||||
|  | 
 | ||||||
|  |                 # XXX: see note above | ||||||
|  |                 cost=0, | ||||||
|  |             ) | ||||||
|  |             trans[tran.tid] = tran | ||||||
|  | 
 | ||||||
|  |         return trans | ||||||
|  | 
 | ||||||
|  |     async def submit_limit( | ||||||
|  |         self, | ||||||
|  |         symbol: str, | ||||||
|  |         price: float, | ||||||
|  |         action: str, | ||||||
|  |         size: float, | ||||||
|  |         reqid: str = None, | ||||||
|  |         validate: bool = False  # set True test call without a real submission | ||||||
|  | 
 | ||||||
|  |     ) -> dict: | ||||||
|  |         ''' | ||||||
|  |         Place an order and return integer request id provided by client. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         # Build common data dict for common keys from both endpoints | ||||||
|  |         data = { | ||||||
|  |             "pair": symbol, | ||||||
|  |             "price": str(price), | ||||||
|  |             "validate": validate | ||||||
|  |         } | ||||||
|  |         if reqid is None: | ||||||
|  |             # Build order data for kraken api | ||||||
|  |             data |= { | ||||||
|  |                 "ordertype": "limit", | ||||||
|  |                 "type": action, | ||||||
|  |                 "volume": str(size), | ||||||
|  |             } | ||||||
|  |             return await self.endpoint('AddOrder', data) | ||||||
|  | 
 | ||||||
|  |         else: | ||||||
|  |             # Edit order data for kraken api | ||||||
|  |             data["txid"] = reqid | ||||||
|  |             return await self.endpoint('EditOrder', data) | ||||||
|  | 
 | ||||||
|  |     async def submit_cancel( | ||||||
|  |         self, | ||||||
|  |         reqid: str, | ||||||
|  |     ) -> dict: | ||||||
|  |         ''' | ||||||
|  |         Send cancel request for order id ``reqid``. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         # txid is a transaction id given by kraken | ||||||
|  |         return await self.endpoint('CancelOrder', {"txid": reqid}) | ||||||
|  | 
 | ||||||
|  |     async def symbol_info( | ||||||
|  |         self, | ||||||
|  |         pair: Optional[str] = None, | ||||||
|  | 
 | ||||||
|  |     ) -> dict[str, dict[str, str]]: | ||||||
|  | 
 | ||||||
|  |         if pair is not None: | ||||||
|  |             pairs = {'pair': pair} | ||||||
|  |         else: | ||||||
|  |             pairs = None  # get all pairs | ||||||
|  | 
 | ||||||
|  |         resp = await self._public('AssetPairs', pairs) | ||||||
|  |         err = resp['error'] | ||||||
|  |         if err: | ||||||
|  |             symbolname = pairs['pair'] if pair else None | ||||||
|  |             raise SymbolNotFound(f'{symbolname}.kraken') | ||||||
|  | 
 | ||||||
|  |         pairs = resp['result'] | ||||||
|  | 
 | ||||||
|  |         if pair is not None: | ||||||
|  |             _, data = next(iter(pairs.items())) | ||||||
|  |             return data | ||||||
|  |         else: | ||||||
|  |             return pairs | ||||||
|  | 
 | ||||||
|  |     async def cache_symbols( | ||||||
|  |         self, | ||||||
|  |     ) -> dict: | ||||||
|  |         if not self._pairs: | ||||||
|  |             self._pairs = await self.symbol_info() | ||||||
|  | 
 | ||||||
|  |             ntable = {} | ||||||
|  |             for restapikey, info in self._pairs.items(): | ||||||
|  |                 ntable[restapikey] = ntable[info['wsname']] = info['altname'] | ||||||
|  | 
 | ||||||
|  |             self._ntable.update(ntable) | ||||||
|  | 
 | ||||||
|  |         return self._pairs | ||||||
|  | 
 | ||||||
|  |     async def search_symbols( | ||||||
|  |         self, | ||||||
|  |         pattern: str, | ||||||
|  |         limit: int = None, | ||||||
|  |     ) -> dict[str, Any]: | ||||||
|  |         if self._pairs is not None: | ||||||
|  |             data = self._pairs | ||||||
|  |         else: | ||||||
|  |             data = await self.symbol_info() | ||||||
|  | 
 | ||||||
|  |         matches = fuzzy.extractBests( | ||||||
|  |             pattern, | ||||||
|  |             data, | ||||||
|  |             score_cutoff=50, | ||||||
|  |         ) | ||||||
|  |         # repack in dict form | ||||||
|  |         return {item[0]['altname']: item[0] for item in matches} | ||||||
|  | 
 | ||||||
|  |     async def bars( | ||||||
|  |         self, | ||||||
|  |         symbol: str = 'XBTUSD', | ||||||
|  | 
 | ||||||
|  |         # UTC 2017-07-02 12:53:20 | ||||||
|  |         since: Optional[Union[int, datetime]] = None, | ||||||
|  |         count: int = 720,  # <- max allowed per query | ||||||
|  |         as_np: bool = True, | ||||||
|  | 
 | ||||||
|  |     ) -> dict: | ||||||
|  | 
 | ||||||
|  |         if since is None: | ||||||
|  |             since = pendulum.now('UTC').start_of('minute').subtract( | ||||||
|  |                 minutes=count).timestamp() | ||||||
|  | 
 | ||||||
|  |         elif isinstance(since, int): | ||||||
|  |             since = pendulum.from_timestamp(since).timestamp() | ||||||
|  | 
 | ||||||
|  |         else:  # presumably a pendulum datetime | ||||||
|  |             since = since.timestamp() | ||||||
|  | 
 | ||||||
|  |         # UTC 2017-07-02 12:53:20 is oldest seconds value | ||||||
|  |         since = str(max(1499000000, int(since))) | ||||||
|  |         json = await self._public( | ||||||
|  |             'OHLC', | ||||||
|  |             data={ | ||||||
|  |                 'pair': symbol, | ||||||
|  |                 'since': since, | ||||||
|  |             }, | ||||||
|  |         ) | ||||||
|  |         try: | ||||||
|  |             res = json['result'] | ||||||
|  |             res.pop('last') | ||||||
|  |             bars = next(iter(res.values())) | ||||||
|  | 
 | ||||||
|  |             new_bars = [] | ||||||
|  | 
 | ||||||
|  |             first = bars[0] | ||||||
|  |             last_nz_vwap = first[-3] | ||||||
|  |             if last_nz_vwap == 0: | ||||||
|  |                 # use close if vwap is zero | ||||||
|  |                 last_nz_vwap = first[-4] | ||||||
|  | 
 | ||||||
|  |             # convert all fields to native types | ||||||
|  |             for i, bar in enumerate(bars): | ||||||
|  |                 # normalize weird zero-ed vwap values..cmon kraken.. | ||||||
|  |                 # indicates vwap didn't change since last bar | ||||||
|  |                 vwap = float(bar.pop(-3)) | ||||||
|  |                 if vwap != 0: | ||||||
|  |                     last_nz_vwap = vwap | ||||||
|  |                 if vwap == 0: | ||||||
|  |                     vwap = last_nz_vwap | ||||||
|  | 
 | ||||||
|  |                 # re-insert vwap as the last of the fields | ||||||
|  |                 bar.append(vwap) | ||||||
|  | 
 | ||||||
|  |                 new_bars.append( | ||||||
|  |                     (i,) + tuple( | ||||||
|  |                         ftype(bar[j]) for j, (name, ftype) in enumerate( | ||||||
|  |                             _ohlc_dtype[1:] | ||||||
|  |                         ) | ||||||
|  |                     ) | ||||||
|  |                 ) | ||||||
|  |             array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars | ||||||
|  |             return array | ||||||
|  |         except KeyError: | ||||||
|  |             errmsg = json['error'][0] | ||||||
|  | 
 | ||||||
|  |             if 'not found' in errmsg: | ||||||
|  |                 raise SymbolNotFound(errmsg + f': {symbol}') | ||||||
|  | 
 | ||||||
|  |             elif 'Too many requests' in errmsg: | ||||||
|  |                 raise DataThrottle(f'{symbol}') | ||||||
|  | 
 | ||||||
|  |             else: | ||||||
|  |                 raise BrokerError(errmsg) | ||||||
|  | 
 | ||||||
    @classmethod
    def normalize_symbol(
        cls,
        ticker: str
    ) -> str:
        '''
        Normalize symbol names to a 3x3 pair from the global
        definition map which we build out from the data retrieved from
        the 'AssetPairs' endpoint, see methods above.

        Raises ``ValueError`` when the mapped name is not a 6-char
        (3x3) pair; lookup misses propagate as ``KeyError`` from
        ``cls._ntable`` (built elsewhere — confirm).

        '''
        # map through the normalization table (class-level bidict-ish
        # mapping populated from the 'AssetPairs' endpoint, presumably)
        ticker = cls._ntable[ticker]
        symlen = len(ticker)
        if symlen != 6:
            raise ValueError(f'Unhandled symbol: {ticker}')

        # piker style is always lowercase symbols
        return ticker.lower()
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def get_client() -> Client:
    '''
    Client factory: build an (optionally authenticated) ``Client``,
    pre-load its asset and symbol caches concurrently, then yield it.

    Credentials come from ``get_config()``; with no config present an
    unauthenticated client is produced instead.

    '''
    conf = get_config()
    if not conf:
        client = Client({})
    else:
        client = Client(
            conf,
            name=conf['key_descr'],
            api_key=conf['api_key'],
            secret=conf['secret'],
        )

    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as tg:
        tg.start_soon(client.cache_assets)
        await client.cache_symbols()

    yield client
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -0,0 +1,499 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | Real-time and historical data feed endpoints. | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | from contextlib import asynccontextmanager as acm | ||||||
|  | from datetime import datetime | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     Optional, | ||||||
|  |     Callable, | ||||||
|  | ) | ||||||
|  | import time | ||||||
|  | 
 | ||||||
|  | from async_generator import aclosing | ||||||
|  | from fuzzywuzzy import process as fuzzy | ||||||
|  | import numpy as np | ||||||
|  | import pendulum | ||||||
|  | from trio_typing import TaskStatus | ||||||
|  | import tractor | ||||||
|  | import trio | ||||||
|  | 
 | ||||||
|  | from piker._cacheables import open_cached_client | ||||||
|  | from piker.brokers._util import ( | ||||||
|  |     BrokerError, | ||||||
|  |     DataThrottle, | ||||||
|  |     DataUnavailable, | ||||||
|  | ) | ||||||
|  | from piker.log import get_console_log | ||||||
|  | from piker.data import ShmArray | ||||||
|  | from piker.data.types import Struct | ||||||
|  | from piker.data._web_bs import open_autorecon_ws, NoBsWs | ||||||
|  | from . import log | ||||||
|  | from .api import ( | ||||||
|  |     Client, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    '''
    Tradable asset-pair info struct mirroring the fields of kraken's
    ``AssetPairs`` (REST) endpoint response schema.

    '''
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
|  | 
 | ||||||
|  | 
 | ||||||
class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
        https://docs.kraken.com/websockets/#message-ohlc

    '''
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1  (name-interval)
    pair: str  # fx pair
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
    high: float  # High price within interval
    low: float  # Low price within interval
    close: float  # Close price of interval
    vwap: float  # Volume weighted average price within interval
    volume: float  # Accumulated volume **within interval**
    count: int  # Number of trades within interval
    # (sampled) generated tick data
    # NOTE(review): mutable class-level default — presumably ``Struct``
    # copies defaults per instance (msgspec semantics); confirm.
    ticks: list[Any] = []
|  | 
 | ||||||
|  | 
 | ||||||
|  | async def stream_messages( | ||||||
|  |     ws: NoBsWs, | ||||||
|  | ): | ||||||
|  |     ''' | ||||||
|  |     Message stream parser and heartbeat handler. | ||||||
|  | 
 | ||||||
|  |     Deliver ws subscription messages as well as handle heartbeat logic | ||||||
|  |     though a single async generator. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     too_slow_count = last_hb = 0 | ||||||
|  | 
 | ||||||
|  |     while True: | ||||||
|  | 
 | ||||||
|  |         with trio.move_on_after(5) as cs: | ||||||
|  |             msg = await ws.recv_msg() | ||||||
|  | 
 | ||||||
|  |         # trigger reconnection if heartbeat is laggy | ||||||
|  |         if cs.cancelled_caught: | ||||||
|  | 
 | ||||||
|  |             too_slow_count += 1 | ||||||
|  | 
 | ||||||
|  |             if too_slow_count > 20: | ||||||
|  |                 log.warning( | ||||||
|  |                     "Heartbeat is too slow, resetting ws connection") | ||||||
|  | 
 | ||||||
|  |                 await ws._connect() | ||||||
|  |                 too_slow_count = 0 | ||||||
|  |                 continue | ||||||
|  | 
 | ||||||
|  |         match msg: | ||||||
|  |             case {'event': 'heartbeat'}: | ||||||
|  |                 now = time.time() | ||||||
|  |                 delay = now - last_hb | ||||||
|  |                 last_hb = now | ||||||
|  | 
 | ||||||
|  |                 # XXX: why tf is this not printing without --tl flag? | ||||||
|  |                 log.debug(f"Heartbeat after {delay}") | ||||||
|  |                 # print(f"Heartbeat after {delay}") | ||||||
|  | 
 | ||||||
|  |                 continue | ||||||
|  | 
 | ||||||
|  |             case _: | ||||||
|  |                 # passthrough sub msgs | ||||||
|  |                 yield msg | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
async def process_data_feed_msgs(
    ws: NoBsWs,
):
    '''
    Parse and pack data feed messages.

    Async generator yielding ``(type, payload)`` pairs where ``type``
    is ``'ohlc'`` (an ``OHLC`` struct) or ``'l1'`` (a quote ``dict``
    of bid/ask ticks). Raises ``BrokerError`` on any msg carrying an
    ``errorMessage`` field; subscription-status events are logged and
    swallowed.

    '''
    async for msg in stream_messages(ws):
        match msg:
            case {
                'errorMessage': errmsg
            }:
                raise BrokerError(errmsg)

            case {
                'event': 'subscriptionStatus',
            } as sub:
                log.info(
                    'WS subscription is active:\n'
                    f'{sub}'
                )
                continue

            # kraken data msgs are arrays of the form:
            # [chan_id, payload, ..., chan_name, pair]
            case [
                chan_id,
                *payload_array,
                chan_name,
                pair
            ]:
                if 'ohlc' in chan_name:
                    ohlc = OHLC(
                        chan_id,
                        chan_name,
                        pair,
                        *payload_array[0]
                    )
                    # coerce the stringly-typed ws fields to the
                    # struct's annotated types (presumably; see
                    # ``Struct.typecast`` — confirm)
                    ohlc.typecast()
                    yield 'ohlc', ohlc

                elif 'spread' in chan_name:

                    bid, ask, ts, bsize, asize = map(
                        float, payload_array[0])

                    # TODO: really makes you think IB has a horrible API...
                    quote = {
                        'symbol': pair.replace('/', ''),
                        'ticks': [
                            {'type': 'bid', 'price': bid, 'size': bsize},
                            {'type': 'bsize', 'price': bid, 'size': bsize},

                            {'type': 'ask', 'price': ask, 'size': asize},
                            {'type': 'asize', 'price': ask, 'size': asize},
                        ],
                    }
                    yield 'l1', quote

                # elif 'book' in msg[-2]:
                #     chan_id, *payload_array, chan_name, pair = msg
                #     print(msg)

            case _:
                print(f'UNHANDLED MSG: {msg}')
                # yield msg
|  | 
 | ||||||
|  | 
 | ||||||
def normalize(
    ohlc: 'OHLC',

) -> tuple[str, dict]:
    '''
    Convert an ``OHLC`` struct into a piker-style quote ``dict``.

    Returns a ``(topic, quote)`` pair: ``topic`` is the lowercased,
    de-slashed pair name; ``quote`` is the flattened struct dict with
    ``broker_ts``/``brokerd_ts`` timestamps, ``symbol``/``pair``
    normalized (slash removed), ``last`` (close) and ``bar_wap``
    (vwap) added.

    NOTE: the original annotated this ``-> dict`` but it has always
    returned a 2-tuple; annotation fixed to match.

    '''
    quote = ohlc.to_dict()
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercases symbols.
    topic = quote['pair'].replace('/', '').lower()

    # print(quote)
    return topic, quote
|  | 
 | ||||||
|  | 
 | ||||||
@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:
    '''
    History-backfill endpoint factory: yield an async ``get_ohlc()``
    frame getter for ``symbol`` along with rate-limit hints for the
    feed layer.

    '''
    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more than the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            # only ever serve a single (the most recent) frame; any
            # further request signals "no more data" upstream — see
            # the 720-bar note above.
            nonlocal queries
            if queries > 0:
                raise DataUnavailable

            count = 0
            # NOTE(review): ``count`` only increments on success, so a
            # persistent ``DataThrottle`` retries forever (1s apart)
            # and the ``<= 3`` bound never triggers; if the loop ever
            # *were* exhausted without success, ``array`` below would
            # be unbound — confirm intended.
            while count <= 3:
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    count += 1
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        # one query "erlang" at 1 req/sec keeps us under kraken's
        # public-endpoint throttle
        yield get_ohlc, {'erlangs': 1, 'rate': 1}
|  | 
 | ||||||
|  | 
 | ||||||
async def backfill_bars(

    sym: str,
    shm: ShmArray,  # type: ignore # noqa
    count: int = 10,  # NOTE: any more and we'll overrun the underlying buffer
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Fill historical bars into shared mem / storage afap.

    Pulls one batch of OHLC bars for ``sym``, pushes it into ``shm``,
    then reports readiness (and this task's cancel scope) via
    ``task_status``.

    '''
    with trio.CancelScope() as cancel_scope:
        async with open_cached_client('kraken') as kraken:
            history = await kraken.bars(symbol=sym)
            shm.push(history)
            task_status.started(cancel_scope)
|  | 
 | ||||||
|  | 
 | ||||||
async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    Opens an auto-reconnecting ws, subscribes ``ohlc`` + ``spread``
    channels for every symbol, reports the first quote (plus per-sym
    init msgs) via ``task_status``, then forwards normalized quotes
    over ``send_chan`` forever.

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}   # sym -> kraken wsname
    sym_infos = {}  # sym -> flattened Pair info dict

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()

            si = Pair(**await client.symbol_info(sym))  # validation
            syminfo = si.to_dict()
            # derive tick sizes from kraken's decimal-place counts
            syminfo['price_tick_size'] = 1 / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals
            syminfo['asset_type'] = 'crypto'
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            # NOTE(review): ``sym`` here is the (uppercased) leftover
            # loop variable — only correct for single-symbol feeds;
            # confirm vs. multi-symbol requests.
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: NoBsWs):

            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'ohlc',
                    'interval': 1,
                },
            }

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'spread',
                    # 'depth': 10}
                },
            }

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            await ws.send_msg({
                'pair': list(ws_pairs.values()),
                'event': 'unsubscribe',
                'subscription': ['ohlc', 'spread'],
            })

            # XXX: do we need to ack the unsub?
            # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with (
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
            ) as ws,
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs,  quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    # only emit a trade tick when volume actually
                    # changed within the interval
                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                await send_chan.send({topic: quote})
|  | 
 | ||||||
|  | 
 | ||||||
@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    '''
    Symbol-search relay: serve fuzzy-matched pair info to a remote
    peer over a ``tractor`` bidirectional stream.

    '''
    async with open_cached_client('kraken') as client:

        # pre-load the full symbol set so every query below is a
        # purely local, in-memory fuzzy match.
        symcache = await client.cache_symbols()
        await ctx.started(symcache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                scored = fuzzy.extractBests(
                    pattern,
                    symcache,
                    score_cutoff=50,
                )
                # repack as an ``altname -> pair-info`` mapping
                repacked = {
                    entry[0]['altname']: entry[0]
                    for entry in scored
                }
                await stream.send(repacked)
|  | @ -22,54 +22,10 @@ from enum import Enum | ||||||
| from typing import Optional | from typing import Optional | ||||||
| 
 | 
 | ||||||
| from bidict import bidict | from bidict import bidict | ||||||
| from pydantic import BaseModel, validator |  | ||||||
| 
 | 
 | ||||||
| from ..data._source import Symbol | from ..data._source import Symbol | ||||||
| from ._messages import BrokerdPosition, Status | from ..data.types import Struct | ||||||
| 
 | from ..pp import Position | ||||||
| 
 |  | ||||||
| class Position(BaseModel): |  | ||||||
|     ''' |  | ||||||
|     Basic pp (personal position) model with attached fills history. |  | ||||||
| 
 |  | ||||||
|     This type should be IPC wire ready? |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     symbol: Symbol |  | ||||||
| 
 |  | ||||||
|     # last size and avg entry price |  | ||||||
|     size: float |  | ||||||
|     avg_price: float  # TODO: contextual pricing |  | ||||||
| 
 |  | ||||||
|     # ordered record of known constituent trade messages |  | ||||||
|     fills: list[Status] = [] |  | ||||||
| 
 |  | ||||||
|     def update_from_msg( |  | ||||||
|         self, |  | ||||||
|         msg: BrokerdPosition, |  | ||||||
| 
 |  | ||||||
|     ) -> None: |  | ||||||
| 
 |  | ||||||
|         # XXX: better place to do this? |  | ||||||
|         symbol = self.symbol |  | ||||||
| 
 |  | ||||||
|         lot_size_digits = symbol.lot_size_digits |  | ||||||
|         avg_price, size = ( |  | ||||||
|             round(msg['avg_price'], ndigits=symbol.tick_size_digits), |  | ||||||
|             round(msg['size'], ndigits=lot_size_digits), |  | ||||||
|         ) |  | ||||||
| 
 |  | ||||||
|         self.avg_price = avg_price |  | ||||||
|         self.size = size |  | ||||||
| 
 |  | ||||||
|     @property |  | ||||||
|     def dsize(self) -> float: |  | ||||||
|         ''' |  | ||||||
|         The "dollar" size of the pp, normally in trading (fiat) unit |  | ||||||
|         terms. |  | ||||||
| 
 |  | ||||||
|         ''' |  | ||||||
|         return self.avg_price * self.size |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _size_units = bidict({ | _size_units = bidict({ | ||||||
|  | @ -84,33 +40,30 @@ SizeUnit = Enum( | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class Allocator(BaseModel): | class Allocator(Struct): | ||||||
| 
 |  | ||||||
|     class Config: |  | ||||||
|         validate_assignment = True |  | ||||||
|         copy_on_model_validation = False |  | ||||||
|         arbitrary_types_allowed = True |  | ||||||
| 
 |  | ||||||
|         # required to get the account validator lookup working? |  | ||||||
|         extra = 'allow' |  | ||||||
|         underscore_attrs_are_private = False |  | ||||||
| 
 | 
 | ||||||
|     symbol: Symbol |     symbol: Symbol | ||||||
|     account: Optional[str] = 'paper' |     account: Optional[str] = 'paper' | ||||||
|  | 
 | ||||||
|  |     _size_units: bidict[str, Optional[str]] = _size_units | ||||||
|  | 
 | ||||||
|     # TODO: for enums this clearly doesn't fucking work, you can't set |     # TODO: for enums this clearly doesn't fucking work, you can't set | ||||||
|     # a default at startup by passing in a `dict` but yet you can set |     # a default at startup by passing in a `dict` but yet you can set | ||||||
|     # that value through assignment..for wtv cucked reason.. honestly, pure |     # that value through assignment..for wtv cucked reason.. honestly, pure | ||||||
|     # unintuitive garbage. |     # unintuitive garbage. | ||||||
|     size_unit: str = 'currency' |     _size_unit: str = 'currency' | ||||||
|     _size_units: dict[str, Optional[str]] = _size_units |  | ||||||
| 
 | 
 | ||||||
|     @validator('size_unit', pre=True) |     @property | ||||||
|     def maybe_lookup_key(cls, v): |     def size_unit(self) -> str: | ||||||
|         # apply the corresponding enum key for the text "description" value |         return self._size_unit | ||||||
|  | 
 | ||||||
|  |     @size_unit.setter | ||||||
|  |     def size_unit(self, v: str) -> Optional[str]: | ||||||
|         if v not in _size_units: |         if v not in _size_units: | ||||||
|             return _size_units.inverse[v] |             v = _size_units.inverse[v] | ||||||
| 
 | 
 | ||||||
|         assert v in _size_units |         assert v in _size_units | ||||||
|  |         self._size_unit = v | ||||||
|         return v |         return v | ||||||
| 
 | 
 | ||||||
|     # TODO: if we ever want ot support non-uniform entry-slot-proportion |     # TODO: if we ever want ot support non-uniform entry-slot-proportion | ||||||
|  | @ -143,7 +96,7 @@ class Allocator(BaseModel): | ||||||
|     def next_order_info( |     def next_order_info( | ||||||
|         self, |         self, | ||||||
| 
 | 
 | ||||||
|         # we only need a startup size for exit calcs, we can the |         # we only need a startup size for exit calcs, we can then | ||||||
|         # determine how large slots should be if the initial pp size was |         # determine how large slots should be if the initial pp size was | ||||||
|         # larger then the current live one, and the live one is smaller |         # larger then the current live one, and the live one is smaller | ||||||
|         # then the initial config settings. |         # then the initial config settings. | ||||||
|  | @ -173,7 +126,7 @@ class Allocator(BaseModel): | ||||||
|             l_sub_pp = self.units_limit - abs_live_size |             l_sub_pp = self.units_limit - abs_live_size | ||||||
| 
 | 
 | ||||||
|         elif size_unit == 'currency': |         elif size_unit == 'currency': | ||||||
|             live_cost_basis = abs_live_size * live_pp.avg_price |             live_cost_basis = abs_live_size * live_pp.ppu | ||||||
|             slot_size = currency_per_slot / price |             slot_size = currency_per_slot / price | ||||||
|             l_sub_pp = (self.currency_limit - live_cost_basis) / price |             l_sub_pp = (self.currency_limit - live_cost_basis) / price | ||||||
| 
 | 
 | ||||||
|  | @ -184,12 +137,14 @@ class Allocator(BaseModel): | ||||||
| 
 | 
 | ||||||
|         # an entry (adding-to or starting a pp) |         # an entry (adding-to or starting a pp) | ||||||
|         if ( |         if ( | ||||||
|             action == 'buy' and live_size > 0 or |  | ||||||
|             action == 'sell' and live_size < 0 or |  | ||||||
|             live_size == 0 |             live_size == 0 | ||||||
|  |             or (action == 'buy' and live_size > 0) | ||||||
|  |             or action == 'sell' and live_size < 0 | ||||||
|         ): |         ): | ||||||
| 
 |             order_size = min( | ||||||
|             order_size = min(slot_size, l_sub_pp) |                 slot_size, | ||||||
|  |                 max(l_sub_pp, 0), | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|         # an exit (removing-from or going to net-zero pp) |         # an exit (removing-from or going to net-zero pp) | ||||||
|         else: |         else: | ||||||
|  | @ -205,7 +160,7 @@ class Allocator(BaseModel): | ||||||
|             if size_unit == 'currency': |             if size_unit == 'currency': | ||||||
|                 # compute the "projected" limit's worth of units at the |                 # compute the "projected" limit's worth of units at the | ||||||
|                 # current pp (weighted) price: |                 # current pp (weighted) price: | ||||||
|                 slot_size = currency_per_slot / live_pp.avg_price |                 slot_size = currency_per_slot / live_pp.ppu | ||||||
| 
 | 
 | ||||||
|             else: |             else: | ||||||
|                 slot_size = u_per_slot |                 slot_size = u_per_slot | ||||||
|  | @ -244,7 +199,12 @@ class Allocator(BaseModel): | ||||||
|         if order_size < slot_size: |         if order_size < slot_size: | ||||||
|             # compute a fractional slots size to display |             # compute a fractional slots size to display | ||||||
|             slots_used = self.slots_used( |             slots_used = self.slots_used( | ||||||
|                 Position(symbol=sym, size=order_size, avg_price=price) |                 Position( | ||||||
|  |                     symbol=sym, | ||||||
|  |                     size=order_size, | ||||||
|  |                     ppu=price, | ||||||
|  |                     bsuid=sym, | ||||||
|  |                 ) | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|         return { |         return { | ||||||
|  | @ -271,8 +231,8 @@ class Allocator(BaseModel): | ||||||
|         abs_pp_size = abs(pp.size) |         abs_pp_size = abs(pp.size) | ||||||
| 
 | 
 | ||||||
|         if self.size_unit == 'currency': |         if self.size_unit == 'currency': | ||||||
|             # live_currency_size = size or (abs_pp_size * pp.avg_price) |             # live_currency_size = size or (abs_pp_size * pp.ppu) | ||||||
|             live_currency_size = abs_pp_size * pp.avg_price |             live_currency_size = abs_pp_size * pp.ppu | ||||||
|             prop = live_currency_size / self.currency_limit |             prop = live_currency_size / self.currency_limit | ||||||
| 
 | 
 | ||||||
|         else: |         else: | ||||||
|  | @ -284,14 +244,6 @@ class Allocator(BaseModel): | ||||||
|         return round(prop * self.slots) |         return round(prop * self.slots) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _derivs = ( |  | ||||||
|     'future', |  | ||||||
|     'continuous_future', |  | ||||||
|     'option', |  | ||||||
|     'futures_option', |  | ||||||
| ) |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def mk_allocator( | def mk_allocator( | ||||||
| 
 | 
 | ||||||
|     symbol: Symbol, |     symbol: Symbol, | ||||||
|  | @ -300,7 +252,7 @@ def mk_allocator( | ||||||
|     # default allocation settings |     # default allocation settings | ||||||
|     defaults: dict[str, float] = { |     defaults: dict[str, float] = { | ||||||
|         'account': None,  # select paper by default |         'account': None,  # select paper by default | ||||||
|         'size_unit': 'currency', |         # 'size_unit': 'currency', | ||||||
|         'units_limit': 400, |         'units_limit': 400, | ||||||
|         'currency_limit': 5e3, |         'currency_limit': 5e3, | ||||||
|         'slots': 4, |         'slots': 4, | ||||||
|  | @ -318,42 +270,9 @@ def mk_allocator( | ||||||
|         'currency_limit': 6e3, |         'currency_limit': 6e3, | ||||||
|         'slots': 6, |         'slots': 6, | ||||||
|     } |     } | ||||||
| 
 |  | ||||||
|     defaults.update(user_def) |     defaults.update(user_def) | ||||||
| 
 | 
 | ||||||
|     alloc = Allocator( |     return Allocator( | ||||||
|         symbol=symbol, |         symbol=symbol, | ||||||
|         **defaults, |         **defaults, | ||||||
|     ) |     ) | ||||||
| 
 |  | ||||||
|     asset_type = symbol.type_key |  | ||||||
| 
 |  | ||||||
|     # specific configs by asset class / type |  | ||||||
| 
 |  | ||||||
|     if asset_type in _derivs: |  | ||||||
|         # since it's harder to know how currency "applies" in this case |  | ||||||
|         # given leverage properties |  | ||||||
|         alloc.size_unit = '# units' |  | ||||||
| 
 |  | ||||||
|         # set units limit to slots size thus making make the next |  | ||||||
|         # entry step 1.0 |  | ||||||
|         alloc.units_limit = alloc.slots |  | ||||||
| 
 |  | ||||||
|     # if the current position is already greater then the limit |  | ||||||
|     # settings, increase the limit to the current position |  | ||||||
|     if alloc.size_unit == 'currency': |  | ||||||
|         startup_size = startup_pp.size * startup_pp.avg_price |  | ||||||
| 
 |  | ||||||
|         if startup_size > alloc.currency_limit: |  | ||||||
|             alloc.currency_limit = round(startup_size, ndigits=2) |  | ||||||
| 
 |  | ||||||
|     else: |  | ||||||
|         startup_size = abs(startup_pp.size) |  | ||||||
| 
 |  | ||||||
|         if startup_size > alloc.units_limit: |  | ||||||
|             alloc.units_limit = startup_size |  | ||||||
| 
 |  | ||||||
|             if asset_type in _derivs: |  | ||||||
|                 alloc.slots = alloc.units_limit |  | ||||||
| 
 |  | ||||||
|     return alloc |  | ||||||
|  |  | ||||||
|  | @ -31,6 +31,7 @@ from ..log import get_logger | ||||||
| from ._ems import _emsd_main | from ._ems import _emsd_main | ||||||
| from .._daemon import maybe_open_emsd | from .._daemon import maybe_open_emsd | ||||||
| from ._messages import Order, Cancel | from ._messages import Order, Cancel | ||||||
|  | from ..brokers import get_brokermod | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
|  | @ -58,11 +59,11 @@ class OrderBook: | ||||||
| 
 | 
 | ||||||
|     def send( |     def send( | ||||||
|         self, |         self, | ||||||
|         msg: Order, |         msg: Order | dict, | ||||||
| 
 | 
 | ||||||
|     ) -> dict: |     ) -> dict: | ||||||
|         self._sent_orders[msg.oid] = msg |         self._sent_orders[msg.oid] = msg | ||||||
|         self._to_ems.send_nowait(msg.dict()) |         self._to_ems.send_nowait(msg) | ||||||
|         return msg |         return msg | ||||||
| 
 | 
 | ||||||
|     def update( |     def update( | ||||||
|  | @ -73,9 +74,8 @@ class OrderBook: | ||||||
| 
 | 
 | ||||||
|     ) -> dict: |     ) -> dict: | ||||||
|         cmd = self._sent_orders[uuid] |         cmd = self._sent_orders[uuid] | ||||||
|         msg = cmd.dict() |         msg = cmd.copy(update=data) | ||||||
|         msg.update(data) |         self._sent_orders[uuid] = msg | ||||||
|         self._sent_orders[uuid] = Order(**msg) |  | ||||||
|         self._to_ems.send_nowait(msg) |         self._to_ems.send_nowait(msg) | ||||||
|         return cmd |         return cmd | ||||||
| 
 | 
 | ||||||
|  | @ -83,12 +83,18 @@ class OrderBook: | ||||||
|         """Cancel an order (or alert) in the EMS. |         """Cancel an order (or alert) in the EMS. | ||||||
| 
 | 
 | ||||||
|         """ |         """ | ||||||
|         cmd = self._sent_orders[uuid] |         cmd = self._sent_orders.get(uuid) | ||||||
|  |         if not cmd: | ||||||
|  |             log.error( | ||||||
|  |                 f'Unknown order {uuid}!?\n' | ||||||
|  |                 f'Maybe there is a stale entry or line?\n' | ||||||
|  |                 f'You should report this as a bug!' | ||||||
|  |             ) | ||||||
|         msg = Cancel( |         msg = Cancel( | ||||||
|             oid=uuid, |             oid=uuid, | ||||||
|             symbol=cmd.symbol, |             symbol=cmd.symbol, | ||||||
|         ) |         ) | ||||||
|         self._to_ems.send_nowait(msg.dict()) |         self._to_ems.send_nowait(msg) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| _orders: OrderBook = None | _orders: OrderBook = None | ||||||
|  | @ -149,10 +155,17 @@ async def relay_order_cmds_from_sync_code( | ||||||
|     book = get_orders() |     book = get_orders() | ||||||
|     async with book._from_order_book.subscribe() as orders_stream: |     async with book._from_order_book.subscribe() as orders_stream: | ||||||
|         async for cmd in orders_stream: |         async for cmd in orders_stream: | ||||||
|             if cmd['symbol'] == symbol_key: |             sym = cmd.symbol | ||||||
|                 log.info(f'Send order cmd:\n{pformat(cmd)}') |             msg = pformat(cmd) | ||||||
|  |             if sym == symbol_key: | ||||||
|  |                 log.info(f'Send order cmd:\n{msg}') | ||||||
|                 # send msg over IPC / wire |                 # send msg over IPC / wire | ||||||
|                 await to_ems_stream.send(cmd) |                 await to_ems_stream.send(cmd) | ||||||
|  |             else: | ||||||
|  |                 log.warning( | ||||||
|  |                     f'Ignoring unmatched order cmd for {sym} != {symbol_key}:' | ||||||
|  |                     f'\n{msg}' | ||||||
|  |                 ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
|  | @ -204,20 +217,35 @@ async def open_ems( | ||||||
|     from ..data._source import unpack_fqsn |     from ..data._source import unpack_fqsn | ||||||
|     broker, symbol, suffix = unpack_fqsn(fqsn) |     broker, symbol, suffix = unpack_fqsn(fqsn) | ||||||
| 
 | 
 | ||||||
|  |     mode: str = 'live' | ||||||
|  | 
 | ||||||
|     async with maybe_open_emsd(broker) as portal: |     async with maybe_open_emsd(broker) as portal: | ||||||
| 
 | 
 | ||||||
|  |         mod = get_brokermod(broker) | ||||||
|  |         if not getattr(mod, 'trades_dialogue', None): | ||||||
|  |             mode = 'paper' | ||||||
|  | 
 | ||||||
|         async with ( |         async with ( | ||||||
|             # connect to emsd |             # connect to emsd | ||||||
|             portal.open_context( |             portal.open_context( | ||||||
| 
 | 
 | ||||||
|                 _emsd_main, |                 _emsd_main, | ||||||
|                 fqsn=fqsn, |                 fqsn=fqsn, | ||||||
|  |                 exec_mode=mode, | ||||||
| 
 | 
 | ||||||
|             ) as (ctx, (positions, accounts)), |             ) as ( | ||||||
|  |                 ctx, | ||||||
|  |                 ( | ||||||
|  |                     positions, | ||||||
|  |                     accounts, | ||||||
|  |                     dialogs, | ||||||
|  |                 ) | ||||||
|  |             ), | ||||||
| 
 | 
 | ||||||
|             # open 2-way trade command stream |             # open 2-way trade command stream | ||||||
|             ctx.open_stream() as trades_stream, |             ctx.open_stream() as trades_stream, | ||||||
|         ): |         ): | ||||||
|  |             # start sync code order msg delivery task | ||||||
|             async with trio.open_nursery() as n: |             async with trio.open_nursery() as n: | ||||||
|                 n.start_soon( |                 n.start_soon( | ||||||
|                     relay_order_cmds_from_sync_code, |                     relay_order_cmds_from_sync_code, | ||||||
|  | @ -225,4 +253,10 @@ async def open_ems( | ||||||
|                     trades_stream |                     trades_stream | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|                 yield book, trades_stream, positions, accounts |                 yield ( | ||||||
|  |                     book, | ||||||
|  |                     trades_stream, | ||||||
|  |                     positions, | ||||||
|  |                     accounts, | ||||||
|  |                     dialogs, | ||||||
|  |                 ) | ||||||
|  |  | ||||||
										
											
												File diff suppressed because it is too large
												Load Diff
											
										
									
								
							|  | @ -1,5 +1,5 @@ | ||||||
| # piker: trading gear for hackers | # piker: trading gear for hackers | ||||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||||
| 
 | 
 | ||||||
| # This program is free software: you can redistribute it and/or modify | # This program is free software: you can redistribute it and/or modify | ||||||
| # it under the terms of the GNU Affero General Public License as published by | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | @ -15,22 +15,95 @@ | ||||||
| # along with this program.  If not, see <https://www.gnu.org/licenses/>. | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| Clearing system messagingn types and protocols. | Clearing sub-system message and protocols. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from typing import Optional, Union | # from collections import ( | ||||||
| 
 | #     ChainMap, | ||||||
| # TODO: try out just encoding/send direction for now? | #     deque, | ||||||
| # import msgspec | # ) | ||||||
| from pydantic import BaseModel | from typing import ( | ||||||
|  |     Optional, | ||||||
|  |     Literal, | ||||||
|  | ) | ||||||
| 
 | 
 | ||||||
| from ..data._source import Symbol | from ..data._source import Symbol | ||||||
|  | from ..data.types import Struct | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
|  | # TODO: a composite for tracking msg flow on 2-legged | ||||||
|  | # dialogs. | ||||||
|  | # class Dialog(ChainMap): | ||||||
|  | #     ''' | ||||||
|  | #     Msg collection abstraction to easily track the state changes of | ||||||
|  | #     a msg flow in one high level, query-able and immutable construct. | ||||||
|  | 
 | ||||||
|  | #     The main use case is to query data from a (long-running) | ||||||
|  | #     msg-transaction-sequence | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | #     ''' | ||||||
|  | #     def update( | ||||||
|  | #         self, | ||||||
|  | #         msg, | ||||||
|  | #     ) -> None: | ||||||
|  | #         self.maps.insert(0, msg.to_dict()) | ||||||
|  | 
 | ||||||
|  | #     def flatten(self) -> dict: | ||||||
|  | #         return dict(self) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO: ``msgspec`` stuff worth paying attention to: | ||||||
|  | # - schema evolution: | ||||||
|  | # https://jcristharif.com/msgspec/usage.html#schema-evolution | ||||||
|  | # - for eg. ``BrokerdStatus``, instead just have separate messages? | ||||||
|  | # - use literals for a common msg determined by diff keys? | ||||||
|  | #   - https://jcristharif.com/msgspec/usage.html#literal | ||||||
|  | 
 | ||||||
|  | # -------------- | ||||||
| # Client -> emsd | # Client -> emsd | ||||||
|  | # -------------- | ||||||
|  | 
 | ||||||
|  | class Order(Struct): | ||||||
|  | 
 | ||||||
|  |     # TODO: ideally we can combine these 2 fields into | ||||||
|  |     # 1 and just use the size polarity to determine a buy/sell. | ||||||
|  |     # i would like to see this become more like | ||||||
|  |     # https://jcristharif.com/msgspec/usage.html#literal | ||||||
|  |     # action: Literal[ | ||||||
|  |     #     'live', | ||||||
|  |     #     'dark', | ||||||
|  |     #     'alert', | ||||||
|  |     # ] | ||||||
|  | 
 | ||||||
|  |     action: Literal[ | ||||||
|  |         'buy', | ||||||
|  |         'sell', | ||||||
|  |         'alert', | ||||||
|  |     ] | ||||||
|  |     # determines whether the create execution | ||||||
|  |     # will be submitted to the ems or directly to | ||||||
|  |     # the backend broker | ||||||
|  |     exec_mode: Literal[ | ||||||
|  |         'dark', | ||||||
|  |         'live', | ||||||
|  |         # 'paper',  no right? | ||||||
|  |     ] | ||||||
|  | 
 | ||||||
|  |     # internal ``emdsd`` unique "order id" | ||||||
|  |     oid: str  # uuid4 | ||||||
|  |     symbol: str | Symbol | ||||||
|  |     account: str  # should we set a default as '' ? | ||||||
|  | 
 | ||||||
|  |     price: float | ||||||
|  |     size: float  # -ve is "sell", +ve is "buy" | ||||||
|  | 
 | ||||||
|  |     brokers: Optional[list[str]] = [] | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class Cancel(BaseModel): | class Cancel(Struct): | ||||||
|     '''Cancel msg for removing a dark (ems triggered) or |     ''' | ||||||
|  |     Cancel msg for removing a dark (ems triggered) or | ||||||
|     broker-submitted (live) trigger/order. |     broker-submitted (live) trigger/order. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  | @ -39,82 +112,57 @@ class Cancel(BaseModel): | ||||||
|     symbol: str |     symbol: str | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class Order(BaseModel): | # -------------- | ||||||
| 
 |  | ||||||
|     action: str  # {'buy', 'sell', 'alert'} |  | ||||||
|     # internal ``emdsd`` unique "order id" |  | ||||||
|     oid: str  # uuid4 |  | ||||||
|     symbol: Union[str, Symbol] |  | ||||||
|     account: str  # should we set a default as '' ? |  | ||||||
| 
 |  | ||||||
|     price: float |  | ||||||
|     size: float |  | ||||||
|     brokers: list[str] |  | ||||||
| 
 |  | ||||||
|     # Assigned once initial ack is received |  | ||||||
|     # ack_time_ns: Optional[int] = None |  | ||||||
| 
 |  | ||||||
|     # determines whether the create execution |  | ||||||
|     # will be submitted to the ems or directly to |  | ||||||
|     # the backend broker |  | ||||||
|     exec_mode: str  # {'dark', 'live', 'paper'} |  | ||||||
| 
 |  | ||||||
|     class Config: |  | ||||||
|         # just for pre-loading a ``Symbol`` when used |  | ||||||
|         # in the order mode staging process |  | ||||||
|         arbitrary_types_allowed = True |  | ||||||
|         # don't copy this model instance when used in |  | ||||||
|         # a recursive model |  | ||||||
|         copy_on_model_validation = False |  | ||||||
| 
 |  | ||||||
| # Client <- emsd | # Client <- emsd | ||||||
|  | # -------------- | ||||||
| # update msgs from ems which relay state change info | # update msgs from ems which relay state change info | ||||||
| # from the active clearing engine. | # from the active clearing engine. | ||||||
| 
 | 
 | ||||||
| 
 | class Status(Struct): | ||||||
| class Status(BaseModel): |  | ||||||
| 
 | 
 | ||||||
|     name: str = 'status' |     name: str = 'status' | ||||||
|     oid: str  # uuid4 |  | ||||||
|     time_ns: int |     time_ns: int | ||||||
|  |     oid: str  # uuid4 ems-order dialog id | ||||||
| 
 | 
 | ||||||
|     # { |     resp: Literal[ | ||||||
|     #   'dark_submitted', |       'pending',  # acked by broker but not yet open | ||||||
|     #   'dark_cancelled', |       'open', | ||||||
|     #   'dark_triggered', |       'dark_open',  # dark/algo triggered order is open in ems clearing loop | ||||||
| 
 |       'triggered',  # above triggered order sent to brokerd, or an alert closed | ||||||
|     #   'broker_submitted', |       'closed',  # fully cleared all size/units | ||||||
|     #   'broker_cancelled', |       'fill',  # partial execution | ||||||
|     #   'broker_executed', |       'canceled', | ||||||
|     #   'broker_filled', |       'error', | ||||||
|     #   'broker_errored', |     ] | ||||||
| 
 |  | ||||||
|     #   'alert_submitted', |  | ||||||
|     #   'alert_triggered', |  | ||||||
| 
 |  | ||||||
|     # } |  | ||||||
|     resp: str  # "response", see above |  | ||||||
| 
 |  | ||||||
|     # symbol: str |  | ||||||
| 
 |  | ||||||
|     # trigger info |  | ||||||
|     trigger_price: Optional[float] = None |  | ||||||
|     # price: float |  | ||||||
| 
 |  | ||||||
|     # broker: Optional[str] = None |  | ||||||
| 
 | 
 | ||||||
|     # this maps normally to the ``BrokerdOrder.reqid`` below, an id |     # this maps normally to the ``BrokerdOrder.reqid`` below, an id | ||||||
|     # normally allocated internally by the backend broker routing system |     # normally allocated internally by the backend broker routing system | ||||||
|     broker_reqid: Optional[Union[int, str]] = None |     reqid: Optional[int | str] = None | ||||||
| 
 | 
 | ||||||
|     # for relaying backend msg data "through" the ems layer |     # the (last) source order/request msg if provided | ||||||
|  |     # (eg. the Order/Cancel which causes this msg) and | ||||||
|  |     # acts as a back-reference to the corresponding | ||||||
|  |     # request message which was the source of this msg. | ||||||
|  |     req: Optional[Order | Cancel] = None | ||||||
|  | 
 | ||||||
|  |     # XXX: better design/name here? | ||||||
|  |     # flag that can be set to indicate a message for an order | ||||||
|  |     # event that wasn't originated by piker's emsd (eg. some external | ||||||
|  |     # trading system which does it's own order control but that you | ||||||
|  |     # might want to "track" using piker UIs/systems). | ||||||
|  |     src: Optional[str] = None | ||||||
|  | 
 | ||||||
|  |     # for relaying a boxed brokerd-dialog-side msg data "through" the | ||||||
|  |     # ems layer to clients. | ||||||
|     brokerd_msg: dict = {} |     brokerd_msg: dict = {} | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # --------------- | ||||||
| # emsd -> brokerd | # emsd -> brokerd | ||||||
|  | # --------------- | ||||||
| # requests *sent* from ems to respective backend broker daemon | # requests *sent* from ems to respective backend broker daemon | ||||||
| 
 | 
 | ||||||
| class BrokerdCancel(BaseModel): | class BrokerdCancel(Struct): | ||||||
| 
 | 
 | ||||||
|     action: str = 'cancel' |     action: str = 'cancel' | ||||||
|     oid: str  # piker emsd order id |     oid: str  # piker emsd order id | ||||||
|  | @ -127,34 +175,38 @@ class BrokerdCancel(BaseModel): | ||||||
|     # for setting a unique order id then this value will be relayed back |     # for setting a unique order id then this value will be relayed back | ||||||
|     # on the emsd order request stream as the ``BrokerdOrderAck.reqid`` |     # on the emsd order request stream as the ``BrokerdOrderAck.reqid`` | ||||||
|     # field |     # field | ||||||
|     reqid: Optional[Union[int, str]] = None |     reqid: Optional[int | str] = None | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class BrokerdOrder(BaseModel): | class BrokerdOrder(Struct): | ||||||
| 
 | 
 | ||||||
|     action: str  # {buy, sell} |  | ||||||
|     oid: str |     oid: str | ||||||
|     account: str |     account: str | ||||||
|     time_ns: int |     time_ns: int | ||||||
| 
 | 
 | ||||||
|  |     # TODO: if we instead rely on a +ve/-ve size to determine | ||||||
|  |     # the action we more or less don't need this field right? | ||||||
|  |     action: str = ''  # {buy, sell} | ||||||
|  | 
 | ||||||
|     # "broker request id": broker specific/internal order id if this is |     # "broker request id": broker specific/internal order id if this is | ||||||
|     # None, creates a new order otherwise if the id is valid the backend |     # None, creates a new order otherwise if the id is valid the backend | ||||||
|     # api must modify the existing matching order. If the broker allows |     # api must modify the existing matching order. If the broker allows | ||||||
|     # for setting a unique order id then this value will be relayed back |     # for setting a unique order id then this value will be relayed back | ||||||
|     # on the emsd order request stream as the ``BrokerdOrderAck.reqid`` |     # on the emsd order request stream as the ``BrokerdOrderAck.reqid`` | ||||||
|     # field |     # field | ||||||
|     reqid: Optional[Union[int, str]] = None |     reqid: Optional[int | str] = None | ||||||
| 
 | 
 | ||||||
|     symbol: str  # symbol.<providername> ? |     symbol: str  # fqsn | ||||||
|     price: float |     price: float | ||||||
|     size: float |     size: float | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # --------------- | ||||||
| # emsd <- brokerd | # emsd <- brokerd | ||||||
|  | # --------------- | ||||||
| # requests *received* to ems from broker backend | # requests *received* to ems from broker backend | ||||||
| 
 | 
 | ||||||
| 
 | class BrokerdOrderAck(Struct): | ||||||
| class BrokerdOrderAck(BaseModel): |  | ||||||
|     ''' |     ''' | ||||||
|     Immediate reponse to a brokerd order request providing the broker |     Immediate reponse to a brokerd order request providing the broker | ||||||
|     specific unique order id so that the EMS can associate this |     specific unique order id so that the EMS can associate this | ||||||
|  | @ -165,39 +217,32 @@ class BrokerdOrderAck(BaseModel): | ||||||
|     name: str = 'ack' |     name: str = 'ack' | ||||||
| 
 | 
 | ||||||
|     # defined and provided by backend |     # defined and provided by backend | ||||||
|     reqid: Union[int, str] |     reqid: int | str | ||||||
| 
 | 
 | ||||||
|     # emsd id originally sent in matching request msg |     # emsd id originally sent in matching request msg | ||||||
|     oid: str |     oid: str | ||||||
|     account: str = '' |     account: str = '' | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class BrokerdStatus(BaseModel): | class BrokerdStatus(Struct): | ||||||
| 
 | 
 | ||||||
|     name: str = 'status' |     name: str = 'status' | ||||||
|     reqid: Union[int, str] |     reqid: int | str | ||||||
|     time_ns: int |     time_ns: int | ||||||
|  |     status: Literal[ | ||||||
|  |         'open', | ||||||
|  |         'canceled', | ||||||
|  |         'fill', | ||||||
|  |         'pending', | ||||||
|  |         'error', | ||||||
|  |     ] | ||||||
| 
 | 
 | ||||||
|     # XXX: should be best effort set for every update |     account: str | ||||||
|     account: str = '' |  | ||||||
| 
 |  | ||||||
|     # { |  | ||||||
|     #   'submitted', |  | ||||||
|     #   'cancelled', |  | ||||||
|     #   'filled', |  | ||||||
|     # } |  | ||||||
|     status: str |  | ||||||
| 
 |  | ||||||
|     filled: float = 0.0 |     filled: float = 0.0 | ||||||
|     reason: str = '' |     reason: str = '' | ||||||
|     remaining: float = 0.0 |     remaining: float = 0.0 | ||||||
| 
 | 
 | ||||||
|     # XXX: better design/name here? |     # external: bool = False | ||||||
|     # flag that can be set to indicate a message for an order |  | ||||||
|     # event that wasn't originated by piker's emsd (eg. some external |  | ||||||
|     # trading system which does it's own order control but that you |  | ||||||
|     # might want to "track" using piker UIs/systems). |  | ||||||
|     external: bool = False |  | ||||||
| 
 | 
 | ||||||
|     # XXX: not required schema as of yet |     # XXX: not required schema as of yet | ||||||
|     broker_details: dict = { |     broker_details: dict = { | ||||||
|  | @ -205,14 +250,14 @@ class BrokerdStatus(BaseModel): | ||||||
|     } |     } | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class BrokerdFill(BaseModel): | class BrokerdFill(Struct): | ||||||
|     ''' |     ''' | ||||||
|     A single message indicating a "fill-details" event from the broker |     A single message indicating a "fill-details" event from the broker | ||||||
|     if avaiable. |     if avaiable. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     name: str = 'fill' |     name: str = 'fill' | ||||||
|     reqid: Union[int, str] |     reqid: int | str | ||||||
|     time_ns: int |     time_ns: int | ||||||
| 
 | 
 | ||||||
|     # order exeuction related |     # order exeuction related | ||||||
|  | @ -230,7 +275,7 @@ class BrokerdFill(BaseModel): | ||||||
|     broker_time: float |     broker_time: float | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class BrokerdError(BaseModel): | class BrokerdError(Struct): | ||||||
|     ''' |     ''' | ||||||
|     Optional error type that can be relayed to emsd for error handling. |     Optional error type that can be relayed to emsd for error handling. | ||||||
| 
 | 
 | ||||||
|  | @ -242,14 +287,14 @@ class BrokerdError(BaseModel): | ||||||
| 
 | 
 | ||||||
|     # if no brokerd order request was actually submitted (eg. we errored |     # if no brokerd order request was actually submitted (eg. we errored | ||||||
|     # at the ``pikerd`` layer) then there will be ``reqid`` allocated. |     # at the ``pikerd`` layer) then there will be ``reqid`` allocated. | ||||||
|     reqid: Optional[Union[int, str]] = None |     reqid: Optional[int | str] = None | ||||||
| 
 | 
 | ||||||
|     symbol: str |     symbol: str | ||||||
|     reason: str |     reason: str | ||||||
|     broker_details: dict = {} |     broker_details: dict = {} | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class BrokerdPosition(BaseModel): | class BrokerdPosition(Struct): | ||||||
|     '''Position update event from brokerd. |     '''Position update event from brokerd. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  | @ -258,6 +303,6 @@ class BrokerdPosition(BaseModel): | ||||||
|     broker: str |     broker: str | ||||||
|     account: str |     account: str | ||||||
|     symbol: str |     symbol: str | ||||||
|     currency: str |  | ||||||
|     size: float |     size: float | ||||||
|     avg_price: float |     avg_price: float | ||||||
|  |     currency: str = '' | ||||||
|  |  | ||||||
|  | @ -18,33 +18,49 @@ | ||||||
| Fake trading for forward testing. | Fake trading for forward testing. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
|  | from collections import defaultdict | ||||||
| from contextlib import asynccontextmanager | from contextlib import asynccontextmanager | ||||||
| from datetime import datetime | from datetime import datetime | ||||||
| from operator import itemgetter | from operator import itemgetter | ||||||
|  | import itertools | ||||||
| import time | import time | ||||||
| from typing import Tuple, Optional, Callable | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     Optional, | ||||||
|  |     Callable, | ||||||
|  | ) | ||||||
| import uuid | import uuid | ||||||
| 
 | 
 | ||||||
| from bidict import bidict | from bidict import bidict | ||||||
|  | import pendulum | ||||||
| import trio | import trio | ||||||
| import tractor | import tractor | ||||||
| from dataclasses import dataclass |  | ||||||
| 
 | 
 | ||||||
| from .. import data | from .. import data | ||||||
|  | from ..data._source import Symbol | ||||||
|  | from ..data.types import Struct | ||||||
|  | from ..pp import ( | ||||||
|  |     Position, | ||||||
|  |     Transaction, | ||||||
|  | ) | ||||||
| from ..data._normalize import iterticks | from ..data._normalize import iterticks | ||||||
| from ..data._source import unpack_fqsn | from ..data._source import unpack_fqsn | ||||||
| from ..log import get_logger | from ..log import get_logger | ||||||
| from ._messages import ( | from ._messages import ( | ||||||
|     BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus, |     BrokerdCancel, | ||||||
|     BrokerdFill, BrokerdPosition, BrokerdError |     BrokerdOrder, | ||||||
|  |     BrokerdOrderAck, | ||||||
|  |     BrokerdStatus, | ||||||
|  |     BrokerdFill, | ||||||
|  |     BrokerdPosition, | ||||||
|  |     BrokerdError, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @dataclass | class PaperBoi(Struct): | ||||||
| class PaperBoi: |  | ||||||
|     """ |     """ | ||||||
|     Emulates a broker order client providing the same API and |     Emulates a broker order client providing the same API and | ||||||
|     delivering an order-event response stream but with methods for |     delivering an order-event response stream but with methods for | ||||||
|  | @ -58,14 +74,15 @@ class PaperBoi: | ||||||
| 
 | 
 | ||||||
|     # map of paper "live" orders which be used |     # map of paper "live" orders which be used | ||||||
|     # to simulate fills based on paper engine settings |     # to simulate fills based on paper engine settings | ||||||
|     _buys: bidict |     _buys: defaultdict[str, bidict] | ||||||
|     _sells: bidict |     _sells: defaultdict[str, bidict] | ||||||
|     _reqids: bidict |     _reqids: bidict | ||||||
|     _positions: dict[str, BrokerdPosition] |     _positions: dict[str, Position] | ||||||
|  |     _trade_ledger: dict[str, Any] | ||||||
| 
 | 
 | ||||||
|     # init edge case L1 spread |     # init edge case L1 spread | ||||||
|     last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size |     last_ask: tuple[float, float] = (float('inf'), 0)  # price, size | ||||||
|     last_bid: Tuple[float, float] = (0, 0) |     last_bid: tuple[float, float] = (0, 0) | ||||||
| 
 | 
 | ||||||
|     async def submit_limit( |     async def submit_limit( | ||||||
|         self, |         self, | ||||||
|  | @ -75,27 +92,24 @@ class PaperBoi: | ||||||
|         action: str, |         action: str, | ||||||
|         size: float, |         size: float, | ||||||
|         reqid: Optional[str], |         reqid: Optional[str], | ||||||
|  | 
 | ||||||
|     ) -> int: |     ) -> int: | ||||||
|         """Place an order and return integer request id provided by client. |         ''' | ||||||
| 
 |         Place an order and return integer request id provided by client. | ||||||
|         """ |  | ||||||
|         is_modify: bool = False |  | ||||||
|         if reqid is None: |  | ||||||
|             reqid = str(uuid.uuid4()) |  | ||||||
| 
 |  | ||||||
|         else: |  | ||||||
|             # order is already existing, this is a modify |  | ||||||
|             (oid, symbol, action, old_price) = self._reqids[reqid] |  | ||||||
|             assert old_price != price |  | ||||||
|             is_modify = True |  | ||||||
| 
 |  | ||||||
|         # register order internally |  | ||||||
|         self._reqids[reqid] = (oid, symbol, action, price) |  | ||||||
| 
 | 
 | ||||||
|  |         ''' | ||||||
|         if action == 'alert': |         if action == 'alert': | ||||||
|             # bypass all fill simulation |             # bypass all fill simulation | ||||||
|             return reqid |             return reqid | ||||||
| 
 | 
 | ||||||
|  |         entry = self._reqids.get(reqid) | ||||||
|  |         if entry: | ||||||
|  |             # order is already existing, this is a modify | ||||||
|  |             (oid, symbol, action, old_price) = entry | ||||||
|  |         else: | ||||||
|  |             # register order internally | ||||||
|  |             self._reqids[reqid] = (oid, symbol, action, price) | ||||||
|  | 
 | ||||||
|         # TODO: net latency model |         # TODO: net latency model | ||||||
|         # we checkpoint here quickly particulalry |         # we checkpoint here quickly particulalry | ||||||
|         # for dark orders since we want the dark_executed |         # for dark orders since we want the dark_executed | ||||||
|  | @ -107,15 +121,18 @@ class PaperBoi: | ||||||
|             size = -size |             size = -size | ||||||
| 
 | 
 | ||||||
|         msg = BrokerdStatus( |         msg = BrokerdStatus( | ||||||
|             status='submitted', |             status='open', | ||||||
|  |             # account=f'paper_{self.broker}', | ||||||
|  |             account='paper', | ||||||
|             reqid=reqid, |             reqid=reqid, | ||||||
|             broker=self.broker, |  | ||||||
|             time_ns=time.time_ns(), |             time_ns=time.time_ns(), | ||||||
|             filled=0.0, |             filled=0.0, | ||||||
|             reason='paper_trigger', |             reason='paper_trigger', | ||||||
|             remaining=size, |             remaining=size, | ||||||
|  | 
 | ||||||
|  |             broker_details={'name': 'paperboi'}, | ||||||
|         ) |         ) | ||||||
|         await self.ems_trades_stream.send(msg.dict()) |         await self.ems_trades_stream.send(msg) | ||||||
| 
 | 
 | ||||||
|         # if we're already a clearing price simulate an immediate fill |         # if we're already a clearing price simulate an immediate fill | ||||||
|         if ( |         if ( | ||||||
|  | @ -123,28 +140,28 @@ class PaperBoi: | ||||||
|             ) or ( |             ) or ( | ||||||
|             action == 'sell' and (clear_price := self.last_bid[0]) >= price |             action == 'sell' and (clear_price := self.last_bid[0]) >= price | ||||||
|         ): |         ): | ||||||
|             await self.fake_fill(symbol, clear_price, size, action, reqid, oid) |             await self.fake_fill( | ||||||
|  |                 symbol, | ||||||
|  |                 clear_price, | ||||||
|  |                 size, | ||||||
|  |                 action, | ||||||
|  |                 reqid, | ||||||
|  |                 oid, | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|         else: |  | ||||||
|         # register this submissions as a paper live order |         # register this submissions as a paper live order | ||||||
| 
 |         else: | ||||||
|             # submit order to book simulation fill loop |             # set the simulated order in the respective table for lookup | ||||||
|  |             # and trigger by the simulated clearing task normally | ||||||
|  |             # running ``simulate_fills()``. | ||||||
|             if action == 'buy': |             if action == 'buy': | ||||||
|                 orders = self._buys |                 orders = self._buys | ||||||
| 
 | 
 | ||||||
|             elif action == 'sell': |             elif action == 'sell': | ||||||
|                 orders = self._sells |                 orders = self._sells | ||||||
| 
 | 
 | ||||||
|             # set the simulated order in the respective table for lookup |             # {symbol -> bidict[oid, (<price data>)]} | ||||||
|             # and trigger by the simulated clearing task normally |             orders[symbol][oid] = (price, size, reqid, action) | ||||||
|             # running ``simulate_fills()``. |  | ||||||
| 
 |  | ||||||
|             if is_modify: |  | ||||||
|                 # remove any existing order for the old price |  | ||||||
|                 orders[symbol].pop((oid, old_price)) |  | ||||||
| 
 |  | ||||||
|             # buys/sells: (symbol  -> (price -> order)) |  | ||||||
|             orders.setdefault(symbol, {})[(oid, price)] = (size, reqid, action) |  | ||||||
| 
 | 
 | ||||||
|         return reqid |         return reqid | ||||||
| 
 | 
 | ||||||
|  | @ -157,26 +174,26 @@ class PaperBoi: | ||||||
|         oid, symbol, action, price = self._reqids[reqid] |         oid, symbol, action, price = self._reqids[reqid] | ||||||
| 
 | 
 | ||||||
|         if action == 'buy': |         if action == 'buy': | ||||||
|             self._buys[symbol].pop((oid, price)) |             self._buys[symbol].pop(oid, None) | ||||||
|         elif action == 'sell': |         elif action == 'sell': | ||||||
|             self._sells[symbol].pop((oid, price)) |             self._sells[symbol].pop(oid, None) | ||||||
| 
 | 
 | ||||||
|         # TODO: net latency model |         # TODO: net latency model | ||||||
|         await trio.sleep(0.05) |         await trio.sleep(0.05) | ||||||
| 
 | 
 | ||||||
|         msg = BrokerdStatus( |         msg = BrokerdStatus( | ||||||
|             status='cancelled', |             status='canceled', | ||||||
|             oid=oid, |             account='paper', | ||||||
|             reqid=reqid, |             reqid=reqid, | ||||||
|             broker=self.broker, |  | ||||||
|             time_ns=time.time_ns(), |             time_ns=time.time_ns(), | ||||||
|  |             broker_details={'name': 'paperboi'}, | ||||||
|         ) |         ) | ||||||
|         await self.ems_trades_stream.send(msg.dict()) |         await self.ems_trades_stream.send(msg) | ||||||
| 
 | 
 | ||||||
|     async def fake_fill( |     async def fake_fill( | ||||||
|         self, |         self, | ||||||
| 
 | 
 | ||||||
|         symbol: str, |         fqsn: str, | ||||||
|         price: float, |         price: float, | ||||||
|         size: float, |         size: float, | ||||||
|         action: str,  # one of {'buy', 'sell'} |         action: str,  # one of {'buy', 'sell'} | ||||||
|  | @ -195,16 +212,15 @@ class PaperBoi: | ||||||
|         """ |         """ | ||||||
|         # TODO: net latency model |         # TODO: net latency model | ||||||
|         await trio.sleep(0.05) |         await trio.sleep(0.05) | ||||||
|  |         fill_time_ns = time.time_ns() | ||||||
|  |         fill_time_s = time.time() | ||||||
| 
 | 
 | ||||||
|         msg = BrokerdFill( |         fill_msg = BrokerdFill( | ||||||
| 
 |  | ||||||
|             reqid=reqid, |             reqid=reqid, | ||||||
|             time_ns=time.time_ns(), |             time_ns=fill_time_ns, | ||||||
| 
 |  | ||||||
|             action=action, |             action=action, | ||||||
|             size=size, |             size=size, | ||||||
|             price=price, |             price=price, | ||||||
| 
 |  | ||||||
|             broker_time=datetime.now().timestamp(), |             broker_time=datetime.now().timestamp(), | ||||||
|             broker_details={ |             broker_details={ | ||||||
|                 'paper_info': { |                 'paper_info': { | ||||||
|  | @ -214,79 +230,66 @@ class PaperBoi: | ||||||
|                 'name': self.broker + '_paper', |                 'name': self.broker + '_paper', | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         await self.ems_trades_stream.send(msg.dict()) |         await self.ems_trades_stream.send(fill_msg) | ||||||
|  | 
 | ||||||
|  |         self._trade_ledger.update(fill_msg.to_dict()) | ||||||
| 
 | 
 | ||||||
|         if order_complete: |         if order_complete: | ||||||
| 
 |  | ||||||
|             msg = BrokerdStatus( |             msg = BrokerdStatus( | ||||||
| 
 |  | ||||||
|                 reqid=reqid, |                 reqid=reqid, | ||||||
|                 time_ns=time.time_ns(), |                 time_ns=time.time_ns(), | ||||||
| 
 |                 # account=f'paper_{self.broker}', | ||||||
|                 status='filled', |                 account='paper', | ||||||
|  |                 status='closed', | ||||||
|                 filled=size, |                 filled=size, | ||||||
|                 remaining=0 if order_complete else remaining, |                 remaining=0 if order_complete else remaining, | ||||||
| 
 |  | ||||||
|                 action=action, |  | ||||||
|                 size=size, |  | ||||||
|                 price=price, |  | ||||||
| 
 |  | ||||||
|                 broker_details={ |  | ||||||
|                     'paper_info': { |  | ||||||
|                         'oid': oid, |  | ||||||
|                     }, |  | ||||||
|                     'name': self.broker, |  | ||||||
|                 }, |  | ||||||
|             ) |             ) | ||||||
|             await self.ems_trades_stream.send(msg.dict()) |             await self.ems_trades_stream.send(msg) | ||||||
| 
 | 
 | ||||||
|         # lookup any existing position |         # lookup any existing position | ||||||
|         token = f'{symbol}.{self.broker}' |         key = fqsn.rstrip(f'.{self.broker}') | ||||||
|         pp_msg = self._positions.setdefault( |         pp = self._positions.setdefault( | ||||||
|             token, |             fqsn, | ||||||
|             BrokerdPosition( |             Position( | ||||||
|  |                 Symbol( | ||||||
|  |                     key=key, | ||||||
|  |                     broker_info={self.broker: {}}, | ||||||
|  |                 ), | ||||||
|  |                 size=size, | ||||||
|  |                 ppu=price, | ||||||
|  |                 bsuid=key, | ||||||
|  |             ) | ||||||
|  |         ) | ||||||
|  |         t = Transaction( | ||||||
|  |             fqsn=fqsn, | ||||||
|  |             tid=oid, | ||||||
|  |             size=size, | ||||||
|  |             price=price, | ||||||
|  |             cost=0,  # TODO: cost model | ||||||
|  |             dt=pendulum.from_timestamp(fill_time_s), | ||||||
|  |             bsuid=key, | ||||||
|  |         ) | ||||||
|  |         pp.add_clear(t) | ||||||
|  | 
 | ||||||
|  |         pp_msg = BrokerdPosition( | ||||||
|             broker=self.broker, |             broker=self.broker, | ||||||
|             account='paper', |             account='paper', | ||||||
|                 symbol=symbol, |             symbol=fqsn, | ||||||
|             # TODO: we need to look up the asset currency from |             # TODO: we need to look up the asset currency from | ||||||
|             # broker info. i guess for crypto this can be |             # broker info. i guess for crypto this can be | ||||||
|             # inferred from the pair? |             # inferred from the pair? | ||||||
|             currency='', |             currency='', | ||||||
|                 size=0.0, |             size=pp.size, | ||||||
|                 avg_price=0, |             avg_price=pp.ppu, | ||||||
|             ) |  | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # "avg position price" calcs |         await self.ems_trades_stream.send(pp_msg) | ||||||
|         # TODO: eventually it'd be nice to have a small set of routines |  | ||||||
|         # to do this stuff from a sequence of cleared orders to enable |  | ||||||
|         # so called "contextual positions". |  | ||||||
|         new_size = size + pp_msg.size |  | ||||||
| 
 |  | ||||||
|         # old size minus the new size gives us size differential with |  | ||||||
|         # +ve -> increase in pp size |  | ||||||
|         # -ve -> decrease in pp size |  | ||||||
|         size_diff = abs(new_size) - abs(pp_msg.size) |  | ||||||
| 
 |  | ||||||
|         if new_size == 0: |  | ||||||
|             pp_msg.avg_price = 0 |  | ||||||
| 
 |  | ||||||
|         elif size_diff > 0: |  | ||||||
|             # only update the "average position price" when the position |  | ||||||
|             # size increases not when it decreases (i.e. the position is |  | ||||||
|             # being made smaller) |  | ||||||
|             pp_msg.avg_price = ( |  | ||||||
|                 abs(size) * price + pp_msg.avg_price * abs(pp_msg.size) |  | ||||||
|             ) / abs(new_size) |  | ||||||
| 
 |  | ||||||
|         pp_msg.size = new_size |  | ||||||
| 
 |  | ||||||
|         await self.ems_trades_stream.send(pp_msg.dict()) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def simulate_fills( | async def simulate_fills( | ||||||
|     quote_stream: 'tractor.ReceiveStream',  # noqa |     quote_stream: tractor.MsgStream,  # noqa | ||||||
|     client: PaperBoi, |     client: PaperBoi, | ||||||
|  | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|     # TODO: more machinery to better simulate real-world market things: |     # TODO: more machinery to better simulate real-world market things: | ||||||
|  | @ -306,61 +309,103 @@ async def simulate_fills( | ||||||
| 
 | 
 | ||||||
|     # this stream may eventually contain multiple symbols |     # this stream may eventually contain multiple symbols | ||||||
|     async for quotes in quote_stream: |     async for quotes in quote_stream: | ||||||
| 
 |  | ||||||
|         for sym, quote in quotes.items(): |         for sym, quote in quotes.items(): | ||||||
| 
 |  | ||||||
|             for tick in iterticks( |             for tick in iterticks( | ||||||
|                 quote, |                 quote, | ||||||
|                 # dark order price filter(s) |                 # dark order price filter(s) | ||||||
|                 types=('ask', 'bid', 'trade', 'last') |                 types=('ask', 'bid', 'trade', 'last') | ||||||
|             ): |             ): | ||||||
|                 # print(tick) |                 tick_price = tick['price'] | ||||||
|                 tick_price = tick.get('price') |  | ||||||
|                 ttype = tick['type'] |  | ||||||
| 
 | 
 | ||||||
|                 if ttype in ('ask',): |                 buys: bidict[str, tuple] = client._buys[sym] | ||||||
|  |                 iter_buys = reversed(sorted( | ||||||
|  |                     buys.values(), | ||||||
|  |                     key=itemgetter(0), | ||||||
|  |                 )) | ||||||
| 
 | 
 | ||||||
|  |                 def buy_on_ask(our_price): | ||||||
|  |                     return tick_price <= our_price | ||||||
|  | 
 | ||||||
|  |                 sells: bidict[str, tuple] = client._sells[sym] | ||||||
|  |                 iter_sells = sorted( | ||||||
|  |                     sells.values(), | ||||||
|  |                     key=itemgetter(0) | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |                 def sell_on_bid(our_price): | ||||||
|  |                     return tick_price >= our_price | ||||||
|  | 
 | ||||||
|  |                 match tick: | ||||||
|  |                     case { | ||||||
|  |                         'price': tick_price, | ||||||
|  |                         # 'type': ('ask' | 'trade' | 'last'), | ||||||
|  |                         'type': 'ask', | ||||||
|  |                     }: | ||||||
|                         client.last_ask = ( |                         client.last_ask = ( | ||||||
|                             tick_price, |                             tick_price, | ||||||
|                             tick.get('size', client.last_ask[1]), |                             tick.get('size', client.last_ask[1]), | ||||||
|                         ) |                         ) | ||||||
| 
 | 
 | ||||||
|                     orders = client._buys.get(sym, {}) |                         iter_entries = zip( | ||||||
| 
 |                             iter_buys, | ||||||
|                     book_sequence = reversed( |                             itertools.repeat(buy_on_ask) | ||||||
|                         sorted(orders.keys(), key=itemgetter(1))) |                         ) | ||||||
| 
 |  | ||||||
|                     def pred(our_price): |  | ||||||
|                         return tick_price < our_price |  | ||||||
| 
 |  | ||||||
|                 elif ttype in ('bid',): |  | ||||||
| 
 | 
 | ||||||
|  |                     case { | ||||||
|  |                         'price': tick_price, | ||||||
|  |                         # 'type': ('bid' | 'trade' | 'last'), | ||||||
|  |                         'type': 'bid', | ||||||
|  |                     }: | ||||||
|                         client.last_bid = ( |                         client.last_bid = ( | ||||||
|                             tick_price, |                             tick_price, | ||||||
|                             tick.get('size', client.last_bid[1]), |                             tick.get('size', client.last_bid[1]), | ||||||
|                         ) |                         ) | ||||||
| 
 | 
 | ||||||
|                     orders = client._sells.get(sym, {}) |                         iter_entries = zip( | ||||||
|                     book_sequence = sorted(orders.keys(), key=itemgetter(1)) |                             iter_sells, | ||||||
|  |                             itertools.repeat(sell_on_bid) | ||||||
|  |                         ) | ||||||
| 
 | 
 | ||||||
|                     def pred(our_price): |                     case { | ||||||
|                         return tick_price > our_price |                         'price': tick_price, | ||||||
|  |                         'type': ('trade' | 'last'), | ||||||
|  |                     }: | ||||||
|  |                         # in the clearing price / last price case we | ||||||
|  |                         # want to iterate both sides of our book for | ||||||
|  |                         # clears since we don't know which direction the | ||||||
|  |                         # price is going to move (especially with HFT) | ||||||
|  |                         # and thus we simply interleave both sides (buys | ||||||
|  |                         # and sells) until one side clears and then | ||||||
|  |                         # break until the next tick? | ||||||
|  |                         def interleave(): | ||||||
|  |                             for pair in zip( | ||||||
|  |                                 iter_buys, | ||||||
|  |                                 iter_sells, | ||||||
|  |                             ): | ||||||
|  |                                 for order_info, pred in zip( | ||||||
|  |                                     pair, | ||||||
|  |                                     itertools.cycle([buy_on_ask, sell_on_bid]), | ||||||
|  |                                 ): | ||||||
|  |                                     yield order_info, pred | ||||||
| 
 | 
 | ||||||
|                 elif ttype in ('trade', 'last'): |                         iter_entries = interleave() | ||||||
|                     # TODO: simulate actual book queues and our orders |  | ||||||
|                     # place in it, might require full L2 data? |  | ||||||
|                     continue |  | ||||||
| 
 | 
 | ||||||
|                 # iterate book prices descending |                 # iterate all potentially clearable book prices | ||||||
|                 for oid, our_price in book_sequence: |                 # in FIFO order per side. | ||||||
|                     if pred(our_price): |                 for order_info, pred in iter_entries: | ||||||
|  |                     (our_price, size, reqid, action) = order_info | ||||||
| 
 | 
 | ||||||
|                         # retreive order info |                     clearable = pred(our_price) | ||||||
|                         (size, reqid, action) = orders.pop((oid, our_price)) |                     if clearable: | ||||||
|  |                         # pop and retreive order info | ||||||
|  |                         oid = { | ||||||
|  |                             'buy': buys, | ||||||
|  |                             'sell': sells | ||||||
|  |                         }[action].inverse.pop(order_info) | ||||||
| 
 | 
 | ||||||
|                         # clearing price would have filled entirely |                         # clearing price would have filled entirely | ||||||
|                         await client.fake_fill( |                         await client.fake_fill( | ||||||
|                             symbol=sym, |                             fqsn=sym, | ||||||
|                             # todo slippage to determine fill price |                             # todo slippage to determine fill price | ||||||
|                             price=tick_price, |                             price=tick_price, | ||||||
|                             size=size, |                             size=size, | ||||||
|  | @ -368,9 +413,6 @@ async def simulate_fills( | ||||||
|                             reqid=reqid, |                             reqid=reqid, | ||||||
|                             oid=oid, |                             oid=oid, | ||||||
|                         ) |                         ) | ||||||
|                     else: |  | ||||||
|                         # prices are iterated in sorted order so we're done |  | ||||||
|                         break |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def handle_order_requests( | async def handle_order_requests( | ||||||
|  | @ -380,68 +422,83 @@ async def handle_order_requests( | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|     # order_request: dict |     request_msg: dict | ||||||
|     async for request_msg in ems_order_stream: |     async for request_msg in ems_order_stream: | ||||||
| 
 |         match request_msg: | ||||||
|         action = request_msg['action'] |             case {'action': ('buy' | 'sell')}: | ||||||
| 
 |  | ||||||
|         if action in {'buy', 'sell'}: |  | ||||||
| 
 |  | ||||||
|             account = request_msg['account'] |  | ||||||
|             if account != 'paper': |  | ||||||
|                 log.error( |  | ||||||
|                     'This is a paper account, only a `paper` selection is valid' |  | ||||||
|                 ) |  | ||||||
|                 await ems_order_stream.send(BrokerdError( |  | ||||||
|                     oid=request_msg['oid'], |  | ||||||
|                     symbol=request_msg['symbol'], |  | ||||||
|                     reason=f'Paper only. No account found: `{account}` ?', |  | ||||||
|                 ).dict()) |  | ||||||
|                 continue |  | ||||||
| 
 |  | ||||||
|             # validate |  | ||||||
|                 order = BrokerdOrder(**request_msg) |                 order = BrokerdOrder(**request_msg) | ||||||
|  |                 account = order.account | ||||||
| 
 | 
 | ||||||
|             # call our client api to submit the order |                 # error on bad inputs | ||||||
|             reqid = await client.submit_limit( |                 reason = None | ||||||
|  |                 if account != 'paper': | ||||||
|  |                     reason = f'No account found:`{account}` (paper only)?' | ||||||
| 
 | 
 | ||||||
|  |                 elif order.size == 0: | ||||||
|  |                     reason = 'Invalid size: 0' | ||||||
|  | 
 | ||||||
|  |                 if reason: | ||||||
|  |                     log.error(reason) | ||||||
|  |                     await ems_order_stream.send(BrokerdError( | ||||||
|                         oid=order.oid, |                         oid=order.oid, | ||||||
|                         symbol=order.symbol, |                         symbol=order.symbol, | ||||||
|                 price=order.price, |                         reason=reason, | ||||||
|                 action=order.action, |                     )) | ||||||
|                 size=order.size, |                     continue | ||||||
| 
 | 
 | ||||||
|                 # XXX: by default 0 tells ``ib_insync`` methods that |                 reqid = order.reqid or str(uuid.uuid4()) | ||||||
|                 # there is no existing order so ask the client to create |  | ||||||
|                 # a new one (which it seems to do by allocating an int |  | ||||||
|                 # counter - collision prone..) |  | ||||||
|                 reqid=order.reqid, |  | ||||||
|             ) |  | ||||||
| 
 | 
 | ||||||
|                 # deliver ack that order has been submitted to broker routing |                 # deliver ack that order has been submitted to broker routing | ||||||
|                 await ems_order_stream.send( |                 await ems_order_stream.send( | ||||||
|                     BrokerdOrderAck( |                     BrokerdOrderAck( | ||||||
| 
 |  | ||||||
|                     # ems order request id |  | ||||||
|                         oid=order.oid, |                         oid=order.oid, | ||||||
| 
 |  | ||||||
|                     # broker specific request id |  | ||||||
|                         reqid=reqid, |                         reqid=reqid, | ||||||
| 
 |                     ) | ||||||
|                 ).dict() |  | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|         elif action == 'cancel': |                 # call our client api to submit the order | ||||||
|             msg = BrokerdCancel(**request_msg) |                 reqid = await client.submit_limit( | ||||||
|  |                     oid=order.oid, | ||||||
|  |                     symbol=f'{order.symbol}.{client.broker}', | ||||||
|  |                     price=order.price, | ||||||
|  |                     action=order.action, | ||||||
|  |                     size=order.size, | ||||||
|  |                     # XXX: by default 0 tells ``ib_insync`` methods that | ||||||
|  |                     # there is no existing order so ask the client to create | ||||||
|  |                     # a new one (which it seems to do by allocating an int | ||||||
|  |                     # counter - collision prone..) | ||||||
|  |                     reqid=reqid, | ||||||
|  |                 ) | ||||||
| 
 | 
 | ||||||
|  |             # elif action == 'cancel': | ||||||
|  |             case {'action': 'cancel'}: | ||||||
|  |                 msg = BrokerdCancel(**request_msg) | ||||||
|                 await client.submit_cancel( |                 await client.submit_cancel( | ||||||
|                     reqid=msg.reqid |                     reqid=msg.reqid | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|         else: |             case _: | ||||||
|                 log.error(f'Unknown order command: {request_msg}') |                 log.error(f'Unknown order command: {request_msg}') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | _reqids: bidict[str, tuple] = {} | ||||||
|  | _buys: defaultdict[ | ||||||
|  |     str,  # symbol | ||||||
|  |     bidict[ | ||||||
|  |         str,  # oid | ||||||
|  |         tuple[float, float, str, str],  # order info | ||||||
|  |     ] | ||||||
|  | ] = defaultdict(bidict) | ||||||
|  | _sells: defaultdict[ | ||||||
|  |     str,  # symbol | ||||||
|  |     bidict[ | ||||||
|  |         str,  # oid | ||||||
|  |         tuple[float, float, str, str],  # order info | ||||||
|  |     ] | ||||||
|  | ] = defaultdict(bidict) | ||||||
|  | _positions: dict[str, Position] = {} | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| @tractor.context | @tractor.context | ||||||
| async def trades_dialogue( | async def trades_dialogue( | ||||||
| 
 | 
 | ||||||
|  | @ -451,39 +508,56 @@ async def trades_dialogue( | ||||||
|     loglevel: str = None, |     loglevel: str = None, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|  | 
 | ||||||
|     tractor.log.get_console_log(loglevel) |     tractor.log.get_console_log(loglevel) | ||||||
| 
 | 
 | ||||||
|     async with ( |     async with ( | ||||||
| 
 |  | ||||||
|         data.open_feed( |         data.open_feed( | ||||||
|             [fqsn], |             [fqsn], | ||||||
|             loglevel=loglevel, |             loglevel=loglevel, | ||||||
|         ) as feed, |         ) as feed, | ||||||
| 
 | 
 | ||||||
|     ): |     ): | ||||||
|  |         pp_msgs: list[BrokerdPosition] = [] | ||||||
|  |         pos: Position | ||||||
|  |         token: str  # f'{symbol}.{self.broker}' | ||||||
|  |         for token, pos in _positions.items(): | ||||||
|  |             pp_msgs.append(BrokerdPosition( | ||||||
|  |                 broker=broker, | ||||||
|  |                 account='paper', | ||||||
|  |                 symbol=pos.symbol.front_fqsn(), | ||||||
|  |                 size=pos.size, | ||||||
|  |                 avg_price=pos.ppu, | ||||||
|  |             )) | ||||||
|  | 
 | ||||||
|         # TODO: load paper positions per broker from .toml config file |         # TODO: load paper positions per broker from .toml config file | ||||||
|         # and pass as symbol to position data mapping: ``dict[str, dict]`` |         # and pass as symbol to position data mapping: ``dict[str, dict]`` | ||||||
|         # await ctx.started(all_positions) |         await ctx.started((pp_msgs, ['paper'])) | ||||||
|         await ctx.started(({}, {'paper',})) |  | ||||||
| 
 | 
 | ||||||
|         async with ( |         async with ( | ||||||
|             ctx.open_stream() as ems_stream, |             ctx.open_stream() as ems_stream, | ||||||
|             trio.open_nursery() as n, |             trio.open_nursery() as n, | ||||||
|         ): |         ): | ||||||
| 
 |  | ||||||
|             client = PaperBoi( |             client = PaperBoi( | ||||||
|                 broker, |                 broker, | ||||||
|                 ems_stream, |                 ems_stream, | ||||||
|                 _buys={}, |                 _buys=_buys, | ||||||
|                 _sells={}, |                 _sells=_sells, | ||||||
| 
 | 
 | ||||||
|                 _reqids={}, |                 _reqids=_reqids, | ||||||
| 
 | 
 | ||||||
|                 # TODO: load paper positions from ``positions.toml`` |                 # TODO: load paper positions from ``positions.toml`` | ||||||
|                 _positions={}, |                 _positions=_positions, | ||||||
|  | 
 | ||||||
|  |                 # TODO: load postions from ledger file | ||||||
|  |                 _trade_ledger={}, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             n.start_soon(handle_order_requests, client, ems_stream) |             n.start_soon( | ||||||
|  |                 handle_order_requests, | ||||||
|  |                 client, | ||||||
|  |                 ems_stream, | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|             # paper engine simulator clearing task |             # paper engine simulator clearing task | ||||||
|             await simulate_fills(feed.stream, client) |             await simulate_fills(feed.stream, client) | ||||||
|  | @ -511,6 +585,7 @@ async def open_paperboi( | ||||||
|         # (we likely don't need more then one proc for basic |         # (we likely don't need more then one proc for basic | ||||||
|         # simulated order clearing) |         # simulated order clearing) | ||||||
|         if portal is None: |         if portal is None: | ||||||
|  |             log.info('Starting new paper-engine actor') | ||||||
|             portal = await tn.start_actor( |             portal = await tn.start_actor( | ||||||
|                 service_name, |                 service_name, | ||||||
|                 enable_modules=[__name__] |                 enable_modules=[__name__] | ||||||
|  | @ -523,5 +598,4 @@ async def open_paperboi( | ||||||
|             loglevel=loglevel, |             loglevel=loglevel, | ||||||
| 
 | 
 | ||||||
|         ) as (ctx, first): |         ) as (ctx, first): | ||||||
| 
 |  | ||||||
|             yield ctx, first |             yield ctx, first | ||||||
|  |  | ||||||
|  | @ -83,9 +83,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb): | ||||||
| 
 | 
 | ||||||
|                 ) |                 ) | ||||||
|                 log.info( |                 log.info( | ||||||
|                     f'`marketstore` up!\n' |                     f'`marketstored` up!\n' | ||||||
|                     f'`marketstored` pid: {pid}\n' |                     f'pid: {pid}\n' | ||||||
|                     f'docker container id: {cid}\n' |                     f'container id: {cid[:12]}\n' | ||||||
|                     f'config: {pformat(config)}' |                     f'config: {pformat(config)}' | ||||||
|                 ) |                 ) | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -21,6 +21,7 @@ Broker configuration mgmt. | ||||||
| import platform | import platform | ||||||
| import sys | import sys | ||||||
| import os | import os | ||||||
|  | from os import path | ||||||
| from os.path import dirname | from os.path import dirname | ||||||
| import shutil | import shutil | ||||||
| from typing import Optional | from typing import Optional | ||||||
|  | @ -111,6 +112,7 @@ if _parent_user: | ||||||
| 
 | 
 | ||||||
| _conf_names: set[str] = { | _conf_names: set[str] = { | ||||||
|     'brokers', |     'brokers', | ||||||
|  |     'pps', | ||||||
|     'trades', |     'trades', | ||||||
|     'watchlists', |     'watchlists', | ||||||
| } | } | ||||||
|  | @ -147,19 +149,21 @@ def get_conf_path( | ||||||
|     conf_name: str = 'brokers', |     conf_name: str = 'brokers', | ||||||
| 
 | 
 | ||||||
| ) -> str: | ) -> str: | ||||||
|     """Return the default config path normally under |     ''' | ||||||
|     ``~/.config/piker`` on linux. |     Return the top-level default config path normally under | ||||||
|  |     ``~/.config/piker`` on linux for a given ``conf_name``, the config | ||||||
|  |     name. | ||||||
| 
 | 
 | ||||||
|     Contains files such as: |     Contains files such as: | ||||||
|     - brokers.toml |     - brokers.toml | ||||||
|  |     - pp.toml | ||||||
|     - watchlists.toml |     - watchlists.toml | ||||||
|     - trades.toml |  | ||||||
| 
 | 
 | ||||||
|     # maybe coming soon ;) |     # maybe coming soon ;) | ||||||
|     - signals.toml |     - signals.toml | ||||||
|     - strats.toml |     - strats.toml | ||||||
| 
 | 
 | ||||||
|     """ |     ''' | ||||||
|     assert conf_name in _conf_names |     assert conf_name in _conf_names | ||||||
|     fn = _conf_fn_w_ext(conf_name) |     fn = _conf_fn_w_ext(conf_name) | ||||||
|     return os.path.join( |     return os.path.join( | ||||||
|  | @ -173,7 +177,7 @@ def repodir(): | ||||||
|     Return the abspath to the repo directory. |     Return the abspath to the repo directory. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     dirpath = os.path.abspath( |     dirpath = path.abspath( | ||||||
|         # we're 3 levels down in **this** module file |         # we're 3 levels down in **this** module file | ||||||
|         dirname(dirname(os.path.realpath(__file__))) |         dirname(dirname(os.path.realpath(__file__))) | ||||||
|     ) |     ) | ||||||
|  | @ -182,7 +186,9 @@ def repodir(): | ||||||
| 
 | 
 | ||||||
| def load( | def load( | ||||||
|     conf_name: str = 'brokers', |     conf_name: str = 'brokers', | ||||||
|     path: str = None |     path: str = None, | ||||||
|  | 
 | ||||||
|  |     **tomlkws, | ||||||
| 
 | 
 | ||||||
| ) -> (dict, str): | ) -> (dict, str): | ||||||
|     ''' |     ''' | ||||||
|  | @ -190,6 +196,7 @@ def load( | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     path = path or get_conf_path(conf_name) |     path = path or get_conf_path(conf_name) | ||||||
|  | 
 | ||||||
|     if not os.path.isfile(path): |     if not os.path.isfile(path): | ||||||
|         fn = _conf_fn_w_ext(conf_name) |         fn = _conf_fn_w_ext(conf_name) | ||||||
| 
 | 
 | ||||||
|  | @ -202,8 +209,11 @@ def load( | ||||||
|         # if one exists. |         # if one exists. | ||||||
|         if os.path.isfile(template): |         if os.path.isfile(template): | ||||||
|             shutil.copyfile(template, path) |             shutil.copyfile(template, path) | ||||||
|  |         else: | ||||||
|  |             with open(path, 'w'): | ||||||
|  |                 pass  # touch | ||||||
| 
 | 
 | ||||||
|     config = toml.load(path) |     config = toml.load(path, **tomlkws) | ||||||
|     log.debug(f"Read config file {path}") |     log.debug(f"Read config file {path}") | ||||||
|     return config, path |     return config, path | ||||||
| 
 | 
 | ||||||
|  | @ -212,6 +222,7 @@ def write( | ||||||
|     config: dict,  # toml config as dict |     config: dict,  # toml config as dict | ||||||
|     name: str = 'brokers', |     name: str = 'brokers', | ||||||
|     path: str = None, |     path: str = None, | ||||||
|  |     **toml_kwargs, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
|     '''' |     '''' | ||||||
|  | @ -235,11 +246,14 @@ def write( | ||||||
|         f"{path}" |         f"{path}" | ||||||
|     ) |     ) | ||||||
|     with open(path, 'w') as cf: |     with open(path, 'w') as cf: | ||||||
|         return toml.dump(config, cf) |         return toml.dump( | ||||||
|  |             config, | ||||||
|  |             cf, | ||||||
|  |             **toml_kwargs, | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def load_accounts( | def load_accounts( | ||||||
| 
 |  | ||||||
|     providers: Optional[list[str]] = None |     providers: Optional[list[str]] = None | ||||||
| 
 | 
 | ||||||
| ) -> bidict[str, Optional[str]]: | ) -> bidict[str, Optional[str]]: | ||||||
|  |  | ||||||
|  | @ -37,8 +37,13 @@ from docker.models.containers import Container as DockerContainer | ||||||
| from docker.errors import ( | from docker.errors import ( | ||||||
|     DockerException, |     DockerException, | ||||||
|     APIError, |     APIError, | ||||||
|  |     # ContainerError, | ||||||
|  | ) | ||||||
|  | import requests | ||||||
|  | from requests.exceptions import ( | ||||||
|  |     ConnectionError, | ||||||
|  |     ReadTimeout, | ||||||
| ) | ) | ||||||
| from requests.exceptions import ConnectionError, ReadTimeout |  | ||||||
| 
 | 
 | ||||||
| from ..log import get_logger, get_console_log | from ..log import get_logger, get_console_log | ||||||
| from .. import config | from .. import config | ||||||
|  | @ -50,8 +55,8 @@ class DockerNotStarted(Exception): | ||||||
|     'Prolly you dint start da daemon bruh' |     'Prolly you dint start da daemon bruh' | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class ContainerError(RuntimeError): | class ApplicationLogError(Exception): | ||||||
|     'Error reported via app-container logging level' |     'App in container reported an error in logs' | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @acm | @acm | ||||||
|  | @ -96,9 +101,9 @@ async def open_docker( | ||||||
|         # not perms? |         # not perms? | ||||||
|         raise |         raise | ||||||
| 
 | 
 | ||||||
|     finally: |     # finally: | ||||||
|         if client: |     #     if client: | ||||||
|             client.close() |     #         client.close() | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class Container: | class Container: | ||||||
|  | @ -156,7 +161,7 @@ class Container: | ||||||
| 
 | 
 | ||||||
|                     # print(f'level: {level}') |                     # print(f'level: {level}') | ||||||
|                     if level in ('error', 'fatal'): |                     if level in ('error', 'fatal'): | ||||||
|                         raise ContainerError(msg) |                         raise ApplicationLogError(msg) | ||||||
| 
 | 
 | ||||||
|                 if patt in msg: |                 if patt in msg: | ||||||
|                     return True |                     return True | ||||||
|  | @ -185,12 +190,29 @@ class Container: | ||||||
|             if 'is not running' in err.explanation: |             if 'is not running' in err.explanation: | ||||||
|                 return False |                 return False | ||||||
| 
 | 
 | ||||||
|  |     def hard_kill(self, start: float) -> None: | ||||||
|  |         delay = time.time() - start | ||||||
|  |         # get out the big guns, bc apparently marketstore | ||||||
|  |         # doesn't actually know how to terminate gracefully | ||||||
|  |         # :eyeroll:... | ||||||
|  |         log.error( | ||||||
|  |             f'SIGKILL-ing: {self.cntr.id} after {delay}s\n' | ||||||
|  |         ) | ||||||
|  |         self.try_signal('SIGKILL') | ||||||
|  |         self.cntr.wait( | ||||||
|  |             timeout=3, | ||||||
|  |             condition='not-running', | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|     async def cancel( |     async def cancel( | ||||||
|         self, |         self, | ||||||
|         stop_msg: str, |         stop_msg: str, | ||||||
|  |         hard_kill: bool = False, | ||||||
|  | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
| 
 | 
 | ||||||
|         cid = self.cntr.id |         cid = self.cntr.id | ||||||
|  | 
 | ||||||
|         # first try a graceful cancel |         # first try a graceful cancel | ||||||
|         log.cancel( |         log.cancel( | ||||||
|             f'SIGINT cancelling container: {cid}\n' |             f'SIGINT cancelling container: {cid}\n' | ||||||
|  | @ -199,16 +221,26 @@ class Container: | ||||||
|         self.try_signal('SIGINT') |         self.try_signal('SIGINT') | ||||||
| 
 | 
 | ||||||
|         start = time.time() |         start = time.time() | ||||||
|         for _ in range(30): |         for _ in range(6): | ||||||
| 
 | 
 | ||||||
|             with trio.move_on_after(0.5) as cs: |             with trio.move_on_after(0.5) as cs: | ||||||
|                 cs.shield = True |                 log.cancel('polling for CNTR logs...') | ||||||
|                 await self.process_logs_until(stop_msg) |  | ||||||
| 
 | 
 | ||||||
|  |                 try: | ||||||
|  |                     await self.process_logs_until(stop_msg) | ||||||
|  |                 except ApplicationLogError: | ||||||
|  |                     hard_kill = True | ||||||
|  |                 else: | ||||||
|                     # if we aren't cancelled on above checkpoint then we |                     # if we aren't cancelled on above checkpoint then we | ||||||
|                 # assume we read the expected stop msg and terminated. |                     # assume we read the expected stop msg and | ||||||
|  |                     # terminated. | ||||||
|                     break |                     break | ||||||
| 
 | 
 | ||||||
|  |             if cs.cancelled_caught: | ||||||
|  |                 # on timeout just try a hard kill after | ||||||
|  |                 # a quick container sync-wait. | ||||||
|  |                 hard_kill = True | ||||||
|  | 
 | ||||||
|             try: |             try: | ||||||
|                 log.info(f'Polling for container shutdown:\n{cid}') |                 log.info(f'Polling for container shutdown:\n{cid}') | ||||||
| 
 | 
 | ||||||
|  | @ -218,6 +250,7 @@ class Container: | ||||||
|                         condition='not-running', |                         condition='not-running', | ||||||
|                     ) |                     ) | ||||||
| 
 | 
 | ||||||
|  |                 # graceful exit if we didn't time out | ||||||
|                 break |                 break | ||||||
| 
 | 
 | ||||||
|             except ( |             except ( | ||||||
|  | @ -229,24 +262,22 @@ class Container: | ||||||
|             except ( |             except ( | ||||||
|                 docker.errors.APIError, |                 docker.errors.APIError, | ||||||
|                 ConnectionError, |                 ConnectionError, | ||||||
|  |                 requests.exceptions.ConnectionError, | ||||||
|  |                 trio.Cancelled, | ||||||
|             ): |             ): | ||||||
|                 log.exception('Docker connection failure') |                 log.exception('Docker connection failure') | ||||||
|                 break |                 self.hard_kill(start) | ||||||
|         else: |                 raise | ||||||
|             delay = time.time() - start |  | ||||||
|             log.error( |  | ||||||
|                 f'Failed to kill container {cid} after {delay}s\n' |  | ||||||
|                 'sending SIGKILL..' |  | ||||||
|             ) |  | ||||||
|             # get out the big guns, bc apparently marketstore |  | ||||||
|             # doesn't actually know how to terminate gracefully |  | ||||||
|             # :eyeroll:... |  | ||||||
|             self.try_signal('SIGKILL') |  | ||||||
|             self.cntr.wait( |  | ||||||
|                 timeout=3, |  | ||||||
|                 condition='not-running', |  | ||||||
|             ) |  | ||||||
| 
 | 
 | ||||||
|  |             except trio.Cancelled: | ||||||
|  |                 log.exception('trio cancelled...') | ||||||
|  |                 self.hard_kill(start) | ||||||
|  |         else: | ||||||
|  |             hard_kill = True | ||||||
|  | 
 | ||||||
|  |         if hard_kill: | ||||||
|  |             self.hard_kill(start) | ||||||
|  |         else: | ||||||
|             log.cancel(f'Container stopped: {cid}') |             log.cancel(f'Container stopped: {cid}') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -289,14 +320,12 @@ async def open_ahabd( | ||||||
|         )) |         )) | ||||||
| 
 | 
 | ||||||
|         try: |         try: | ||||||
| 
 |  | ||||||
|             # TODO: we might eventually want a proxy-style msg-prot here |             # TODO: we might eventually want a proxy-style msg-prot here | ||||||
|             # to allow remote control of containers without needing |             # to allow remote control of containers without needing | ||||||
|             # callers to have root perms? |             # callers to have root perms? | ||||||
|             await trio.sleep_forever() |             await trio.sleep_forever() | ||||||
| 
 | 
 | ||||||
|         finally: |         finally: | ||||||
|             with trio.CancelScope(shield=True): |  | ||||||
|             await cntr.cancel(stop_msg) |             await cntr.cancel(stop_msg) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -56,7 +56,7 @@ def iterticks( | ||||||
|                     sig = ( |                     sig = ( | ||||||
|                         time, |                         time, | ||||||
|                         tick['price'], |                         tick['price'], | ||||||
|                         tick['size'] |                         tick.get('size') | ||||||
|                     ) |                     ) | ||||||
| 
 | 
 | ||||||
|                     if ttype == 'dark_trade': |                     if ttype == 'dark_trade': | ||||||
|  |  | ||||||
|  | @ -1,5 +1,5 @@ | ||||||
| # piker: trading gear for hackers | # piker: trading gear for hackers | ||||||
| # Copyright (C) Tyler Goodlet (in stewardship for piker0) | # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||||
| 
 | 
 | ||||||
| # This program is free software: you can redistribute it and/or modify | # This program is free software: you can redistribute it and/or modify | ||||||
| # it under the terms of the GNU Affero General Public License as published by | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | @ -27,13 +27,14 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX | ||||||
| if _USE_POSIX: | if _USE_POSIX: | ||||||
|     from _posixshmem import shm_unlink |     from _posixshmem import shm_unlink | ||||||
| 
 | 
 | ||||||
| import tractor | # import msgspec | ||||||
| import numpy as np | import numpy as np | ||||||
| from pydantic import BaseModel |  | ||||||
| from numpy.lib import recfunctions as rfn | from numpy.lib import recfunctions as rfn | ||||||
|  | import tractor | ||||||
| 
 | 
 | ||||||
| from ..log import get_logger | from ..log import get_logger | ||||||
| from ._source import base_iohlc_dtype | from ._source import base_iohlc_dtype | ||||||
|  | from .types import Struct | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
|  | @ -49,7 +50,11 @@ _rt_buffer_start = int((_days_worth - 1) * _secs_in_day) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def cuckoff_mantracker(): | def cuckoff_mantracker(): | ||||||
|  |     ''' | ||||||
|  |     Disable all ``multiprocessing``` "resource tracking" machinery since | ||||||
|  |     it's an absolute multi-threaded mess of non-SC madness. | ||||||
| 
 | 
 | ||||||
|  |     ''' | ||||||
|     from multiprocessing import resource_tracker as mantracker |     from multiprocessing import resource_tracker as mantracker | ||||||
| 
 | 
 | ||||||
|     # Tell the "resource tracker" thing to fuck off. |     # Tell the "resource tracker" thing to fuck off. | ||||||
|  | @ -107,36 +112,39 @@ class SharedInt: | ||||||
|                 log.warning(f'Shm for {name} already unlinked?') |                 log.warning(f'Shm for {name} already unlinked?') | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class _Token(BaseModel): | class _Token(Struct, frozen=True): | ||||||
|     ''' |     ''' | ||||||
|     Internal represenation of a shared memory "token" |     Internal represenation of a shared memory "token" | ||||||
|     which can be used to key a system wide post shm entry. |     which can be used to key a system wide post shm entry. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     class Config: |  | ||||||
|         frozen = True |  | ||||||
| 
 |  | ||||||
|     shm_name: str  # this servers as a "key" value |     shm_name: str  # this servers as a "key" value | ||||||
|     shm_first_index_name: str |     shm_first_index_name: str | ||||||
|     shm_last_index_name: str |     shm_last_index_name: str | ||||||
|     dtype_descr: tuple |     dtype_descr: tuple | ||||||
|  |     size: int  # in struct-array index / row terms | ||||||
| 
 | 
 | ||||||
|     @property |     @property | ||||||
|     def dtype(self) -> np.dtype: |     def dtype(self) -> np.dtype: | ||||||
|         return np.dtype(list(map(tuple, self.dtype_descr))).descr |         return np.dtype(list(map(tuple, self.dtype_descr))).descr | ||||||
| 
 | 
 | ||||||
|     def as_msg(self): |     def as_msg(self): | ||||||
|         return self.dict() |         return self.to_dict() | ||||||
| 
 | 
 | ||||||
|     @classmethod |     @classmethod | ||||||
|     def from_msg(cls, msg: dict) -> _Token: |     def from_msg(cls, msg: dict) -> _Token: | ||||||
|         if isinstance(msg, _Token): |         if isinstance(msg, _Token): | ||||||
|             return msg |             return msg | ||||||
| 
 | 
 | ||||||
|  |         # TODO: native struct decoding | ||||||
|  |         # return _token_dec.decode(msg) | ||||||
|  | 
 | ||||||
|         msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr'])) |         msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr'])) | ||||||
|         return _Token(**msg) |         return _Token(**msg) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | # _token_dec = msgspec.msgpack.Decoder(_Token) | ||||||
|  | 
 | ||||||
| # TODO: this api? | # TODO: this api? | ||||||
| # _known_tokens = tractor.ActorVar('_shm_tokens', {}) | # _known_tokens = tractor.ActorVar('_shm_tokens', {}) | ||||||
| # _known_tokens = tractor.ContextStack('_known_tokens', ) | # _known_tokens = tractor.ContextStack('_known_tokens', ) | ||||||
|  | @ -155,6 +163,7 @@ def get_shm_token(key: str) -> _Token: | ||||||
| 
 | 
 | ||||||
| def _make_token( | def _make_token( | ||||||
|     key: str, |     key: str, | ||||||
|  |     size: int, | ||||||
|     dtype: Optional[np.dtype] = None, |     dtype: Optional[np.dtype] = None, | ||||||
| ) -> _Token: | ) -> _Token: | ||||||
|     ''' |     ''' | ||||||
|  | @ -167,7 +176,8 @@ def _make_token( | ||||||
|         shm_name=key, |         shm_name=key, | ||||||
|         shm_first_index_name=key + "_first", |         shm_first_index_name=key + "_first", | ||||||
|         shm_last_index_name=key + "_last", |         shm_last_index_name=key + "_last", | ||||||
|         dtype_descr=np.dtype(dtype).descr |         dtype_descr=tuple(np.dtype(dtype).descr), | ||||||
|  |         size=size, | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -219,6 +229,7 @@ class ShmArray: | ||||||
|             shm_first_index_name=self._first._shm.name, |             shm_first_index_name=self._first._shm.name, | ||||||
|             shm_last_index_name=self._last._shm.name, |             shm_last_index_name=self._last._shm.name, | ||||||
|             dtype_descr=tuple(self._array.dtype.descr), |             dtype_descr=tuple(self._array.dtype.descr), | ||||||
|  |             size=self._len, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|     @property |     @property | ||||||
|  | @ -433,7 +444,7 @@ class ShmArray: | ||||||
| def open_shm_array( | def open_shm_array( | ||||||
| 
 | 
 | ||||||
|     key: Optional[str] = None, |     key: Optional[str] = None, | ||||||
|     size: int = _default_size, |     size: int = _default_size,  # see above | ||||||
|     dtype: Optional[np.dtype] = None, |     dtype: Optional[np.dtype] = None, | ||||||
|     readonly: bool = False, |     readonly: bool = False, | ||||||
| 
 | 
 | ||||||
|  | @ -464,7 +475,8 @@ def open_shm_array( | ||||||
| 
 | 
 | ||||||
|     token = _make_token( |     token = _make_token( | ||||||
|         key=key, |         key=key, | ||||||
|         dtype=dtype |         size=size, | ||||||
|  |         dtype=dtype, | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     # create single entry arrays for storing an first and last indices |     # create single entry arrays for storing an first and last indices | ||||||
|  | @ -516,6 +528,7 @@ def open_shm_array( | ||||||
|     # "unlink" created shm on process teardown by |     # "unlink" created shm on process teardown by | ||||||
|     # pushing teardown calls onto actor context stack |     # pushing teardown calls onto actor context stack | ||||||
| 
 | 
 | ||||||
|  |     # TODO: make this a public API in ``tractor``.. | ||||||
|     tractor._actor._lifetime_stack.callback(shmarr.close) |     tractor._actor._lifetime_stack.callback(shmarr.close) | ||||||
|     tractor._actor._lifetime_stack.callback(shmarr.destroy) |     tractor._actor._lifetime_stack.callback(shmarr.destroy) | ||||||
| 
 | 
 | ||||||
|  | @ -524,7 +537,6 @@ def open_shm_array( | ||||||
| 
 | 
 | ||||||
| def attach_shm_array( | def attach_shm_array( | ||||||
|     token: tuple[str, str, tuple[str, str]], |     token: tuple[str, str, tuple[str, str]], | ||||||
|     size: int = _default_size, |  | ||||||
|     readonly: bool = True, |     readonly: bool = True, | ||||||
| 
 | 
 | ||||||
| ) -> ShmArray: | ) -> ShmArray: | ||||||
|  | @ -563,7 +575,7 @@ def attach_shm_array( | ||||||
|             raise _err |             raise _err | ||||||
| 
 | 
 | ||||||
|     shmarr = np.ndarray( |     shmarr = np.ndarray( | ||||||
|         (size,), |         (token.size,), | ||||||
|         dtype=token.dtype, |         dtype=token.dtype, | ||||||
|         buffer=shm.buf |         buffer=shm.buf | ||||||
|     ) |     ) | ||||||
|  | @ -631,6 +643,7 @@ def maybe_open_shm_array( | ||||||
|     use ``attach_shm_array``. |     use ``attach_shm_array``. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  |     size = kwargs.pop('size', _default_size) | ||||||
|     try: |     try: | ||||||
|         # see if we already know this key |         # see if we already know this key | ||||||
|         token = _known_tokens[key] |         token = _known_tokens[key] | ||||||
|  | @ -638,7 +651,11 @@ def maybe_open_shm_array( | ||||||
|     except KeyError: |     except KeyError: | ||||||
|         log.warning(f"Could not find {key} in shms cache") |         log.warning(f"Could not find {key} in shms cache") | ||||||
|         if dtype: |         if dtype: | ||||||
|             token = _make_token(key, dtype) |             token = _make_token( | ||||||
|  |                 key, | ||||||
|  |                 size=size, | ||||||
|  |                 dtype=dtype, | ||||||
|  |             ) | ||||||
|             try: |             try: | ||||||
|                 return attach_shm_array(token=token, **kwargs), False |                 return attach_shm_array(token=token, **kwargs), False | ||||||
|             except FileNotFoundError: |             except FileNotFoundError: | ||||||
|  |  | ||||||
|  | @ -23,7 +23,7 @@ import decimal | ||||||
| 
 | 
 | ||||||
| from bidict import bidict | from bidict import bidict | ||||||
| import numpy as np | import numpy as np | ||||||
| from pydantic import BaseModel | from msgspec import Struct | ||||||
| # from numba import from_dtype | # from numba import from_dtype | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -126,7 +126,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]: | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class Symbol(BaseModel): | class Symbol(Struct): | ||||||
|     ''' |     ''' | ||||||
|     I guess this is some kinda container thing for dealing with |     I guess this is some kinda container thing for dealing with | ||||||
|     all the different meta-data formats from brokers? |     all the different meta-data formats from brokers? | ||||||
|  | @ -152,9 +152,7 @@ class Symbol(BaseModel): | ||||||
|         info: dict[str, Any], |         info: dict[str, Any], | ||||||
|         suffix: str = '', |         suffix: str = '', | ||||||
| 
 | 
 | ||||||
|     # XXX: like wtf.. |     ) -> Symbol: | ||||||
|     # ) -> 'Symbol': |  | ||||||
|     ) -> None: |  | ||||||
| 
 | 
 | ||||||
|         tick_size = info.get('price_tick_size', 0.01) |         tick_size = info.get('price_tick_size', 0.01) | ||||||
|         lot_tick_size = info.get('lot_tick_size', 0.0) |         lot_tick_size = info.get('lot_tick_size', 0.0) | ||||||
|  | @ -175,9 +173,7 @@ class Symbol(BaseModel): | ||||||
|         fqsn: str, |         fqsn: str, | ||||||
|         info: dict[str, Any], |         info: dict[str, Any], | ||||||
| 
 | 
 | ||||||
|     # XXX: like wtf.. |     ) -> Symbol: | ||||||
|     # ) -> 'Symbol': |  | ||||||
|     ) -> None: |  | ||||||
|         broker, key, suffix = unpack_fqsn(fqsn) |         broker, key, suffix = unpack_fqsn(fqsn) | ||||||
|         return cls.from_broker_info( |         return cls.from_broker_info( | ||||||
|             broker, |             broker, | ||||||
|  | @ -240,7 +236,7 @@ class Symbol(BaseModel): | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         tokens = self.tokens() |         tokens = self.tokens() | ||||||
|         fqsn = '.'.join(tokens) |         fqsn = '.'.join(map(str.lower, tokens)) | ||||||
|         return fqsn |         return fqsn | ||||||
| 
 | 
 | ||||||
|     def iterfqsns(self) -> list[str]: |     def iterfqsns(self) -> list[str]: | ||||||
|  |  | ||||||
|  | @ -19,8 +19,9 @@ ToOlS fOr CoPInG wITh "tHE wEB" protocols. | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from contextlib import asynccontextmanager, AsyncExitStack | from contextlib import asynccontextmanager, AsyncExitStack | ||||||
|  | from itertools import count | ||||||
| from types import ModuleType | from types import ModuleType | ||||||
| from typing import Any, Callable, AsyncGenerator | from typing import Any, Optional, Callable, AsyncGenerator | ||||||
| import json | import json | ||||||
| 
 | 
 | ||||||
| import trio | import trio | ||||||
|  | @ -35,6 +36,8 @@ from trio_websocket._impl import ( | ||||||
| 
 | 
 | ||||||
| from ..log import get_logger | from ..log import get_logger | ||||||
| 
 | 
 | ||||||
|  | from .types import Struct | ||||||
|  | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -53,13 +56,11 @@ class NoBsWs: | ||||||
|     def __init__( |     def __init__( | ||||||
|         self, |         self, | ||||||
|         url: str, |         url: str, | ||||||
|         token: str, |  | ||||||
|         stack: AsyncExitStack, |         stack: AsyncExitStack, | ||||||
|         fixture: Callable, |         fixture: Optional[Callable] = None, | ||||||
|         serializer: ModuleType = json, |         serializer: ModuleType = json | ||||||
|     ): |     ): | ||||||
|         self.url = url |         self.url = url | ||||||
|         self.token = token |  | ||||||
|         self.fixture = fixture |         self.fixture = fixture | ||||||
|         self._stack = stack |         self._stack = stack | ||||||
|         self._ws: 'WebSocketConnection' = None  # noqa |         self._ws: 'WebSocketConnection' = None  # noqa | ||||||
|  | @ -82,15 +83,12 @@ class NoBsWs: | ||||||
|                 self._ws = await self._stack.enter_async_context( |                 self._ws = await self._stack.enter_async_context( | ||||||
|                     trio_websocket.open_websocket_url(self.url) |                     trio_websocket.open_websocket_url(self.url) | ||||||
|                 ) |                 ) | ||||||
|  | 
 | ||||||
|  |                 if self.fixture is not None: | ||||||
|                     # rerun user code fixture |                     # rerun user code fixture | ||||||
|                 if self.token == '': |  | ||||||
|                     ret = await self._stack.enter_async_context( |                     ret = await self._stack.enter_async_context( | ||||||
|                         self.fixture(self) |                         self.fixture(self) | ||||||
|                     ) |                     ) | ||||||
|                 else: |  | ||||||
|                     ret = await self._stack.enter_async_context( |  | ||||||
|                         self.fixture(self, self.token) |  | ||||||
|                     ) |  | ||||||
| 
 | 
 | ||||||
|                     assert ret is None |                     assert ret is None | ||||||
| 
 | 
 | ||||||
|  | @ -128,21 +126,26 @@ class NoBsWs: | ||||||
|             except self.recon_errors: |             except self.recon_errors: | ||||||
|                 await self._connect() |                 await self._connect() | ||||||
| 
 | 
 | ||||||
|  |     def __aiter__(self): | ||||||
|  |         return self | ||||||
|  | 
 | ||||||
|  |     async def __anext__(self): | ||||||
|  |         return await self.recv_msg() | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| @asynccontextmanager | @asynccontextmanager | ||||||
| async def open_autorecon_ws( | async def open_autorecon_ws( | ||||||
|     url: str, |     url: str, | ||||||
| 
 | 
 | ||||||
|     # TODO: proper type annot smh |     # TODO: proper type annot smh | ||||||
|     fixture: Callable, |     fixture: Optional[Callable] = None, | ||||||
|     # used for authenticated websockets | 
 | ||||||
|     token: str = '', |  | ||||||
| ) -> AsyncGenerator[tuple[...],  NoBsWs]: | ) -> AsyncGenerator[tuple[...],  NoBsWs]: | ||||||
|     """Apparently we can QoS for all sorts of reasons..so catch em. |     """Apparently we can QoS for all sorts of reasons..so catch em. | ||||||
| 
 | 
 | ||||||
|     """ |     """ | ||||||
|     async with AsyncExitStack() as stack: |     async with AsyncExitStack() as stack: | ||||||
|         ws = NoBsWs(url, token, stack, fixture=fixture) |         ws = NoBsWs(url, stack, fixture=fixture) | ||||||
|         await ws._connect() |         await ws._connect() | ||||||
| 
 | 
 | ||||||
|         try: |         try: | ||||||
|  | @ -150,3 +153,86 @@ async def open_autorecon_ws( | ||||||
| 
 | 
 | ||||||
|         finally: |         finally: | ||||||
|             await stack.aclose() |             await stack.aclose() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | JSONRPC response-request style machinery for transparent multiplexing of msgs | ||||||
|  | over a NoBsWs. | ||||||
|  | ''' | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class JSONRPCResult(Struct): | ||||||
|  |     jsonrpc: str = '2.0' | ||||||
|  |     id: int | ||||||
|  |     result: Optional[dict] = None | ||||||
|  |     error: Optional[dict] = None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @asynccontextmanager | ||||||
|  | async def open_jsonrpc_session( | ||||||
|  |     url: str, | ||||||
|  |     start_id: int = 0, | ||||||
|  |     dtype: type = JSONRPCResult | ||||||
|  | ) -> Callable[[str, dict], dict]: | ||||||
|  | 
 | ||||||
|  |     async with ( | ||||||
|  |         trio.open_nursery() as n, | ||||||
|  |         open_autorecon_ws(url) as ws | ||||||
|  |     ): | ||||||
|  |         rpc_id: Iterable = count(start_id) | ||||||
|  |         rpc_results: dict[int, dict] = {} | ||||||
|  | 
 | ||||||
|  |         async def json_rpc(method: str, params: dict) -> dict: | ||||||
|  |             ''' | ||||||
|  |             perform a json rpc call and wait for the result, raise exception in | ||||||
|  |             case of error field present on response | ||||||
|  |             ''' | ||||||
|  |             msg = { | ||||||
|  |                 'jsonrpc': '2.0', | ||||||
|  |                 'id': next(rpc_id), | ||||||
|  |                 'method': method, | ||||||
|  |                 'params': params | ||||||
|  |             } | ||||||
|  |             _id = msg['id'] | ||||||
|  | 
 | ||||||
|  |             rpc_results[_id] = { | ||||||
|  |                 'result': None, | ||||||
|  |                 'event': trio.Event() | ||||||
|  |             } | ||||||
|  | 
 | ||||||
|  |             await ws.send_msg(msg) | ||||||
|  | 
 | ||||||
|  |             await rpc_results[_id]['event'].wait() | ||||||
|  | 
 | ||||||
|  |             ret = rpc_results[_id]['result'] | ||||||
|  | 
 | ||||||
|  |             del rpc_results[_id] | ||||||
|  | 
 | ||||||
|  |             if ret.error is not None: | ||||||
|  |                 raise Exception(json.dumps(ret.error, indent=4)) | ||||||
|  | 
 | ||||||
|  |             return ret | ||||||
|  | 
 | ||||||
|  |         async def recv_task(): | ||||||
|  |             ''' | ||||||
|  |             receives every ws message and stores it in its corresponding result | ||||||
|  |             field, then sets the event to wakeup original sender tasks. | ||||||
|  |             ''' | ||||||
|  |             async for msg in ws: | ||||||
|  |                 msg = dtype(**msg) | ||||||
|  | 
 | ||||||
|  |                 if msg.id not in rpc_results: | ||||||
|  |                     log.warning(f'Wasn\'t expecting ws msg: {json.dumps(msg, indent=4)}') | ||||||
|  | 
 | ||||||
|  |                 res = rpc_results.setdefault( | ||||||
|  |                     msg.id, | ||||||
|  |                     {'result': None, 'event': trio.Event()} | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |                 res['result'] = msg | ||||||
|  |                 res['event'].set() | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  |         n.start_soon(recv_task) | ||||||
|  |         yield json_rpc | ||||||
|  |         n.cancel_scope.cancel() | ||||||
|  |  | ||||||
|  | @ -42,7 +42,6 @@ from trio_typing import TaskStatus | ||||||
| import trimeter | import trimeter | ||||||
| import tractor | import tractor | ||||||
| from tractor.trionics import maybe_open_context | from tractor.trionics import maybe_open_context | ||||||
| from pydantic import BaseModel |  | ||||||
| import pendulum | import pendulum | ||||||
| import numpy as np | import numpy as np | ||||||
| 
 | 
 | ||||||
|  | @ -59,6 +58,7 @@ from ._sharedmem import ( | ||||||
|     ShmArray, |     ShmArray, | ||||||
| ) | ) | ||||||
| from .ingest import get_ingestormod | from .ingest import get_ingestormod | ||||||
|  | from .types import Struct | ||||||
| from ._source import ( | from ._source import ( | ||||||
|     base_iohlc_dtype, |     base_iohlc_dtype, | ||||||
|     Symbol, |     Symbol, | ||||||
|  | @ -84,7 +84,7 @@ if TYPE_CHECKING: | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class _FeedsBus(BaseModel): | class _FeedsBus(Struct): | ||||||
|     ''' |     ''' | ||||||
|     Data feeds broadcaster and persistence management. |     Data feeds broadcaster and persistence management. | ||||||
| 
 | 
 | ||||||
|  | @ -100,10 +100,6 @@ class _FeedsBus(BaseModel): | ||||||
|           a dedicated cancel scope. |           a dedicated cancel scope. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     class Config: |  | ||||||
|         arbitrary_types_allowed = True |  | ||||||
|         underscore_attrs_are_private = False |  | ||||||
| 
 |  | ||||||
|     brokername: str |     brokername: str | ||||||
|     nursery: trio.Nursery |     nursery: trio.Nursery | ||||||
|     feeds: dict[str, tuple[dict, dict]] = {} |     feeds: dict[str, tuple[dict, dict]] = {} | ||||||
|  | @ -313,7 +309,7 @@ async def start_backfill( | ||||||
|             # when no tsdb "last datum" is provided, we just load |             # when no tsdb "last datum" is provided, we just load | ||||||
|             # some near-term history. |             # some near-term history. | ||||||
|             periods = { |             periods = { | ||||||
|                 1: {'days': 1}, |                 1: {'seconds': 4000}, | ||||||
|                 60: {'days': 14}, |                 60: {'days': 14}, | ||||||
|             } |             } | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -37,7 +37,7 @@ import time | ||||||
| from math import isnan | from math import isnan | ||||||
| 
 | 
 | ||||||
| from bidict import bidict | from bidict import bidict | ||||||
| import msgpack | from msgspec.msgpack import encode, decode | ||||||
| import pyqtgraph as pg | import pyqtgraph as pg | ||||||
| import numpy as np | import numpy as np | ||||||
| import tractor | import tractor | ||||||
|  | @ -774,12 +774,13 @@ async def stream_quotes( | ||||||
|     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws: |     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws: | ||||||
|         # send subs topics to server |         # send subs topics to server | ||||||
|         resp = await ws.send_message( |         resp = await ws.send_message( | ||||||
|             msgpack.dumps({'streams': list(tbks.values())}) | 
 | ||||||
|  |             encode({'streams': list(tbks.values())}) | ||||||
|         ) |         ) | ||||||
|         log.info(resp) |         log.info(resp) | ||||||
| 
 | 
 | ||||||
|         async def recv() -> dict[str, Any]: |         async def recv() -> dict[str, Any]: | ||||||
|             return msgpack.loads((await ws.get_message()), encoding='utf-8') |             return decode((await ws.get_message()), encoding='utf-8') | ||||||
| 
 | 
 | ||||||
|         streams = (await recv())['streams'] |         streams = (await recv())['streams'] | ||||||
|         log.info(f"Subscribed to {streams}") |         log.info(f"Subscribed to {streams}") | ||||||
|  |  | ||||||
|  | @ -0,0 +1,84 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Guillermo Rodriguez (in stewardship for piker0) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | Built-in (extension) types. | ||||||
|  | 
 | ||||||
|  | """ | ||||||
|  | import sys | ||||||
|  | from typing import Optional | ||||||
|  | from pprint import pformat | ||||||
|  | 
 | ||||||
|  | import msgspec | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Struct( | ||||||
|  |     msgspec.Struct, | ||||||
|  | 
 | ||||||
|  |     # https://jcristharif.com/msgspec/structs.html#tagged-unions | ||||||
|  |     # tag='pikerstruct', | ||||||
|  |     # tag=True, | ||||||
|  | ): | ||||||
|  |     ''' | ||||||
|  |     A "human friendlier" (aka repl buddy) struct subtype. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     def to_dict(self) -> dict: | ||||||
|  |         return { | ||||||
|  |             f: getattr(self, f) | ||||||
|  |             for f in self.__struct_fields__ | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |     def __repr__(self): | ||||||
|  |         # only turn on pprint when we detect a python REPL | ||||||
|  |         # at runtime B) | ||||||
|  |         if ( | ||||||
|  |             hasattr(sys, 'ps1') | ||||||
|  |             # TODO: check if we're in pdb | ||||||
|  |         ): | ||||||
|  |             return f'Struct({pformat(self.to_dict())})' | ||||||
|  | 
 | ||||||
|  |         return super().__repr__() | ||||||
|  | 
 | ||||||
|  |     def copy( | ||||||
|  |         self, | ||||||
|  |         update: Optional[dict] = None, | ||||||
|  | 
 | ||||||
|  |     ) -> msgspec.Struct: | ||||||
|  |         ''' | ||||||
|  |         Validate-typecast all self defined fields, return a copy of us | ||||||
|  |         with all such fields. | ||||||
|  | 
 | ||||||
|  |         This is kinda like the default behaviour in `pydantic.BaseModel`. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         if update: | ||||||
|  |             for k, v in update.items(): | ||||||
|  |                 setattr(self, k, v) | ||||||
|  | 
 | ||||||
|  |         # roundtrip serialize to validate | ||||||
|  |         return msgspec.msgpack.Decoder( | ||||||
|  |             type=type(self) | ||||||
|  |         ).decode( | ||||||
|  |             msgspec.msgpack.Encoder().encode(self) | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |     def typecast( | ||||||
|  |         self, | ||||||
|  |         # fields: Optional[list[str]] = None, | ||||||
|  |     ) -> None: | ||||||
|  |         for fname, ftype in self.__annotations__.items(): | ||||||
|  |             setattr(self, fname, ftype(getattr(self, fname))) | ||||||
|  | @ -78,7 +78,8 @@ class Fsp: | ||||||
|     # + the consuming fsp *to* the consumers output |     # + the consuming fsp *to* the consumers output | ||||||
|     # shm flow. |     # shm flow. | ||||||
|     _flow_registry: dict[ |     _flow_registry: dict[ | ||||||
|         tuple[_Token, str], _Token, |         tuple[_Token, str], | ||||||
|  |         tuple[_Token, Optional[ShmArray]], | ||||||
|     ] = {} |     ] = {} | ||||||
| 
 | 
 | ||||||
|     def __init__( |     def __init__( | ||||||
|  | @ -120,7 +121,6 @@ class Fsp: | ||||||
|     ): |     ): | ||||||
|         return self.func(*args, **kwargs) |         return self.func(*args, **kwargs) | ||||||
| 
 | 
 | ||||||
|     # TODO: lru_cache this? prettty sure it'll work? |  | ||||||
|     def get_shm( |     def get_shm( | ||||||
|         self, |         self, | ||||||
|         src_shm: ShmArray, |         src_shm: ShmArray, | ||||||
|  | @ -131,12 +131,27 @@ class Fsp: | ||||||
|         for this "instance" of a signal processor for |         for this "instance" of a signal processor for | ||||||
|         the given ``key``. |         the given ``key``. | ||||||
| 
 | 
 | ||||||
|  |         The destination shm "token" and array are cached if possible to | ||||||
|  |         minimize multiple stdlib/system calls. | ||||||
|  | 
 | ||||||
|         ''' |         ''' | ||||||
|         dst_token = self._flow_registry[ |         dst_token, maybe_array = self._flow_registry[ | ||||||
|             (src_shm._token, self.name) |             (src_shm._token, self.name) | ||||||
|         ] |         ] | ||||||
|         shm = attach_shm_array(dst_token) |         if maybe_array is None: | ||||||
|         return shm |             self._flow_registry[ | ||||||
|  |                 (src_shm._token, self.name) | ||||||
|  |             ] = ( | ||||||
|  |                 dst_token, | ||||||
|  |                 # "cache" the ``ShmArray`` such that | ||||||
|  |                 # we call the underlying "attach" code as few | ||||||
|  |                 # times as possible as per: | ||||||
|  |                 # - https://github.com/pikers/piker/issues/359 | ||||||
|  |                 # - https://github.com/pikers/piker/issues/332 | ||||||
|  |                 maybe_array := attach_shm_array(dst_token) | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |         return maybe_array | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def fsp( | def fsp( | ||||||
|  |  | ||||||
|  | @ -114,7 +114,7 @@ async def fsp_compute( | ||||||
|         dict[str, np.ndarray],  # multi-output case |         dict[str, np.ndarray],  # multi-output case | ||||||
|         np.ndarray,  # single output case |         np.ndarray,  # single output case | ||||||
|     ] |     ] | ||||||
|     history_output = await out_stream.__anext__() |     history_output = await anext(out_stream) | ||||||
| 
 | 
 | ||||||
|     func_name = func.__name__ |     func_name = func.__name__ | ||||||
|     profiler(f'{func_name} generated history') |     profiler(f'{func_name} generated history') | ||||||
|  | @ -284,9 +284,10 @@ async def cascade( | ||||||
|     # TODO: ugh i hate this wind/unwind to list over the wire |     # TODO: ugh i hate this wind/unwind to list over the wire | ||||||
|     # but not sure how else to do it. |     # but not sure how else to do it. | ||||||
|     for (token, fsp_name, dst_token) in shm_registry: |     for (token, fsp_name, dst_token) in shm_registry: | ||||||
|         Fsp._flow_registry[ |         Fsp._flow_registry[( | ||||||
|             (_Token.from_msg(token), fsp_name) |             _Token.from_msg(token), | ||||||
|         ] = _Token.from_msg(dst_token) |             fsp_name, | ||||||
|  |         )] = _Token.from_msg(dst_token), None | ||||||
| 
 | 
 | ||||||
|     fsp: Fsp = reg.get( |     fsp: Fsp = reg.get( | ||||||
|         NamespacePath(ns_path) |         NamespacePath(ns_path) | ||||||
|  | @ -374,7 +375,8 @@ async def cascade( | ||||||
|                             'key': dst_shm_token, |                             'key': dst_shm_token, | ||||||
|                             'first': dst._first.value, |                             'first': dst._first.value, | ||||||
|                             'last': dst._last.value, |                             'last': dst._last.value, | ||||||
|                     }}) |                         } | ||||||
|  |                     }) | ||||||
|                     return tracker, index |                     return tracker, index | ||||||
| 
 | 
 | ||||||
|                 def is_synced( |                 def is_synced( | ||||||
|  |  | ||||||
|  | @ -0,0 +1,975 @@ | ||||||
|  | # piker: trading gear for hackers | ||||||
|  | # Copyright (C) Tyler Goodlet (in stewardship for pikers) | ||||||
|  | 
 | ||||||
|  | # This program is free software: you can redistribute it and/or modify | ||||||
|  | # it under the terms of the GNU Affero General Public License as published by | ||||||
|  | # the Free Software Foundation, either version 3 of the License, or | ||||||
|  | # (at your option) any later version. | ||||||
|  | 
 | ||||||
|  | # This program is distributed in the hope that it will be useful, | ||||||
|  | # but WITHOUT ANY WARRANTY; without even the implied warranty of | ||||||
|  | # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the | ||||||
|  | # GNU Affero General Public License for more details. | ||||||
|  | 
 | ||||||
|  | # You should have received a copy of the GNU Affero General Public License | ||||||
|  | 
 | ||||||
|  | # along with this program.  If not, see <https://www.gnu.org/licenses/>. | ||||||
|  | ''' | ||||||
|  | Personal/Private position parsing, calculating, summarizing in a way | ||||||
|  | that doesn't try to cuk most humans who prefer to not lose their moneys.. | ||||||
|  | (looking at you `ib` and dirt-bird friends) | ||||||
|  | 
 | ||||||
|  | ''' | ||||||
|  | from contextlib import contextmanager as cm | ||||||
|  | from pprint import pformat | ||||||
|  | import os | ||||||
|  | from os import path | ||||||
|  | from math import copysign | ||||||
|  | import re | ||||||
|  | import time | ||||||
|  | from typing import ( | ||||||
|  |     Any, | ||||||
|  |     Optional, | ||||||
|  |     Union, | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | import pendulum | ||||||
|  | from pendulum import datetime, now | ||||||
|  | import tomli | ||||||
|  | import toml | ||||||
|  | 
 | ||||||
|  | from . import config | ||||||
|  | from .brokers import get_brokermod | ||||||
|  | from .clearing._messages import BrokerdPosition, Status | ||||||
|  | from .data._source import Symbol | ||||||
|  | from .log import get_logger | ||||||
|  | from .data.types import Struct | ||||||
|  | 
 | ||||||
|  | log = get_logger(__name__) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | @cm | ||||||
|  | def open_trade_ledger( | ||||||
|  |     broker: str, | ||||||
|  |     account: str, | ||||||
|  | 
 | ||||||
|  | ) -> str: | ||||||
|  |     ''' | ||||||
|  |     Indempotently create and read in a trade log file from the | ||||||
|  |     ``<configuration_dir>/ledgers/`` directory. | ||||||
|  | 
 | ||||||
|  |     Files are named per broker account of the form | ||||||
|  |     ``<brokername>_<accountname>.toml``. The ``accountname`` here is the | ||||||
|  |     name as defined in the user's ``brokers.toml`` config. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     ldir = path.join(config._config_dir, 'ledgers') | ||||||
|  |     if not path.isdir(ldir): | ||||||
|  |         os.makedirs(ldir) | ||||||
|  | 
 | ||||||
|  |     fname = f'trades_{broker}_{account}.toml' | ||||||
|  |     tradesfile = path.join(ldir, fname) | ||||||
|  | 
 | ||||||
|  |     if not path.isfile(tradesfile): | ||||||
|  |         log.info( | ||||||
|  |             f'Creating new local trades ledger: {tradesfile}' | ||||||
|  |         ) | ||||||
|  |         with open(tradesfile, 'w') as cf: | ||||||
|  |             pass  # touch | ||||||
|  |     with open(tradesfile, 'rb') as cf: | ||||||
|  |         start = time.time() | ||||||
|  |         ledger = tomli.load(cf) | ||||||
|  |         print(f'Ledger load took {time.time() - start}s') | ||||||
|  |         cpy = ledger.copy() | ||||||
|  | 
 | ||||||
|  |     try: | ||||||
|  |         yield cpy | ||||||
|  |     finally: | ||||||
|  |         if cpy != ledger: | ||||||
|  |             # TODO: show diff output? | ||||||
|  |             # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries | ||||||
|  |             print(f'Updating ledger for {tradesfile}:\n') | ||||||
|  |             ledger.update(cpy) | ||||||
|  | 
 | ||||||
|  |             # we write on close the mutated ledger data | ||||||
|  |             with open(tradesfile, 'w') as cf: | ||||||
|  |                 toml.dump(ledger, cf) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Transaction(Struct, frozen=True): | ||||||
|  |     # TODO: should this be ``.to`` (see below)? | ||||||
|  |     fqsn: str | ||||||
|  | 
 | ||||||
|  |     tid: Union[str, int]  # unique transaction id | ||||||
|  |     size: float | ||||||
|  |     price: float | ||||||
|  |     cost: float  # commisions or other additional costs | ||||||
|  |     dt: datetime | ||||||
|  |     expiry: Optional[datetime] = None | ||||||
|  | 
 | ||||||
|  |     # optional key normally derived from the broker | ||||||
|  |     # backend which ensures the instrument-symbol this record | ||||||
|  |     # is for is truly unique. | ||||||
|  |     bsuid: Optional[Union[str, int]] = None | ||||||
|  | 
 | ||||||
|  |     # optional fqsn for the source "asset"/money symbol? | ||||||
|  |     # from: Optional[str] = None | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
|  | class Position(Struct): | ||||||
|  |     ''' | ||||||
|  |     Basic pp (personal/piker position) model with attached clearing | ||||||
|  |     transaction history. | ||||||
|  | 
 | ||||||
|  |     ''' | ||||||
|  |     symbol: Symbol | ||||||
|  | 
 | ||||||
|  |     # can be +ve or -ve for long/short | ||||||
|  |     size: float | ||||||
|  | 
 | ||||||
|  |     # "breakeven price" above or below which pnl moves above and below | ||||||
|  |     # zero for the entirety of the current "trade state". | ||||||
|  |     ppu: float | ||||||
|  | 
 | ||||||
|  |     # unique backend symbol id | ||||||
|  |     bsuid: str | ||||||
|  | 
 | ||||||
|  |     split_ratio: Optional[int] = None | ||||||
|  | 
 | ||||||
|  |     # ordered record of known constituent trade messages | ||||||
|  |     clears: dict[ | ||||||
|  |         Union[str, int, Status],  # trade id | ||||||
|  |         dict[str, Any],  # transaction history summaries | ||||||
|  |     ] = {} | ||||||
|  |     first_clear_dt: Optional[datetime] = None | ||||||
|  | 
 | ||||||
|  |     expiry: Optional[datetime] = None | ||||||
|  | 
 | ||||||
|  |     def to_dict(self) -> dict: | ||||||
|  |         return { | ||||||
|  |             f: getattr(self, f) | ||||||
|  |             for f in self.__struct_fields__ | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |     def to_pretoml(self) -> tuple[str, dict]: | ||||||
|  |         ''' | ||||||
|  |         Prep this position's data contents for export to toml including | ||||||
|  |         re-structuring of the ``.clears`` table to an array of | ||||||
|  |         inline-subtables for better ``pps.toml`` compactness. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         d = self.to_dict() | ||||||
|  |         clears = d.pop('clears') | ||||||
|  |         expiry = d.pop('expiry') | ||||||
|  | 
 | ||||||
|  |         if self.split_ratio is None: | ||||||
|  |             d.pop('split_ratio') | ||||||
|  | 
 | ||||||
|  |         # should be obvious from clears/event table | ||||||
|  |         d.pop('first_clear_dt') | ||||||
|  | 
 | ||||||
|  |         # TODO: we need to figure out how to have one top level | ||||||
|  |         # listing venue here even when the backend isn't providing | ||||||
|  |         # it via the trades ledger.. | ||||||
|  |         # drop symbol obj in serialized form | ||||||
|  |         s = d.pop('symbol') | ||||||
|  |         fqsn = s.front_fqsn() | ||||||
|  | 
 | ||||||
|  |         if self.expiry is None: | ||||||
|  |             d.pop('expiry', None) | ||||||
|  |         elif expiry: | ||||||
|  |             d['expiry'] = str(expiry) | ||||||
|  | 
 | ||||||
|  |         toml_clears_list = [] | ||||||
|  | 
 | ||||||
|  |         # reverse sort so latest clears are at top of section? | ||||||
|  |         for tid, data in sorted( | ||||||
|  |             list(clears.items()), | ||||||
|  | 
 | ||||||
|  |             # sort by datetime | ||||||
|  |             key=lambda item: item[1]['dt'], | ||||||
|  |         ): | ||||||
|  |             inline_table = toml.TomlDecoder().get_empty_inline_table() | ||||||
|  | 
 | ||||||
|  |             # serialize datetime to parsable `str` | ||||||
|  |             inline_table['dt'] = str(data['dt']) | ||||||
|  | 
 | ||||||
|  |             # insert optional clear fields in column order | ||||||
|  |             for k in ['ppu', 'accum_size']: | ||||||
|  |                 val = data.get(k) | ||||||
|  |                 if val: | ||||||
|  |                     inline_table[k] = val | ||||||
|  | 
 | ||||||
|  |             # insert required fields | ||||||
|  |             for k in ['price', 'size', 'cost']: | ||||||
|  |                 inline_table[k] = data[k] | ||||||
|  | 
 | ||||||
|  |             inline_table['tid'] = tid | ||||||
|  |             toml_clears_list.append(inline_table) | ||||||
|  | 
 | ||||||
|  |         d['clears'] = toml_clears_list | ||||||
|  | 
 | ||||||
|  |         return fqsn, d | ||||||
|  | 
 | ||||||
|  |     def ensure_state(self) -> None: | ||||||
|  |         ''' | ||||||
|  |         Audit either the `.size` and `.ppu` local instance vars against | ||||||
|  |         the clears table calculations and return the calc-ed values if | ||||||
|  |         they differ and log warnings to console. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         clears = list(self.clears.values()) | ||||||
|  |         self.first_clear_dt = min(list(entry['dt'] for entry in clears)) | ||||||
|  |         last_clear = clears[-1] | ||||||
|  | 
 | ||||||
|  |         csize = self.calc_size() | ||||||
|  |         accum = last_clear['accum_size'] | ||||||
|  |         if not self.expired(): | ||||||
|  |             if ( | ||||||
|  |                 csize != accum | ||||||
|  |                 and csize != round(accum * self.split_ratio or 1) | ||||||
|  |             ): | ||||||
|  |                 raise ValueError(f'Size mismatch: {csize}') | ||||||
|  |         else: | ||||||
|  |             assert csize == 0, 'Contract is expired but non-zero size?' | ||||||
|  | 
 | ||||||
|  |         if self.size != csize: | ||||||
|  |             log.warning( | ||||||
|  |                 'Position state mismatch:\n' | ||||||
|  |                 f'{self.size} => {csize}' | ||||||
|  |             ) | ||||||
|  |             self.size = csize | ||||||
|  | 
 | ||||||
|  |         cppu = self.calc_ppu() | ||||||
|  |         ppu = last_clear['ppu'] | ||||||
|  |         if ( | ||||||
|  |             cppu != ppu | ||||||
|  |             and self.split_ratio is not None | ||||||
|  |             # handle any split info entered (for now) manually by user | ||||||
|  |             and cppu != (ppu / self.split_ratio) | ||||||
|  |         ): | ||||||
|  |             raise ValueError(f'PPU mismatch: {cppu}') | ||||||
|  | 
 | ||||||
|  |         if self.ppu != cppu: | ||||||
|  |             log.warning( | ||||||
|  |                 'Position state mismatch:\n' | ||||||
|  |                 f'{self.ppu} => {cppu}' | ||||||
|  |             ) | ||||||
|  |             self.ppu = cppu | ||||||
|  | 
 | ||||||
|  |     def update_from_msg( | ||||||
|  |         self, | ||||||
|  |         msg: BrokerdPosition, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  | 
 | ||||||
|  |         # XXX: better place to do this? | ||||||
|  |         symbol = self.symbol | ||||||
|  | 
 | ||||||
|  |         lot_size_digits = symbol.lot_size_digits | ||||||
|  |         ppu, size = ( | ||||||
|  |             round( | ||||||
|  |                 msg['avg_price'], | ||||||
|  |                 ndigits=symbol.tick_size_digits | ||||||
|  |             ), | ||||||
|  |             round( | ||||||
|  |                 msg['size'], | ||||||
|  |                 ndigits=lot_size_digits | ||||||
|  |             ), | ||||||
|  |         ) | ||||||
|  | 
 | ||||||
|  |         self.ppu = ppu | ||||||
|  |         self.size = size | ||||||
|  | 
 | ||||||
|  |     @property | ||||||
|  |     def dsize(self) -> float: | ||||||
|  |         ''' | ||||||
|  |         The "dollar" size of the pp, normally in trading (fiat) unit | ||||||
|  |         terms. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         return self.ppu * self.size | ||||||
|  | 
 | ||||||
|  |     # TODO: idea: "real LIFO" dynamic positioning. | ||||||
|  |     # - when a trade takes place where the pnl for | ||||||
|  |     # the (set of) trade(s) is below the breakeven price | ||||||
|  |     # it may be that the trader took a +ve pnl on a short(er) | ||||||
|  |     # term trade in the same account. | ||||||
|  |     # - in this case we could recalc the be price to | ||||||
|  |     # be reverted back to it's prior value before the nearest term | ||||||
|  |     # trade was opened.? | ||||||
|  |     # def lifo_price() -> float: | ||||||
|  |     #     ... | ||||||
|  | 
 | ||||||
|  |     def calc_ppu( | ||||||
|  |         self, | ||||||
|  |         # include transaction cost in breakeven price | ||||||
|  |         # and presume the worst case of the same cost | ||||||
|  |         # to exit this transaction (even though in reality | ||||||
|  |         # it will be dynamic based on exit stratetgy). | ||||||
|  |         cost_scalar: float = 2, | ||||||
|  | 
 | ||||||
|  |     ) -> float: | ||||||
|  |         ''' | ||||||
|  |         Compute the "price-per-unit" price for the given non-zero sized | ||||||
|  |         rolling position. | ||||||
|  | 
 | ||||||
|  |         The recurrence relation which computes this (exponential) mean | ||||||
|  |         per new clear which **increases** the accumulative postiion size | ||||||
|  |         is: | ||||||
|  | 
 | ||||||
|  |         ppu[-1] = ( | ||||||
|  |             ppu[-2] * accum_size[-2] | ||||||
|  |             + | ||||||
|  |             ppu[-1] * size | ||||||
|  |         ) / accum_size[-1] | ||||||
|  | 
 | ||||||
|  |         where `cost_basis` for the current step is simply the price | ||||||
|  |         * size of the most recent clearing transaction. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         asize_h: list[float] = []  # historical accumulative size | ||||||
|  |         ppu_h: list[float] = []  # historical price-per-unit | ||||||
|  | 
 | ||||||
|  |         clears = list(self.clears.items()) | ||||||
|  | 
 | ||||||
|  |         for i, (tid, entry) in enumerate(clears): | ||||||
|  | 
 | ||||||
|  |             clear_size = entry['size'] | ||||||
|  |             clear_price = entry['price'] | ||||||
|  | 
 | ||||||
|  |             last_accum_size = asize_h[-1] if asize_h else 0 | ||||||
|  |             accum_size = last_accum_size + clear_size | ||||||
|  |             accum_sign = copysign(1, accum_size) | ||||||
|  | 
 | ||||||
|  |             sign_change: bool = False | ||||||
|  | 
 | ||||||
|  |             if accum_size == 0: | ||||||
|  |                 ppu_h.append(0) | ||||||
|  |                 asize_h.append(0) | ||||||
|  |                 continue | ||||||
|  | 
 | ||||||
|  |             # test if the pp somehow went "passed" a net zero size state | ||||||
|  |             # resulting in a change of the "sign" of the size (+ve for | ||||||
|  |             # long, -ve for short). | ||||||
|  |             sign_change = ( | ||||||
|  |                 copysign(1, last_accum_size) + accum_sign == 0 | ||||||
|  |                 and last_accum_size != 0 | ||||||
|  |             ) | ||||||
|  | 
 | ||||||
|  |             # since we passed the net-zero-size state the new size | ||||||
|  |             # after sum should be the remaining size the new | ||||||
|  |             # "direction" (aka, long vs. short) for this clear. | ||||||
|  |             if sign_change: | ||||||
|  |                 clear_size = accum_size | ||||||
|  |                 abs_diff = abs(accum_size) | ||||||
|  |                 asize_h.append(0) | ||||||
|  |                 ppu_h.append(0) | ||||||
|  | 
 | ||||||
|  |             else: | ||||||
|  |                 # old size minus the new size gives us size diff with | ||||||
|  |                 # +ve -> increase in pp size | ||||||
|  |                 # -ve -> decrease in pp size | ||||||
|  |                 abs_diff = abs(accum_size) - abs(last_accum_size) | ||||||
|  | 
 | ||||||
|  |             # XXX: LIFO breakeven price update. only an increaze in size | ||||||
|  |             # of the position contributes the breakeven price, | ||||||
|  |             # a decrease does not (i.e. the position is being made | ||||||
|  |             # smaller). | ||||||
|  |             # abs_clear_size = abs(clear_size) | ||||||
|  |             abs_new_size = abs(accum_size) | ||||||
|  | 
 | ||||||
|  |             if abs_diff > 0: | ||||||
|  | 
 | ||||||
|  |                 cost_basis = ( | ||||||
|  |                     # cost basis for this clear | ||||||
|  |                     clear_price * abs(clear_size) | ||||||
|  |                     + | ||||||
|  |                     # transaction cost | ||||||
|  |                     accum_sign * cost_scalar * entry['cost'] | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|  |                 if asize_h: | ||||||
|  |                     size_last = abs(asize_h[-1]) | ||||||
|  |                     cb_last = ppu_h[-1] * size_last | ||||||
|  |                     ppu = (cost_basis + cb_last) / abs_new_size | ||||||
|  | 
 | ||||||
|  |                 else: | ||||||
|  |                     ppu = cost_basis / abs_new_size | ||||||
|  | 
 | ||||||
|  |                 ppu_h.append(ppu) | ||||||
|  |                 asize_h.append(accum_size) | ||||||
|  | 
 | ||||||
|  |             else: | ||||||
|  |                 # on "exit" clears from a given direction, | ||||||
|  |                 # only the size changes not the price-per-unit | ||||||
|  |                 # need to be updated since the ppu remains constant | ||||||
|  |                 # and gets weighted by the new size. | ||||||
|  |                 asize_h.append(accum_size) | ||||||
|  |                 ppu_h.append(ppu_h[-1]) | ||||||
|  | 
 | ||||||
|  |         final_ppu = ppu_h[-1] if ppu_h else 0 | ||||||
|  | 
 | ||||||
|  |         # handle any split info entered (for now) manually by user | ||||||
|  |         if self.split_ratio is not None: | ||||||
|  |             final_ppu /= self.split_ratio | ||||||
|  | 
 | ||||||
|  |         return final_ppu | ||||||
|  | 
 | ||||||
|  |     def expired(self) -> bool: | ||||||
|  |         ''' | ||||||
|  |         Predicate which checks if the contract/instrument is past its expiry. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         return bool(self.expiry) and self.expiry < now() | ||||||
|  | 
 | ||||||
|  |     def calc_size(self) -> float: | ||||||
|  |         ''' | ||||||
|  |         Calculate the unit size of this position in the destination | ||||||
|  |         asset using the clears/trade event table; zero if expired. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         size: float = 0 | ||||||
|  | 
 | ||||||
|  |         # time-expired pps (normally derivatives) are "closed" | ||||||
|  |         # and have a zero size. | ||||||
|  |         if self.expired(): | ||||||
|  |             return 0 | ||||||
|  | 
 | ||||||
|  |         for tid, entry in self.clears.items(): | ||||||
|  |             size += entry['size'] | ||||||
|  | 
 | ||||||
|  |         if self.split_ratio is not None: | ||||||
|  |             size = round(size * self.split_ratio) | ||||||
|  | 
 | ||||||
|  |         return size | ||||||
|  | 
 | ||||||
|  |     def minimize_clears( | ||||||
|  |         self, | ||||||
|  | 
 | ||||||
|  |     ) -> dict[str, dict]: | ||||||
|  |         ''' | ||||||
|  |         Minimize the position's clears entries by removing | ||||||
|  |         all transactions before the last net zero size to avoid | ||||||
|  |         unecessary history irrelevant to the current pp state. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         size: float = 0 | ||||||
|  |         clears_since_zero: list[tuple(str, dict)] = [] | ||||||
|  | 
 | ||||||
|  |         # TODO: we might just want to always do this when iterating | ||||||
|  |         # a ledger? keep a state of the last net-zero and only do the | ||||||
|  |         # full iterate when no state was stashed? | ||||||
|  | 
 | ||||||
|  |         # scan for the last "net zero" position by iterating | ||||||
|  |         # transactions until the next net-zero size, rinse, repeat. | ||||||
|  |         for tid, clear in self.clears.items(): | ||||||
|  |             size += clear['size'] | ||||||
|  |             clears_since_zero.append((tid, clear)) | ||||||
|  | 
 | ||||||
|  |             if size == 0: | ||||||
|  |                 clears_since_zero.clear() | ||||||
|  | 
 | ||||||
|  |         self.clears = dict(clears_since_zero) | ||||||
|  |         return self.clears | ||||||
|  | 
 | ||||||
|  |     def add_clear( | ||||||
|  |         self, | ||||||
|  |         t: Transaction, | ||||||
|  |     ) -> dict: | ||||||
|  |         ''' | ||||||
|  |         Update clearing table and populate rolling ppu and accumulative | ||||||
|  |         size in both the clears entry and local attrs state. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         clear = self.clears[t.tid] = { | ||||||
|  |             'cost': t.cost, | ||||||
|  |             'price': t.price, | ||||||
|  |             'size': t.size, | ||||||
|  |             'dt': t.dt, | ||||||
|  |         } | ||||||
|  | 
 | ||||||
|  |         # TODO: compute these incrementally instead | ||||||
|  |         # of re-looping through each time resulting in O(n**2) | ||||||
|  |         # behaviour..? | ||||||
|  | 
 | ||||||
|  |         # NOTE: we compute these **after** adding the entry in order to | ||||||
|  |         # make the recurrence relation math work inside | ||||||
|  |         # ``.calc_size()``. | ||||||
|  |         self.size = clear['accum_size'] = self.calc_size() | ||||||
|  |         self.ppu = clear['ppu'] = self.calc_ppu() | ||||||
|  | 
 | ||||||
|  |         return clear | ||||||
|  | 
 | ||||||
    def sugest_split(self) -> float:
        # TODO: unimplemented stub; presumably meant to recommend
        # a ``split_ratio`` from clears history — confirm intent.
        # NOTE(review): name looks like a typo of "suggest_split" but is
        # public API so it is kept as-is here.
        ...
|  | 
 | ||||||
|  | 
 | ||||||
class PpTable(Struct):
    '''
    Per-account table of ``Position`` entries keyed by broker-specific
    unique id (``bsuid``) supporting incremental updates from
    transaction records and (de)serialization to the user's
    ``pps.toml`` config file.

    '''
    brokername: str
    acctid: str
    pps: dict[str, Position]
    conf: Optional[dict] = {}

    def update_from_trans(
        self,
        trans: dict[str, Transaction],
        # NOTE: currently unused here; clearing-cost scaling happens
        # inside ``Position.calc_ppu()``.
        cost_scalar: float = 2,

    ) -> dict[str, Position]:
        '''
        Apply the input transaction set to the table, allocating new
        ``Position`` entries as needed, and return the mapping of
        positions which were actually updated.

        '''
        pps = self.pps
        updated: dict[str, Position] = {}

        # lifo update all pps from records
        for tid, t in trans.items():

            pp = pps.setdefault(
                t.bsuid,

                # if no existing pp, allocate fresh one.
                Position(
                    Symbol.from_fqsn(
                        t.fqsn,
                        info={},
                    ),
                    size=0.0,
                    ppu=0.0,
                    bsuid=t.bsuid,
                    expiry=t.expiry,
                )
            )
            clears = pp.clears
            if clears:
                first_clear_dt = pp.first_clear_dt

                # don't do updates for ledger records we already have
                # included in the current pps state.
                if (
                    t.tid in clears
                    or first_clear_dt and t.dt < first_clear_dt
                ):
                    # NOTE: likely you'll see repeats of the same
                    # ``Transaction`` passed in here if/when you are restarting
                    # a ``brokerd.ib`` where the API will re-report trades from
                    # the current session, so we need to make sure we don't
                    # "double count" these in pp calculations.
                    continue

            # update clearing table
            pp.add_clear(t)
            updated[t.bsuid] = pp

        # re-derive size/ppu state for all touched positions.
        for bsuid, pp in updated.items():
            pp.ensure_state()

        return updated

    def dump_active(
        self,
    ) -> tuple[
        dict[str, Position],
        dict[str, Position]
    ]:
        '''
        Iterate all tabulated positions, render active positions to
        a ``dict`` format amenable to serialization (via TOML) and drop
        from state (``.pps``) as well as return in a ``dict`` all
        ``Position``s which have recently closed.

        '''
        # NOTE: newly closed position are also important to report/return
        # since a consumer, like an order mode UI ;), might want to react
        # based on the closure (for example removing the breakeven line
        # and clearing the entry from any lists/monitors).
        closed_pp_objs: dict[str, Position] = {}
        open_pp_objs: dict[str, Position] = {}

        pp_objs = self.pps
        for bsuid in list(pp_objs):
            pp = pp_objs[bsuid]

            # XXX: debug hook for size mismatches
            # qqqbsuid = 320227571
            # if bsuid == qqqbsuid:
            #     breakpoint()

            pp.ensure_state()

            if (
                # "net-zero" is a "closed" position
                pp.size == 0

                # time-expired pps (normally derivatives) are "closed"
                or (pp.expiry and pp.expiry < now())
            ):
                # for expired cases
                pp.size = 0

                # NOTE: we DO NOT pop the pp here since it can still be
                # used to check for duplicate clears that may come in as
                # new transaction from some backend API and need to be
                # ignored; the closed positions won't be written to the
                # ``pps.toml`` since ``pp_active_entries`` above is what's
                # written.
                closed_pp_objs[bsuid] = pp

            else:
                open_pp_objs[bsuid] = pp

        return open_pp_objs, closed_pp_objs

    def to_toml(
        self,
    ) -> dict[str, Any]:
        '''
        Render all **active** positions to a TOML-serializable dict
        keyed by broker-less fqsn.

        '''
        active, closed = self.dump_active()

        # ONLY dict-serialize all active positions; those that are closed
        # we don't store in the ``pps.toml``.
        to_toml_dict = {}

        for bsuid, pos in active.items():

            # keep the minimal amount of clears that make up this
            # position since the last net-zero state.
            pos.minimize_clears()
            pos.ensure_state()

            # serialize to pre-toml form
            fqsn, asdict = pos.to_pretoml()
            log.info(f'Updating active pp: {fqsn}')

            # XXX: ugh, it's cuz we push the section under
            # the broker name.. maybe we need to rethink this?
            brokerless_key = fqsn.removeprefix(f'{self.brokername}.')
            to_toml_dict[brokerless_key] = asdict

        return to_toml_dict

    def write_config(self) -> None:
        '''
        Write the current position table to the user's ``pps.toml``.

        '''
        # TODO: show diff output?
        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
        # XXX: fixed a ``NameError`` here; the old message interpolated
        # an undefined local ``path``.
        print('Updating ``pps.toml``:\n')

        # active, closed_pp_objs = table.dump_active()
        pp_entries = self.to_toml()

        # XXX: use ``setdefault`` since ``.conf`` defaults to an empty
        # dict and may not yet contain the broker section.
        self.conf.setdefault(self.brokername, {})[self.acctid] = pp_entries

        # TODO: why tf haven't they already done this for inline
        # tables smh..
        enc = PpsEncoder(preserve=True)
        # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
        enc.dump_funcs[
            toml.decoder.InlineTableDict
        ] = enc.dump_inline_table

        config.write(
            self.conf,
            'pps',
            encoder=enc,
        )
|  | 
 | ||||||
|  | 
 | ||||||
def load_pps_from_ledger(

    brokername: str,
    acctname: str,

    # post normalization filter on ledger entries to be processed
    filter_by: Optional[list[dict]] = None,

) -> tuple[
    dict[str, Transaction],
    dict[str, Position],
]:
    '''
    Open a ledger file by broker name and account and read in and
    process any trade records into our normalized ``Transaction`` form
    and then update the equivalent ``Pptable`` and deliver the two
    bsuid-mapped dict-sets of the transactions and pps.

    '''
    with (
        open_trade_ledger(brokername, acctname) as ledger,
        open_pps(brokername, acctname) as table,
    ):
        if not ledger:
            # null case, no ledger file with content.
            # XXX: fixed to return a 2-tuple matching the declared
            # return type; the old single ``{}`` broke callers doing
            # ``trans, pps = load_pps_from_ledger(...)``.
            return {}, {}

        mod = get_brokermod(brokername)
        src_records: dict[str, Transaction] = mod.norm_trade_records(ledger)

        if filter_by:
            # only process records for the requested bsuids.
            records = {}
            bsuids = set(filter_by)
            for tid, r in src_records.items():
                if r.bsuid in bsuids:
                    records[tid] = r
        else:
            records = src_records

        updated = table.update_from_trans(records)

    return records, updated
|  | 
 | ||||||
|  | 
 | ||||||
|  | # TODO: instead see if we can hack tomli and tomli-w to do the same: | ||||||
|  | # - https://github.com/hukkin/tomli | ||||||
|  | # - https://github.com/hukkin/tomli-w | ||||||
class PpsEncoder(toml.TomlEncoder):
    '''
    Special "styled" encoder that makes a ``pps.toml`` redable and
    compact by putting `.clears` tables inline and everything else
    flat-ish.

    '''
    # element separator used by the base ``toml.TomlEncoder`` machinery.
    separator = ','

    def dump_list(self, v):
        '''
        Dump an inline list with a newline after every element and
        with consideration for denoted inline table types.

        '''
        retval = "[\n"
        for u in v:
            # inline-table elements keep their compact `{ .. }` form;
            # all other values go through the std value dumper.
            if isinstance(u, toml.decoder.InlineTableDict):
                out = self.dump_inline_table(u)
            else:
                out = str(self.dump_value(u))

            retval += " " + out + "," + "\n"
        retval += "]"
        return retval

    def dump_inline_table(self, section):
        """Preserve inline table in its compact syntax instead of expanding
        into subsection.
        https://github.com/toml-lang/toml#user-content-inline-table
        """
        val_list = []
        for k, v in section.items():
            # if isinstance(v, toml.decoder.InlineTableDict):
            # NOTE: recurse for *any* dict value so nested tables also
            # render inline (not only decoder-marked inline tables).
            if isinstance(v, dict):
                val = self.dump_inline_table(v)
            else:
                val = str(self.dump_value(v))

            val_list.append(k + " = " + val)

        retval = "{ " + ", ".join(val_list) + " }"
        return retval

    def dump_sections(self, o, sup):
        '''
        Render table ``o`` under the (dotted) super-table name ``sup``
        returning the ``(rendered_str, sub_sections)`` pair expected by
        the ``toml.TomlEncoder`` API.

        '''
        retstr = ""
        if sup != "" and sup[-1] != ".":
            sup += '.'
        retdict = self._dict()
        arraystr = ""
        for section in o:
            qsection = str(section)
            value = o[section]

            # quote any key which is not a TOML "bare key".
            if not re.match(r'^[A-Za-z0-9_-]+$', section):
                qsection = toml.encoder._dump_str(section)

            # arrayoftables = False
            # when ``preserve`` is set, decoder-marked inline tables are
            # emitted in compact single-line form instead of as
            # sub-sections.
            if (
                self.preserve
                and isinstance(value, toml.decoder.InlineTableDict)
            ):
                retstr += (
                    qsection
                    +
                    " = "
                    +
                    self.dump_inline_table(o[section])
                    +
                    '\n'  # only on the final terminating left brace
                )

            # XXX: this code i'm pretty sure is just blatantly bad
            # and/or wrong..
            # if isinstance(o[section], list):
            #     for a in o[section]:
            #         if isinstance(a, dict):
            #             arrayoftables = True
            # if arrayoftables:
            #     for a in o[section]:
            #         arraytabstr = "\n"
            #         arraystr += "[[" + sup + qsection + "]]\n"
            #         s, d = self.dump_sections(a, sup + qsection)
            #         if s:
            #             if s[0] == "[":
            #                 arraytabstr += s
            #             else:
            #                 arraystr += s
            #         while d:
            #             newd = self._dict()
            #             for dsec in d:
            #                 s1, d1 = self.dump_sections(d[dsec], sup +
            #                                             qsection + "." +
            #                                             dsec)
            #                 if s1:
            #                     arraytabstr += ("[" + sup + qsection +
            #                                     "." + dsec + "]\n")
            #                     arraytabstr += s1
            #                 for s1 in d1:
            #                     newd[dsec + "." + s1] = d1[s1]
            #             d = newd
            #         arraystr += arraytabstr

            # plain (non-inline) dicts are deferred as sub-sections for
            # the caller to recurse into.
            elif isinstance(value, dict):
                retdict[qsection] = o[section]

            elif o[section] is not None:
                retstr += (
                    qsection
                    +
                    " = "
                    +
                    str(self.dump_value(o[section]))
                )

                # if not isinstance(value, dict):
                if not isinstance(value, toml.decoder.InlineTableDict):
                    # inline tables should not contain newlines:
                    # https://toml.io/en/v1.0.0#inline-table
                    retstr += '\n'

            else:
                raise ValueError(value)

        retstr += arraystr
        return (retstr, retdict)
|  | 
 | ||||||
|  | 
 | ||||||
@cm
def open_pps(
    brokername: str,
    acctid: str,
    # when set, re-serialize the (possibly mutated) table back to
    # ``pps.toml`` on context exit.
    write_on_exit: bool = True,

) -> PpTable:
    '''
    Read out broker-specific position entries from
    incremental update file: ``pps.toml``.

    '''
    conf, path = config.load('pps')
    brokersection = conf.setdefault(brokername, {})
    pps = brokersection.setdefault(acctid, {})

    # TODO: ideally we can pass in an existing
    # pps state to this right? such that we
    # don't have to do a ledger reload all the
    # time.. a couple ideas I can think of,
    # - mirror this in some client side actor which
    #   does the actual ledger updates (say the paper
    #   engine proc if we decide to always spawn it?),
    # - do diffs against updates from the ledger writer
    #   actor and the in-mem state here?

    pp_objs = {}
    table = PpTable(
        brokername,
        acctid,
        pp_objs,
        conf=conf,
    )

    # unmarshal/load ``pps.toml`` config entries into object form
    # and update `PpTable` obj entries.
    for fqsn, entry in pps.items():
        bsuid = entry['bsuid']

        # convert clears sub-tables (only in this form
        # for toml re-presentation) back into a master table.
        clears_list = entry['clears']

        # index clears entries in "object" form by tid in a top
        # level dict instead of a list (as is presented in our
        # ``pps.toml``).
        # NOTE(review): this ``pp_objs[bsuid]`` placeholder dict is
        # overwritten with a ``Position`` further below; the collected
        # ``clears`` mapping itself is never read afterwards (the
        # position's table is rebuilt via ``add_clear`` instead).
        clears = pp_objs.setdefault(bsuid, {})

        # TODO: should be make a ``Struct`` for clear/event entries?
        # convert "clear events table" from the toml config (list of
        # a dicts) and load it into object form for use in position
        # processing of new clear events.
        trans: list[Transaction] = []

        for clears_table in clears_list:
            tid = clears_table.pop('tid')
            # parse the ISO datetime string back into a datetime obj.
            dtstr = clears_table['dt']
            dt = pendulum.parse(dtstr)
            clears_table['dt'] = dt
            trans.append(Transaction(
                # NOTE(review): ``fqsn`` is set to the ``bsuid`` here,
                # not the toml section's fqsn — presumably fine since
                # the ``Position`` already exists; confirm.
                fqsn=bsuid,
                bsuid=bsuid,
                tid=tid,
                size=clears_table['size'],
                price=clears_table['price'],
                cost=clears_table['cost'],
                dt=dt,
            ))
            clears[tid] = clears_table

        size = entry['size']

        # TODO: remove but, handle old field name for now
        ppu = entry.get('ppu', entry.get('be_price', 0))
        split_ratio = entry.get('split_ratio')

        # expiry (if any) is stored as an ISO string in the toml.
        expiry = entry.get('expiry')
        if expiry:
            expiry = pendulum.parse(expiry)

        pp = pp_objs[bsuid] = Position(
            Symbol.from_fqsn(fqsn, info={}),
            size=size,
            ppu=ppu,
            split_ratio=split_ratio,
            expiry=expiry,
            bsuid=entry['bsuid'],
        )

        # XXX: super critical, we need to be sure to include
        # all pps.toml clears to avoid reusing clears that were
        # already included in the current incremental update
        # state, since today's records may have already been
        # processed!
        for t in trans:
            pp.add_clear(t)

        # audit entries loaded from toml
        pp.ensure_state()

    try:
        yield table
    finally:
        if write_on_exit:
            table.write_config()
|  | 
 | ||||||
|  | 
 | ||||||
if __name__ == '__main__':
    import sys

    # expect one or more `<broker>.<account>` specs on the cli.
    assert len(sys.argv) > 1, 'Specifiy account(s) from `brokers.toml`'

    for acctid in sys.argv[1:]:
        broker, name = acctid.split('.')
        trans, updated_pps = load_pps_from_ledger(broker, name)
        print(
            f'Processing transactions into pps for {broker}:{acctid}\n'
            f'{pformat(trans)}\n\n'
            f'{pformat(updated_pps)}'
        )
|  | @ -230,18 +230,19 @@ class GodWidget(QWidget): | ||||||
|             # - we'll probably want per-instrument/provider state here? |             # - we'll probably want per-instrument/provider state here? | ||||||
|             #   change the order config form over to the new chart |             #   change the order config form over to the new chart | ||||||
| 
 | 
 | ||||||
|             # XXX: since the pp config is a singleton widget we have to |  | ||||||
|             # also switch it over to the new chart's interal-layout |  | ||||||
|             # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane) |  | ||||||
|             chart = linkedsplits.chart |  | ||||||
| 
 |  | ||||||
|             # chart is already in memory so just focus it |             # chart is already in memory so just focus it | ||||||
|             linkedsplits.show() |             linkedsplits.show() | ||||||
|             linkedsplits.focus() |             linkedsplits.focus() | ||||||
|             linkedsplits.graphics_cycle() |             linkedsplits.graphics_cycle() | ||||||
|             await trio.sleep(0) |             await trio.sleep(0) | ||||||
| 
 | 
 | ||||||
|  |             # XXX: since the pp config is a singleton widget we have to | ||||||
|  |             # also switch it over to the new chart's interal-layout | ||||||
|  |             # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane) | ||||||
|  |             chart = linkedsplits.chart | ||||||
|  | 
 | ||||||
|             # resume feeds *after* rendering chart view asap |             # resume feeds *after* rendering chart view asap | ||||||
|  |             if chart: | ||||||
|                 chart.resume_all_feeds() |                 chart.resume_all_feeds() | ||||||
| 
 | 
 | ||||||
|                 # TODO: we need a check to see if the chart |                 # TODO: we need a check to see if the chart | ||||||
|  | @ -452,13 +453,6 @@ class LinkedSplits(QWidget): | ||||||
|         # add crosshair graphic |         # add crosshair graphic | ||||||
|         self.chart.addItem(self.cursor) |         self.chart.addItem(self.cursor) | ||||||
| 
 | 
 | ||||||
|         # axis placement |  | ||||||
|         if ( |  | ||||||
|             _xaxis_at == 'bottom' and |  | ||||||
|             'bottom' in self.chart.plotItem.axes |  | ||||||
|         ): |  | ||||||
|             self.chart.hideAxis('bottom') |  | ||||||
| 
 |  | ||||||
|         # style? |         # style? | ||||||
|         self.chart.setFrameStyle( |         self.chart.setFrameStyle( | ||||||
|             QFrame.StyledPanel | |             QFrame.StyledPanel | | ||||||
|  | @ -523,6 +517,15 @@ class LinkedSplits(QWidget): | ||||||
|         cpw.hideAxis('left') |         cpw.hideAxis('left') | ||||||
|         cpw.hideAxis('bottom') |         cpw.hideAxis('bottom') | ||||||
| 
 | 
 | ||||||
|  |         if ( | ||||||
|  |             _xaxis_at == 'bottom' and ( | ||||||
|  |                 self.xaxis_chart | ||||||
|  |                 or ( | ||||||
|  |                     not self.subplots | ||||||
|  |                     and self.xaxis_chart is None | ||||||
|  |                 ) | ||||||
|  |             ) | ||||||
|  |         ): | ||||||
|             if self.xaxis_chart: |             if self.xaxis_chart: | ||||||
|                 self.xaxis_chart.hideAxis('bottom') |                 self.xaxis_chart.hideAxis('bottom') | ||||||
| 
 | 
 | ||||||
|  | @ -531,13 +534,9 @@ class LinkedSplits(QWidget): | ||||||
|             # https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master |             # https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master | ||||||
|             # _ = self.xaxis_chart.removeAxis('bottom', unlink=False) |             # _ = self.xaxis_chart.removeAxis('bottom', unlink=False) | ||||||
|             # assert 'bottom' not in self.xaxis_chart.plotItem.axes |             # assert 'bottom' not in self.xaxis_chart.plotItem.axes | ||||||
| 
 |  | ||||||
|             self.xaxis_chart = cpw |             self.xaxis_chart = cpw | ||||||
|             cpw.showAxis('bottom') |             cpw.showAxis('bottom') | ||||||
| 
 | 
 | ||||||
|         if self.xaxis_chart is None: |  | ||||||
|             self.xaxis_chart = cpw |  | ||||||
| 
 |  | ||||||
|         qframe.chart = cpw |         qframe.chart = cpw | ||||||
|         qframe.hbox.addWidget(cpw) |         qframe.hbox.addWidget(cpw) | ||||||
| 
 | 
 | ||||||
|  | @ -760,9 +759,18 @@ class ChartPlotWidget(pg.PlotWidget): | ||||||
| 
 | 
 | ||||||
|         self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem) |         self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem) | ||||||
| 
 | 
 | ||||||
|  |         # indempotent startup flag for auto-yrange subsys | ||||||
|  |         # to detect the "first time" y-domain graphics begin | ||||||
|  |         # to be shown in the (main) graphics view. | ||||||
|  |         self._on_screen: bool = False | ||||||
|  | 
 | ||||||
|     def resume_all_feeds(self): |     def resume_all_feeds(self): | ||||||
|  |         try: | ||||||
|             for feed in self._feeds.values(): |             for feed in self._feeds.values(): | ||||||
|                 self.linked.godwidget._root_n.start_soon(feed.resume) |                 self.linked.godwidget._root_n.start_soon(feed.resume) | ||||||
|  |         except RuntimeError: | ||||||
|  |             # TODO: cancel the qtractor runtime here? | ||||||
|  |             raise | ||||||
| 
 | 
 | ||||||
|     def pause_all_feeds(self): |     def pause_all_feeds(self): | ||||||
|         for feed in self._feeds.values(): |         for feed in self._feeds.values(): | ||||||
|  | @ -859,7 +867,8 @@ class ChartPlotWidget(pg.PlotWidget): | ||||||
| 
 | 
 | ||||||
|     def default_view( |     def default_view( | ||||||
|         self, |         self, | ||||||
|         bars_from_y: int = 3000, |         bars_from_y: int = 616, | ||||||
|  |         do_ds: bool = True, | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         ''' |         ''' | ||||||
|  | @ -920,8 +929,11 @@ class ChartPlotWidget(pg.PlotWidget): | ||||||
|             max=end, |             max=end, | ||||||
|             padding=0, |             padding=0, | ||||||
|         ) |         ) | ||||||
|  | 
 | ||||||
|  |         if do_ds: | ||||||
|             self.view.maybe_downsample_graphics() |             self.view.maybe_downsample_graphics() | ||||||
|             view._set_yrange() |             view._set_yrange() | ||||||
|  | 
 | ||||||
|         try: |         try: | ||||||
|             self.linked.graphics_cycle() |             self.linked.graphics_cycle() | ||||||
|         except IndexError: |         except IndexError: | ||||||
|  | @ -1255,7 +1267,6 @@ class ChartPlotWidget(pg.PlotWidget): | ||||||
|         If ``bars_range`` is provided use that range. |         If ``bars_range`` is provided use that range. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         # print(f'Chart[{self.name}].maxmin()') |  | ||||||
|         profiler = pg.debug.Profiler( |         profiler = pg.debug.Profiler( | ||||||
|             msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`', |             msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`', | ||||||
|             disabled=not pg_profile_enabled(), |             disabled=not pg_profile_enabled(), | ||||||
|  | @ -1287,11 +1298,18 @@ class ChartPlotWidget(pg.PlotWidget): | ||||||
| 
 | 
 | ||||||
|             key = round(lbar), round(rbar) |             key = round(lbar), round(rbar) | ||||||
|             res = flow.maxmin(*key) |             res = flow.maxmin(*key) | ||||||
|             if res == (None, None): | 
 | ||||||
|                 log.error( |             if ( | ||||||
|  |                 res is None | ||||||
|  |             ): | ||||||
|  |                 log.warning( | ||||||
|                     f"{flow_key} no mxmn for bars_range => {key} !?" |                     f"{flow_key} no mxmn for bars_range => {key} !?" | ||||||
|                 ) |                 ) | ||||||
|                 res = 0, 0 |                 res = 0, 0 | ||||||
|  |                 if not self._on_screen: | ||||||
|  |                     self.default_view(do_ds=False) | ||||||
|  |                     self._on_screen = True | ||||||
| 
 | 
 | ||||||
|         profiler(f'yrange mxmn: {key} -> {res}') |         profiler(f'yrange mxmn: {key} -> {res}') | ||||||
|  |         # print(f'{flow_key} yrange mxmn: {key} -> {res}') | ||||||
|         return res |         return res | ||||||
|  |  | ||||||
|  | @ -223,14 +223,20 @@ def ds_m4( | ||||||
|     assert frames >= (xrange / uppx) |     assert frames >= (xrange / uppx) | ||||||
| 
 | 
 | ||||||
|     # call into ``numba`` |     # call into ``numba`` | ||||||
|     nb, i_win, y_out = _m4( |     ( | ||||||
|  |         nb, | ||||||
|  |         x_out, | ||||||
|  |         y_out, | ||||||
|  |         ymn, | ||||||
|  |         ymx, | ||||||
|  |     ) = _m4( | ||||||
|         x, |         x, | ||||||
|         y, |         y, | ||||||
| 
 | 
 | ||||||
|         frames, |         frames, | ||||||
| 
 | 
 | ||||||
|         # TODO: see func below.. |         # TODO: see func below.. | ||||||
|         # i_win, |         # x_out, | ||||||
|         # y_out, |         # y_out, | ||||||
| 
 | 
 | ||||||
|         # first index in x data to start at |         # first index in x data to start at | ||||||
|  | @ -243,10 +249,11 @@ def ds_m4( | ||||||
|     # filter out any overshoot in the input allocation arrays by |     # filter out any overshoot in the input allocation arrays by | ||||||
|     # removing zero-ed tail entries which should start at a certain |     # removing zero-ed tail entries which should start at a certain | ||||||
|     # index. |     # index. | ||||||
|     i_win = i_win[i_win != 0] |     x_out = x_out[x_out != 0] | ||||||
|     y_out = y_out[:i_win.size] |     y_out = y_out[:x_out.size] | ||||||
| 
 | 
 | ||||||
|     return nb, i_win, y_out |     # print(f'M4 output ymn, ymx: {ymn},{ymx}') | ||||||
|  |     return nb, x_out, y_out, ymn, ymx | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @jit( | @jit( | ||||||
|  | @ -260,8 +267,8 @@ def _m4( | ||||||
| 
 | 
 | ||||||
|     frames: int, |     frames: int, | ||||||
| 
 | 
 | ||||||
|     # TODO: using this approach by having the ``.zeros()`` alloc lines |     # TODO: using this approach, having the ``.zeros()`` alloc lines | ||||||
|     # below, in put python was causing segs faults and alloc crashes.. |     # below in pure python, there were segs faults and alloc crashes.. | ||||||
|     # we might need to see how it behaves with shm arrays and consider |     # we might need to see how it behaves with shm arrays and consider | ||||||
|     # allocating them once at startup? |     # allocating them once at startup? | ||||||
| 
 | 
 | ||||||
|  | @ -274,14 +281,22 @@ def _m4( | ||||||
|     x_start: int, |     x_start: int, | ||||||
|     step: float, |     step: float, | ||||||
| 
 | 
 | ||||||
| ) -> int: | ) -> tuple[ | ||||||
|     # nbins = len(i_win) |     int, | ||||||
|     # count = len(xs) |     np.ndarray, | ||||||
|  |     np.ndarray, | ||||||
|  |     float, | ||||||
|  |     float, | ||||||
|  | ]: | ||||||
|  |     ''' | ||||||
|  |     Implementation of the m4 algorithm in ``numba``: | ||||||
|  |     http://www.vldb.org/pvldb/vol7/p797-jugel.pdf | ||||||
| 
 | 
 | ||||||
|  |     ''' | ||||||
|     # these are pre-allocated and mutated by ``numba`` |     # these are pre-allocated and mutated by ``numba`` | ||||||
|     # code in-place. |     # code in-place. | ||||||
|     y_out = np.zeros((frames, 4), ys.dtype) |     y_out = np.zeros((frames, 4), ys.dtype) | ||||||
|     i_win = np.zeros(frames, xs.dtype) |     x_out = np.zeros(frames, xs.dtype) | ||||||
| 
 | 
 | ||||||
|     bincount = 0 |     bincount = 0 | ||||||
|     x_left = x_start |     x_left = x_start | ||||||
|  | @ -295,24 +310,34 @@ def _m4( | ||||||
| 
 | 
 | ||||||
|     # set all bins in the left-most entry to the starting left-most x value |     # set all bins in the left-most entry to the starting left-most x value | ||||||
|     # (aka a row broadcast). |     # (aka a row broadcast). | ||||||
|     i_win[bincount] = x_left |     x_out[bincount] = x_left | ||||||
|     # set all y-values to the first value passed in. |     # set all y-values to the first value passed in. | ||||||
|     y_out[bincount] = ys[0] |     y_out[bincount] = ys[0] | ||||||
| 
 | 
 | ||||||
|  |     # full input y-data mx and mn | ||||||
|  |     mx: float = -np.inf | ||||||
|  |     mn: float = np.inf | ||||||
|  | 
 | ||||||
|  |     # compute OHLC style max / min values per window sized x-frame. | ||||||
|     for i in range(len(xs)): |     for i in range(len(xs)): | ||||||
|  | 
 | ||||||
|         x = xs[i] |         x = xs[i] | ||||||
|         y = ys[i] |         y = ys[i] | ||||||
|  | 
 | ||||||
|         if x < x_left + step:   # the current window "step" is [bin, bin+1) |         if x < x_left + step:   # the current window "step" is [bin, bin+1) | ||||||
|             y_out[bincount, 1] = min(y, y_out[bincount, 1]) |             ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1]) | ||||||
|             y_out[bincount, 2] = max(y, y_out[bincount, 2]) |             ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2]) | ||||||
|             y_out[bincount, 3] = y |             y_out[bincount, 3] = y | ||||||
|  |             mx = max(mx, ymx) | ||||||
|  |             mn = min(mn, ymn) | ||||||
|  | 
 | ||||||
|         else: |         else: | ||||||
|             # Find the next bin |             # Find the next bin | ||||||
|             while x >= x_left + step: |             while x >= x_left + step: | ||||||
|                 x_left += step |                 x_left += step | ||||||
| 
 | 
 | ||||||
|             bincount += 1 |             bincount += 1 | ||||||
|             i_win[bincount] = x_left |             x_out[bincount] = x_left | ||||||
|             y_out[bincount] = y |             y_out[bincount] = y | ||||||
| 
 | 
 | ||||||
|     return bincount, i_win, y_out |     return bincount, x_out, y_out, mn, mx | ||||||
|  |  | ||||||
|  | @ -105,6 +105,10 @@ def chart_maxmin( | ||||||
|     mn, mx = out |     mn, mx = out | ||||||
| 
 | 
 | ||||||
|     mx_vlm_in_view = 0 |     mx_vlm_in_view = 0 | ||||||
|  | 
 | ||||||
|  |     # TODO: we need to NOT call this to avoid a manual | ||||||
|  |     # np.max/min trigger and especially on the vlm_chart | ||||||
|  |     # flows which aren't shown.. like vlm? | ||||||
|     if vlm_chart: |     if vlm_chart: | ||||||
|         out = vlm_chart.maxmin() |         out = vlm_chart.maxmin() | ||||||
|         if out: |         if out: | ||||||
|  | @ -132,16 +136,16 @@ class DisplayState: | ||||||
|     # high level chart handles |     # high level chart handles | ||||||
|     linked: LinkedSplits |     linked: LinkedSplits | ||||||
|     chart: ChartPlotWidget |     chart: ChartPlotWidget | ||||||
|     vlm_chart: ChartPlotWidget |  | ||||||
| 
 | 
 | ||||||
|     # axis labels |     # axis labels | ||||||
|     l1: L1Labels |     l1: L1Labels | ||||||
|     last_price_sticky: YAxisLabel |     last_price_sticky: YAxisLabel | ||||||
|     vlm_sticky: YAxisLabel |  | ||||||
| 
 | 
 | ||||||
|     # misc state tracking |     # misc state tracking | ||||||
|     vars: dict[str, Any] |     vars: dict[str, Any] | ||||||
| 
 | 
 | ||||||
|  |     vlm_chart: Optional[ChartPlotWidget] = None | ||||||
|  |     vlm_sticky: Optional[YAxisLabel] = None | ||||||
|     wap_in_history: bool = False |     wap_in_history: bool = False | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -181,9 +185,6 @@ async def graphics_update_loop( | ||||||
|         *ohlcv.array[-1][['index', 'close']] |         *ohlcv.array[-1][['index', 'close']] | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     if vlm_chart: |  | ||||||
|         vlm_sticky = vlm_chart._ysticks['volume'] |  | ||||||
| 
 |  | ||||||
|     maxmin = partial( |     maxmin = partial( | ||||||
|         chart_maxmin, |         chart_maxmin, | ||||||
|         chart, |         chart, | ||||||
|  | @ -222,33 +223,9 @@ async def graphics_update_loop( | ||||||
|     tick_margin = 3 * tick_size |     tick_margin = 3 * tick_size | ||||||
| 
 | 
 | ||||||
|     chart.show() |     chart.show() | ||||||
|     # view = chart.view |  | ||||||
|     last_quote = time.time() |     last_quote = time.time() | ||||||
|     i_last = ohlcv.index |     i_last = ohlcv.index | ||||||
| 
 | 
 | ||||||
|     # async def iter_drain_quotes(): |  | ||||||
|     #     # NOTE: all code below this loop is expected to be synchronous |  | ||||||
|     #     # and thus draw instructions are not picked up jntil the next |  | ||||||
|     #     # wait / iteration. |  | ||||||
|     #     async for quotes in stream: |  | ||||||
|     #         while True: |  | ||||||
|     #             try: |  | ||||||
|     #                 moar = stream.receive_nowait() |  | ||||||
|     #             except trio.WouldBlock: |  | ||||||
|     #                 yield quotes |  | ||||||
|     #                 break |  | ||||||
|     #             else: |  | ||||||
|     #                 for sym, quote in moar.items(): |  | ||||||
|     #                     ticks_frame = quote.get('ticks') |  | ||||||
|     #                     if ticks_frame: |  | ||||||
|     #                         quotes[sym].setdefault( |  | ||||||
|     #                             'ticks', []).extend(ticks_frame) |  | ||||||
|     #                     print('pulled extra') |  | ||||||
| 
 |  | ||||||
|     #                 yield quotes |  | ||||||
| 
 |  | ||||||
|     # async for quotes in iter_drain_quotes(): |  | ||||||
| 
 |  | ||||||
|     ds = linked.display_state = DisplayState(**{ |     ds = linked.display_state = DisplayState(**{ | ||||||
|         'quotes': {}, |         'quotes': {}, | ||||||
|         'linked': linked, |         'linked': linked, | ||||||
|  | @ -256,8 +233,6 @@ async def graphics_update_loop( | ||||||
|         'ohlcv': ohlcv, |         'ohlcv': ohlcv, | ||||||
|         'chart': chart, |         'chart': chart, | ||||||
|         'last_price_sticky': last_price_sticky, |         'last_price_sticky': last_price_sticky, | ||||||
|         'vlm_chart': vlm_chart, |  | ||||||
|         'vlm_sticky': vlm_sticky, |  | ||||||
|         'l1': l1, |         'l1': l1, | ||||||
| 
 | 
 | ||||||
|         'vars': { |         'vars': { | ||||||
|  | @ -270,6 +245,11 @@ async def graphics_update_loop( | ||||||
|         } |         } | ||||||
|     }) |     }) | ||||||
| 
 | 
 | ||||||
|  |     if vlm_chart: | ||||||
|  |         vlm_sticky = vlm_chart._ysticks['volume'] | ||||||
|  |         ds.vlm_chart = vlm_chart | ||||||
|  |         ds.vlm_sticky = vlm_sticky | ||||||
|  | 
 | ||||||
|     chart.default_view() |     chart.default_view() | ||||||
| 
 | 
 | ||||||
|     # main real-time quotes update loop |     # main real-time quotes update loop | ||||||
|  | @ -293,6 +273,7 @@ async def graphics_update_loop( | ||||||
| 
 | 
 | ||||||
|         # chart isn't active/shown so skip render cycle and pause feed(s) |         # chart isn't active/shown so skip render cycle and pause feed(s) | ||||||
|         if chart.linked.isHidden(): |         if chart.linked.isHidden(): | ||||||
|  |             print('skipping update') | ||||||
|             chart.pause_all_feeds() |             chart.pause_all_feeds() | ||||||
|             continue |             continue | ||||||
| 
 | 
 | ||||||
|  | @ -341,7 +322,7 @@ def graphics_update_cycle( | ||||||
|     for sym, quote in ds.quotes.items(): |     for sym, quote in ds.quotes.items(): | ||||||
| 
 | 
 | ||||||
|         # compute the first available graphic's x-units-per-pixel |         # compute the first available graphic's x-units-per-pixel | ||||||
|         uppx = vlm_chart.view.x_uppx() |         uppx = chart.view.x_uppx() | ||||||
| 
 | 
 | ||||||
|         # NOTE: vlm may be written by the ``brokerd`` backend |         # NOTE: vlm may be written by the ``brokerd`` backend | ||||||
|         # event though a tick sample is not emitted. |         # event though a tick sample is not emitted. | ||||||
|  | @ -416,10 +397,8 @@ def graphics_update_cycle( | ||||||
|             ) |             ) | ||||||
|             or trigger_all |             or trigger_all | ||||||
|         ): |         ): | ||||||
|             # TODO: we should track and compute whether the last |  | ||||||
|             # pixel in a curve should show new data based on uppx |  | ||||||
|             # and then iff update curves and shift? |  | ||||||
|             chart.increment_view(steps=i_diff) |             chart.increment_view(steps=i_diff) | ||||||
|  |             # chart.increment_view(steps=i_diff + round(append_diff - uppx)) | ||||||
| 
 | 
 | ||||||
|             if vlm_chart: |             if vlm_chart: | ||||||
|                 vlm_chart.increment_view(steps=i_diff) |                 vlm_chart.increment_view(steps=i_diff) | ||||||
|  | @ -477,7 +456,6 @@ def graphics_update_cycle( | ||||||
|         ): |         ): | ||||||
|             chart.update_graphics_from_flow( |             chart.update_graphics_from_flow( | ||||||
|                 chart.name, |                 chart.name, | ||||||
|                 # do_append=uppx < update_uppx, |  | ||||||
|                 do_append=do_append, |                 do_append=do_append, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  | @ -808,7 +786,10 @@ async def display_symbol_data( | ||||||
|         async with trio.open_nursery() as ln: |         async with trio.open_nursery() as ln: | ||||||
| 
 | 
 | ||||||
|             # if available load volume related built-in display(s) |             # if available load volume related built-in display(s) | ||||||
|             if has_vlm(ohlcv): |             if ( | ||||||
|  |                 not symbol.broker_info[provider].get('no_vlm', False) | ||||||
|  |                 and has_vlm(ohlcv) | ||||||
|  |             ): | ||||||
|                 vlm_chart = await ln.start( |                 vlm_chart = await ln.start( | ||||||
|                     open_vlm_displays, |                     open_vlm_displays, | ||||||
|                     linked, |                     linked, | ||||||
|  | @ -843,6 +824,9 @@ async def display_symbol_data( | ||||||
|                     order_mode_started |                     order_mode_started | ||||||
|                 ) |                 ) | ||||||
|             ): |             ): | ||||||
|  |                 if not vlm_chart: | ||||||
|  |                     chart.default_view() | ||||||
|  | 
 | ||||||
|                 # let Qt run to render all widgets and make sure the |                 # let Qt run to render all widgets and make sure the | ||||||
|                 # sidepanes line up vertically. |                 # sidepanes line up vertically. | ||||||
|                 await trio.sleep(0) |                 await trio.sleep(0) | ||||||
|  |  | ||||||
|  | @ -140,9 +140,9 @@ class LineEditor: | ||||||
| 
 | 
 | ||||||
|     ) -> LevelLine: |     ) -> LevelLine: | ||||||
| 
 | 
 | ||||||
|         staged_line = self._active_staged_line |         # staged_line = self._active_staged_line | ||||||
|         if not staged_line: |         # if not staged_line: | ||||||
|             raise RuntimeError("No line is currently staged!?") |         #     raise RuntimeError("No line is currently staged!?") | ||||||
| 
 | 
 | ||||||
|         # for now, until submission reponse arrives |         # for now, until submission reponse arrives | ||||||
|         line.hide_labels() |         line.hide_labels() | ||||||
|  |  | ||||||
|  | @ -21,7 +21,6 @@ Qt event proxying and processing using ``trio`` mem chans. | ||||||
| from contextlib import asynccontextmanager, AsyncExitStack | from contextlib import asynccontextmanager, AsyncExitStack | ||||||
| from typing import Callable | from typing import Callable | ||||||
| 
 | 
 | ||||||
| from pydantic import BaseModel |  | ||||||
| import trio | import trio | ||||||
| from PyQt5 import QtCore | from PyQt5 import QtCore | ||||||
| from PyQt5.QtCore import QEvent, pyqtBoundSignal | from PyQt5.QtCore import QEvent, pyqtBoundSignal | ||||||
|  | @ -30,6 +29,8 @@ from PyQt5.QtWidgets import ( | ||||||
|     QGraphicsSceneMouseEvent as gs_mouse, |     QGraphicsSceneMouseEvent as gs_mouse, | ||||||
| ) | ) | ||||||
| 
 | 
 | ||||||
|  | from ..data.types import Struct | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| MOUSE_EVENTS = { | MOUSE_EVENTS = { | ||||||
|     gs_mouse.GraphicsSceneMousePress, |     gs_mouse.GraphicsSceneMousePress, | ||||||
|  | @ -43,13 +44,10 @@ MOUSE_EVENTS = { | ||||||
| # TODO: maybe consider some constrained ints down the road? | # TODO: maybe consider some constrained ints down the road? | ||||||
| # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types | # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types | ||||||
| 
 | 
 | ||||||
| class KeyboardMsg(BaseModel): | class KeyboardMsg(Struct): | ||||||
|     '''Unpacked Qt keyboard event data. |     '''Unpacked Qt keyboard event data. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     class Config: |  | ||||||
|         arbitrary_types_allowed = True |  | ||||||
| 
 |  | ||||||
|     event: QEvent |     event: QEvent | ||||||
|     etype: int |     etype: int | ||||||
|     key: int |     key: int | ||||||
|  | @ -57,16 +55,13 @@ class KeyboardMsg(BaseModel): | ||||||
|     txt: str |     txt: str | ||||||
| 
 | 
 | ||||||
|     def to_tuple(self) -> tuple: |     def to_tuple(self) -> tuple: | ||||||
|         return tuple(self.dict().values()) |         return tuple(self.to_dict().values()) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class MouseMsg(BaseModel): | class MouseMsg(Struct): | ||||||
|     '''Unpacked Qt keyboard event data. |     '''Unpacked Qt keyboard event data. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|     class Config: |  | ||||||
|         arbitrary_types_allowed = True |  | ||||||
| 
 |  | ||||||
|     event: QEvent |     event: QEvent | ||||||
|     etype: int |     etype: int | ||||||
|     button: int |     button: int | ||||||
|  |  | ||||||
|  | @ -337,6 +337,7 @@ class Flow(msgspec.Struct):  # , frozen=True): | ||||||
|     name: str |     name: str | ||||||
|     plot: pg.PlotItem |     plot: pg.PlotItem | ||||||
|     graphics: Union[Curve, BarItems] |     graphics: Union[Curve, BarItems] | ||||||
|  |     yrange: tuple[float, float] = None | ||||||
| 
 | 
 | ||||||
|     # in some cases a flow may want to change its |     # in some cases a flow may want to change its | ||||||
|     # graphical "type" or, "form" when downsampling, |     # graphical "type" or, "form" when downsampling, | ||||||
|  | @ -386,10 +387,11 @@ class Flow(msgspec.Struct):  # , frozen=True): | ||||||
|         lbar: int, |         lbar: int, | ||||||
|         rbar: int, |         rbar: int, | ||||||
| 
 | 
 | ||||||
|     ) -> tuple[float, float]: |     ) -> Optional[tuple[float, float]]: | ||||||
|         ''' |         ''' | ||||||
|         Compute the cached max and min y-range values for a given |         Compute the cached max and min y-range values for a given | ||||||
|         x-range determined by ``lbar`` and ``rbar``. |         x-range determined by ``lbar`` and ``rbar`` or ``None`` | ||||||
|  |         if no range can be determined (yet). | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         rkey = (lbar, rbar) |         rkey = (lbar, rbar) | ||||||
|  | @ -399,9 +401,8 @@ class Flow(msgspec.Struct):  # , frozen=True): | ||||||
| 
 | 
 | ||||||
|         shm = self.shm |         shm = self.shm | ||||||
|         if shm is None: |         if shm is None: | ||||||
|             mxmn = None |             return None | ||||||
| 
 | 
 | ||||||
|         else:  # new block for profiling?.. |  | ||||||
|         arr = shm.array |         arr = shm.array | ||||||
| 
 | 
 | ||||||
|         # build relative indexes into shm array |         # build relative indexes into shm array | ||||||
|  | @ -414,7 +415,11 @@ class Flow(msgspec.Struct):  # , frozen=True): | ||||||
|         ] |         ] | ||||||
| 
 | 
 | ||||||
|         if not slice_view.size: |         if not slice_view.size: | ||||||
|                 mxmn = None |             return None | ||||||
|  | 
 | ||||||
|  |         elif self.yrange: | ||||||
|  |             mxmn = self.yrange | ||||||
|  |             # print(f'{self.name} M4 maxmin: {mxmn}') | ||||||
| 
 | 
 | ||||||
|         else: |         else: | ||||||
|             if self.is_ohlc: |             if self.is_ohlc: | ||||||
|  | @ -427,9 +432,10 @@ class Flow(msgspec.Struct):  # , frozen=True): | ||||||
|                 yhigh = np.max(view) |                 yhigh = np.max(view) | ||||||
| 
 | 
 | ||||||
|             mxmn = ylow, yhigh |             mxmn = ylow, yhigh | ||||||
|  |             # print(f'{self.name} MANUAL maxmin: {mxmin}') | ||||||
| 
 | 
 | ||||||
|             if mxmn is not None: |         # cache result for input range | ||||||
|                 # cache new mxmn result |         assert mxmn | ||||||
|         self._mxmns[rkey] = mxmn |         self._mxmns[rkey] = mxmn | ||||||
| 
 | 
 | ||||||
|         return mxmn |         return mxmn | ||||||
|  | @ -628,10 +634,13 @@ class Flow(msgspec.Struct):  # , frozen=True): | ||||||
|             # source data so we clear our path data in prep |             # source data so we clear our path data in prep | ||||||
|             # to generate a new one from original source data. |             # to generate a new one from original source data. | ||||||
|             new_sample_rate = True |             new_sample_rate = True | ||||||
|             showing_src_data = True |  | ||||||
|             should_ds = False |             should_ds = False | ||||||
|             should_redraw = True |             should_redraw = True | ||||||
| 
 | 
 | ||||||
|  |             showing_src_data = True | ||||||
|  |             # reset yrange to be computed from source data | ||||||
|  |             self.yrange = None | ||||||
|  | 
 | ||||||
|         # MAIN RENDER LOGIC: |         # MAIN RENDER LOGIC: | ||||||
|         # - determine in view data and redraw on range change |         # - determine in view data and redraw on range change | ||||||
|         # - determine downsampling ops if needed |         # - determine downsampling ops if needed | ||||||
|  | @ -657,6 +666,10 @@ class Flow(msgspec.Struct):  # , frozen=True): | ||||||
| 
 | 
 | ||||||
|             **rkwargs, |             **rkwargs, | ||||||
|         ) |         ) | ||||||
|  |         if showing_src_data: | ||||||
|  |             # print(f"{self.name} SHOWING SOURCE") | ||||||
|  |             # reset yrange to be computed from source data | ||||||
|  |             self.yrange = None | ||||||
| 
 | 
 | ||||||
|         if not out: |         if not out: | ||||||
|             log.warning(f'{self.name} failed to render!?') |             log.warning(f'{self.name} failed to render!?') | ||||||
|  | @ -664,6 +677,9 @@ class Flow(msgspec.Struct):  # , frozen=True): | ||||||
| 
 | 
 | ||||||
|         path, data, reset = out |         path, data, reset = out | ||||||
| 
 | 
 | ||||||
|  |         # if self.yrange: | ||||||
|  |         #     print(f'flow {self.name} yrange from m4: {self.yrange}') | ||||||
|  | 
 | ||||||
|         # XXX: SUPER UGGGHHH... without this we get stale cache |         # XXX: SUPER UGGGHHH... without this we get stale cache | ||||||
|         # graphics that don't update until you downsampler again.. |         # graphics that don't update until you downsampler again.. | ||||||
|         if reset: |         if reset: | ||||||
|  | @ -1058,6 +1074,7 @@ class Renderer(msgspec.Struct): | ||||||
|         # xy-path data transform: convert source data to a format |         # xy-path data transform: convert source data to a format | ||||||
|         # able to be passed to a `QPainterPath` rendering routine. |         # able to be passed to a `QPainterPath` rendering routine. | ||||||
|         if not len(hist): |         if not len(hist): | ||||||
|  |             # XXX: this might be why the profiler only has exits? | ||||||
|             return |             return | ||||||
| 
 | 
 | ||||||
|         x_out, y_out, connect = self.format_xy( |         x_out, y_out, connect = self.format_xy( | ||||||
|  | @ -1144,11 +1161,14 @@ class Renderer(msgspec.Struct): | ||||||
| 
 | 
 | ||||||
|             elif should_ds and uppx > 1: |             elif should_ds and uppx > 1: | ||||||
| 
 | 
 | ||||||
|                 x_out, y_out = xy_downsample( |                 x_out, y_out, ymn, ymx = xy_downsample( | ||||||
|                     x_out, |                     x_out, | ||||||
|                     y_out, |                     y_out, | ||||||
|                     uppx, |                     uppx, | ||||||
|                 ) |                 ) | ||||||
|  |                 self.flow.yrange = ymn, ymx | ||||||
|  |                 # print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}') | ||||||
|  | 
 | ||||||
|                 reset = True |                 reset = True | ||||||
|                 profiler(f'FULL PATH downsample redraw={should_ds}') |                 profiler(f'FULL PATH downsample redraw={should_ds}') | ||||||
|                 self._in_ds = True |                 self._in_ds = True | ||||||
|  |  | ||||||
|  | @ -619,7 +619,7 @@ class FillStatusBar(QProgressBar): | ||||||
|         # color: #19232D; |         # color: #19232D; | ||||||
|         # width: 10px; |         # width: 10px; | ||||||
| 
 | 
 | ||||||
|         self.setRange(0, slots) |         self.setRange(0, int(slots)) | ||||||
|         self.setValue(value) |         self.setValue(value) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -27,12 +27,13 @@ from itertools import cycle | ||||||
| from typing import Optional, AsyncGenerator, Any | from typing import Optional, AsyncGenerator, Any | ||||||
| 
 | 
 | ||||||
| import numpy as np | import numpy as np | ||||||
| from pydantic import create_model | import msgspec | ||||||
| import tractor | import tractor | ||||||
| import pyqtgraph as pg | import pyqtgraph as pg | ||||||
| import trio | import trio | ||||||
| from trio_typing import TaskStatus | from trio_typing import TaskStatus | ||||||
| 
 | 
 | ||||||
|  | from piker.data.types import Struct | ||||||
| from ._axes import PriceAxis | from ._axes import PriceAxis | ||||||
| from .._cacheables import maybe_open_context | from .._cacheables import maybe_open_context | ||||||
| from ..calc import humanize | from ..calc import humanize | ||||||
|  | @ -53,7 +54,7 @@ from ._forms import ( | ||||||
| from ..fsp._api import maybe_mk_fsp_shm, Fsp | from ..fsp._api import maybe_mk_fsp_shm, Fsp | ||||||
| from ..fsp import cascade | from ..fsp import cascade | ||||||
| from ..fsp._volume import ( | from ..fsp._volume import ( | ||||||
|     tina_vwap, |     # tina_vwap, | ||||||
|     dolla_vlm, |     dolla_vlm, | ||||||
|     flow_rates, |     flow_rates, | ||||||
| ) | ) | ||||||
|  | @ -153,12 +154,13 @@ async def open_fsp_sidepane( | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
|     # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation |     # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation | ||||||
|     FspConfig = create_model( |     FspConfig = msgspec.defstruct( | ||||||
|         'FspConfig', |         "Point", | ||||||
|         name=name, |         [('name', name)] + list(params.items()), | ||||||
|         **params, |         bases=(Struct,), | ||||||
|     ) |     ) | ||||||
|     sidepane.model = FspConfig() |     model = FspConfig(name=name, **params) | ||||||
|  |     sidepane.model = model | ||||||
| 
 | 
 | ||||||
|     # just a logger for now until we get fsp configs up and running. |     # just a logger for now until we get fsp configs up and running. | ||||||
|     async def settings_change( |     async def settings_change( | ||||||
|  | @ -440,7 +442,9 @@ class FspAdmin: | ||||||
|                         # if the chart isn't hidden try to update |                         # if the chart isn't hidden try to update | ||||||
|                         # the data on screen. |                         # the data on screen. | ||||||
|                         if not self.linked.isHidden(): |                         if not self.linked.isHidden(): | ||||||
|                             log.debug(f'Re-syncing graphics for fsp: {ns_path}') |                             log.debug( | ||||||
|  |                                 f'Re-syncing graphics for fsp: {ns_path}' | ||||||
|  |                             ) | ||||||
|                             self.linked.graphics_cycle( |                             self.linked.graphics_cycle( | ||||||
|                                 trigger_all=True, |                                 trigger_all=True, | ||||||
|                                 prepend_update_index=info['first'], |                                 prepend_update_index=info['first'], | ||||||
|  | @ -469,9 +473,10 @@ class FspAdmin: | ||||||
|             target=target, |             target=target, | ||||||
|             readonly=True, |             readonly=True, | ||||||
|         ) |         ) | ||||||
|         self._flow_registry[ |         self._flow_registry[( | ||||||
|             (self.src_shm._token, target.name) |             self.src_shm._token, | ||||||
|         ] = dst_shm._token |             target.name | ||||||
|  |         )] = dst_shm._token | ||||||
| 
 | 
 | ||||||
|         # if not opened: |         # if not opened: | ||||||
|         #     raise RuntimeError( |         #     raise RuntimeError( | ||||||
|  | @ -639,20 +644,25 @@ async def open_vlm_displays( | ||||||
|             names: list[str], |             names: list[str], | ||||||
| 
 | 
 | ||||||
|         ) -> tuple[float, float]: |         ) -> tuple[float, float]: | ||||||
|  |             ''' | ||||||
|  |             Flows "group" maxmin loop; assumes all named flows | ||||||
|  |             are in the same co-domain and thus can be sorted | ||||||
|  |             as one set. | ||||||
| 
 | 
 | ||||||
|  |             Iterates all the named flows and calls the chart | ||||||
|  |             api to find their range values and return. | ||||||
|  | 
 | ||||||
|  |             TODO: really we should probably have a more built-in API | ||||||
|  |             for this? | ||||||
|  | 
 | ||||||
|  |             ''' | ||||||
|             mx = 0 |             mx = 0 | ||||||
|             for name in names: |             for name in names: | ||||||
| 
 |                 ymn, ymx = chart.maxmin(name=name) | ||||||
|                 mxmn = chart.maxmin(name=name) |                 mx = max(mx, ymx) | ||||||
|                 if mxmn: |  | ||||||
|                     ymax = mxmn[1] |  | ||||||
|                     if ymax > mx: |  | ||||||
|                         mx = ymax |  | ||||||
| 
 | 
 | ||||||
|             return 0, mx |             return 0, mx | ||||||
| 
 | 
 | ||||||
|         chart.view.maxmin = partial(multi_maxmin, names=['volume']) |  | ||||||
| 
 |  | ||||||
|         # TODO: fix the x-axis label issue where if you put |         # TODO: fix the x-axis label issue where if you put | ||||||
|         # the axis on the left it's totally not lined up... |         # the axis on the left it's totally not lined up... | ||||||
|         # show volume units value on LHS (for dinkus) |         # show volume units value on LHS (for dinkus) | ||||||
|  | @ -776,6 +786,7 @@ async def open_vlm_displays( | ||||||
| 
 | 
 | ||||||
|             ) -> None: |             ) -> None: | ||||||
|                 for name in names: |                 for name in names: | ||||||
|  | 
 | ||||||
|                     if 'dark' in name: |                     if 'dark' in name: | ||||||
|                         color = dark_vlm_color |                         color = dark_vlm_color | ||||||
|                     elif 'rate' in name: |                     elif 'rate' in name: | ||||||
|  |  | ||||||
|  | @ -221,6 +221,7 @@ async def handle_viewmode_kb_inputs( | ||||||
|             # TODO: show pp config mini-params in status bar widget |             # TODO: show pp config mini-params in status bar widget | ||||||
|             # mode.pp_config.show() |             # mode.pp_config.show() | ||||||
| 
 | 
 | ||||||
|  |             trigger_type: str = 'dark' | ||||||
|             if ( |             if ( | ||||||
|                 # 's' for "submit" to activate "live" order |                 # 's' for "submit" to activate "live" order | ||||||
|                 Qt.Key_S in pressed or |                 Qt.Key_S in pressed or | ||||||
|  | @ -228,9 +229,6 @@ async def handle_viewmode_kb_inputs( | ||||||
|             ): |             ): | ||||||
|                 trigger_type: str = 'live' |                 trigger_type: str = 'live' | ||||||
| 
 | 
 | ||||||
|             else: |  | ||||||
|                 trigger_type: str = 'dark' |  | ||||||
| 
 |  | ||||||
|             # order mode trigger "actions" |             # order mode trigger "actions" | ||||||
|             if Qt.Key_D in pressed:  # for "damp eet" |             if Qt.Key_D in pressed:  # for "damp eet" | ||||||
|                 action = 'sell' |                 action = 'sell' | ||||||
|  | @ -397,8 +395,11 @@ class ChartView(ViewBox): | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         if self._ic is None: |         if self._ic is None: | ||||||
|  |             try: | ||||||
|                 self.chart.pause_all_feeds() |                 self.chart.pause_all_feeds() | ||||||
|                 self._ic = trio.Event() |                 self._ic = trio.Event() | ||||||
|  |             except RuntimeError: | ||||||
|  |                 pass | ||||||
| 
 | 
 | ||||||
|     def signal_ic( |     def signal_ic( | ||||||
|         self, |         self, | ||||||
|  | @ -411,9 +412,12 @@ class ChartView(ViewBox): | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         if self._ic: |         if self._ic: | ||||||
|  |             try: | ||||||
|                 self._ic.set() |                 self._ic.set() | ||||||
|                 self._ic = None |                 self._ic = None | ||||||
|                 self.chart.resume_all_feeds() |                 self.chart.resume_all_feeds() | ||||||
|  |             except RuntimeError: | ||||||
|  |                 pass | ||||||
| 
 | 
 | ||||||
|     @asynccontextmanager |     @asynccontextmanager | ||||||
|     async def open_async_input_handler( |     async def open_async_input_handler( | ||||||
|  | @ -669,7 +673,10 @@ class ChartView(ViewBox): | ||||||
|                 # XXX: WHY |                 # XXX: WHY | ||||||
|                 ev.accept() |                 ev.accept() | ||||||
| 
 | 
 | ||||||
|  |                 try: | ||||||
|                     self.start_ic() |                     self.start_ic() | ||||||
|  |                 except RuntimeError: | ||||||
|  |                     pass | ||||||
|                 # if self._ic is None: |                 # if self._ic is None: | ||||||
|                 #     self.chart.pause_all_feeds() |                 #     self.chart.pause_all_feeds() | ||||||
|                 #     self._ic = trio.Event() |                 #     self._ic = trio.Event() | ||||||
|  | @ -923,6 +930,7 @@ class ChartView(ViewBox): | ||||||
|                     # XXX: super important to be aware of this. |                     # XXX: super important to be aware of this. | ||||||
|                     # or not flow.graphics.isVisible() |                     # or not flow.graphics.isVisible() | ||||||
|                 ): |                 ): | ||||||
|  |                     # print(f'skipping {flow.name}') | ||||||
|                     continue |                     continue | ||||||
| 
 | 
 | ||||||
|                 # pass in no array which will read and render from the last |                 # pass in no array which will read and render from the last | ||||||
|  |  | ||||||
|  | @ -421,6 +421,10 @@ class LevelLine(pg.InfiniteLine): | ||||||
| 
 | 
 | ||||||
|         return path |         return path | ||||||
| 
 | 
 | ||||||
|  |     @property | ||||||
|  |     def marker(self) -> LevelMarker: | ||||||
|  |         return self._marker | ||||||
|  | 
 | ||||||
|     def hoverEvent(self, ev): |     def hoverEvent(self, ev): | ||||||
|         ''' |         ''' | ||||||
|         Mouse hover callback. |         Mouse hover callback. | ||||||
|  |  | ||||||
|  | @ -22,12 +22,9 @@ from __future__ import annotations | ||||||
| from typing import ( | from typing import ( | ||||||
|     Optional, Generic, |     Optional, Generic, | ||||||
|     TypeVar, Callable, |     TypeVar, Callable, | ||||||
|     Literal, |  | ||||||
| ) | ) | ||||||
| import enum |  | ||||||
| import sys |  | ||||||
| 
 | 
 | ||||||
| from pydantic import BaseModel, validator | # from pydantic import BaseModel, validator | ||||||
| from pydantic.generics import GenericModel | from pydantic.generics import GenericModel | ||||||
| from PyQt5.QtWidgets import ( | from PyQt5.QtWidgets import ( | ||||||
|     QWidget, |     QWidget, | ||||||
|  | @ -38,6 +35,7 @@ from ._forms import ( | ||||||
|     # FontScaledDelegate, |     # FontScaledDelegate, | ||||||
|     Edit, |     Edit, | ||||||
| ) | ) | ||||||
|  | from ..data.types import Struct | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| DataType = TypeVar('DataType') | DataType = TypeVar('DataType') | ||||||
|  | @ -62,7 +60,7 @@ class Selection(Field[DataType], Generic[DataType]): | ||||||
|     options: dict[str, DataType] |     options: dict[str, DataType] | ||||||
|     # value: DataType = None |     # value: DataType = None | ||||||
| 
 | 
 | ||||||
|     @validator('value')  # , always=True) |     # @validator('value')  # , always=True) | ||||||
|     def set_value_first( |     def set_value_first( | ||||||
|         cls, |         cls, | ||||||
| 
 | 
 | ||||||
|  | @ -100,7 +98,7 @@ class Edit(Field[DataType], Generic[DataType]): | ||||||
|     widget_factory = Edit |     widget_factory = Edit | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class AllocatorPane(BaseModel): | class AllocatorPane(Struct): | ||||||
| 
 | 
 | ||||||
|     account = Selection[str]( |     account = Selection[str]( | ||||||
|         options=dict.fromkeys( |         options=dict.fromkeys( | ||||||
|  |  | ||||||
|  | @ -80,8 +80,8 @@ class ComposedGridLayout: | ||||||
|     ``<axis_name>i`` in the layout. |     ``<axis_name>i`` in the layout. | ||||||
| 
 | 
 | ||||||
|     The ``item: PlotItem`` passed to the constructor's grid layout is |     The ``item: PlotItem`` passed to the constructor's grid layout is | ||||||
|     used verbatim as the "main plot" who's view box is give precedence |     used verbatim as the "main plot" who's view box is given precedence | ||||||
|     for input handling. The main plot's axes are removed from it's |     for input handling. The main plot's axes are removed from its | ||||||
|     layout and placed in the surrounding exterior layouts to allow for |     layout and placed in the surrounding exterior layouts to allow for | ||||||
|     re-ordering if desired. |     re-ordering if desired. | ||||||
| 
 | 
 | ||||||
|  |  | ||||||
|  | @ -49,12 +49,17 @@ def xy_downsample( | ||||||
| 
 | 
 | ||||||
|     x_spacer: float = 0.5, |     x_spacer: float = 0.5, | ||||||
| 
 | 
 | ||||||
| ) -> tuple[np.ndarray, np.ndarray]: | ) -> tuple[ | ||||||
|  |     np.ndarray, | ||||||
|  |     np.ndarray, | ||||||
|  |     float, | ||||||
|  |     float, | ||||||
|  | ]: | ||||||
| 
 | 
 | ||||||
|     # downsample whenever more then 1 pixels per datum can be shown. |     # downsample whenever more then 1 pixels per datum can be shown. | ||||||
|     # always refresh data bounds until we get diffing |     # always refresh data bounds until we get diffing | ||||||
|     # working properly, see above.. |     # working properly, see above.. | ||||||
|     bins, x, y = ds_m4( |     bins, x, y, ymn, ymx = ds_m4( | ||||||
|         x, |         x, | ||||||
|         y, |         y, | ||||||
|         uppx, |         uppx, | ||||||
|  | @ -67,7 +72,7 @@ def xy_downsample( | ||||||
|     )).flatten() |     )).flatten() | ||||||
|     y = y.flatten() |     y = y.flatten() | ||||||
| 
 | 
 | ||||||
|     return x, y |     return x, y, ymn, ymx | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| @njit( | @njit( | ||||||
|  |  | ||||||
|  | @ -19,6 +19,7 @@ Position info and display | ||||||
| 
 | 
 | ||||||
| """ | """ | ||||||
| from __future__ import annotations | from __future__ import annotations | ||||||
|  | from copy import copy | ||||||
| from dataclasses import dataclass | from dataclasses import dataclass | ||||||
| from functools import partial | from functools import partial | ||||||
| from math import floor, copysign | from math import floor, copysign | ||||||
|  | @ -105,8 +106,8 @@ async def update_pnl_from_feed( | ||||||
|                             # compute and display pnl status |                             # compute and display pnl status | ||||||
|                             order_mode.pane.pnl_label.format( |                             order_mode.pane.pnl_label.format( | ||||||
|                                 pnl=copysign(1, size) * pnl( |                                 pnl=copysign(1, size) * pnl( | ||||||
|                                     # live.avg_price, |                                     # live.ppu, | ||||||
|                                     order_mode.current_pp.live_pp.avg_price, |                                     order_mode.current_pp.live_pp.ppu, | ||||||
|                                     tick['price'], |                                     tick['price'], | ||||||
|                                 ), |                                 ), | ||||||
|                             ) |                             ) | ||||||
|  | @ -165,12 +166,29 @@ class SettingsPane: | ||||||
|         key: str, |         key: str, | ||||||
|         value: str, |         value: str, | ||||||
| 
 | 
 | ||||||
|  |     ) -> None: | ||||||
|  |         ''' | ||||||
|  |         Try to apply some input setting (by the user), revert to previous setting if it fails | ||||||
|  |         display new value if applied. | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         self.apply_setting(key, value) | ||||||
|  |         self.update_status_ui(pp=self.order_mode.current_pp) | ||||||
|  | 
 | ||||||
|  |     def apply_setting( | ||||||
|  |         self, | ||||||
|  | 
 | ||||||
|  |         key: str, | ||||||
|  |         value: str, | ||||||
|  | 
 | ||||||
|     ) -> bool: |     ) -> bool: | ||||||
|         ''' |         ''' | ||||||
|         Called on any order pane edit field value change. |         Called on any order pane edit field value change. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         mode = self.order_mode |         mode = self.order_mode | ||||||
|  |         tracker = mode.current_pp | ||||||
|  |         alloc = tracker.alloc | ||||||
| 
 | 
 | ||||||
|         # an account switch request |         # an account switch request | ||||||
|         if key == 'account': |         if key == 'account': | ||||||
|  | @ -206,25 +224,28 @@ class SettingsPane: | ||||||
|             # load the new account's allocator |             # load the new account's allocator | ||||||
|             alloc = tracker.alloc |             alloc = tracker.alloc | ||||||
| 
 | 
 | ||||||
|         else: |  | ||||||
|             tracker = mode.current_pp |  | ||||||
|             alloc = tracker.alloc |  | ||||||
| 
 |  | ||||||
|         size_unit = alloc.size_unit |  | ||||||
| 
 |  | ||||||
|         # WRITE any settings to current pp's allocator |         # WRITE any settings to current pp's allocator | ||||||
|         try: |  | ||||||
|         if key == 'size_unit': |         if key == 'size_unit': | ||||||
|             # implicit re-write of value if input |             # implicit re-write of value if input | ||||||
|             # is the "text name" of the units. |             # is the "text name" of the units. | ||||||
|             # yah yah, i know this is badd.. |             # yah yah, i know this is badd.. | ||||||
|             alloc.size_unit = value |             alloc.size_unit = value | ||||||
|             else: | 
 | ||||||
|  |         elif key != 'account':  # numeric fields entry | ||||||
|  |             try: | ||||||
|                 value = puterize(value) |                 value = puterize(value) | ||||||
|  |             except ValueError as err: | ||||||
|  |                 log.error(err.args[0]) | ||||||
|  |                 return False | ||||||
|  | 
 | ||||||
|             if key == 'limit': |             if key == 'limit': | ||||||
|  |                 if value <= 0: | ||||||
|  |                     log.error('limit must be > 0') | ||||||
|  |                     return False | ||||||
|  | 
 | ||||||
|                 pp = mode.current_pp.live_pp |                 pp = mode.current_pp.live_pp | ||||||
| 
 | 
 | ||||||
|                     if size_unit == 'currency': |                 if alloc.size_unit == 'currency': | ||||||
|                     dsize = pp.dsize |                     dsize = pp.dsize | ||||||
|                     if dsize > value: |                     if dsize > value: | ||||||
|                         log.error( |                         log.error( | ||||||
|  | @ -246,29 +267,42 @@ class SettingsPane: | ||||||
| 
 | 
 | ||||||
|             elif key == 'slots': |             elif key == 'slots': | ||||||
|                 if value <= 0: |                 if value <= 0: | ||||||
|                         raise ValueError('slots must be > 0') |                     # raise ValueError('slots must be > 0') | ||||||
|  |                     log.error('limit must be > 0') | ||||||
|  |                     return False | ||||||
|  | 
 | ||||||
|                 alloc.slots = int(value) |                 alloc.slots = int(value) | ||||||
| 
 | 
 | ||||||
|             else: |             else: | ||||||
|                 log.error(f'Unknown setting {key}') |                 log.error(f'Unknown setting {key}') | ||||||
|                 raise ValueError |                 raise ValueError | ||||||
| 
 | 
 | ||||||
|  |             # don't log account "change" case since it'll be submitted | ||||||
|  |             # on every mouse interaction. | ||||||
|             log.info(f'settings change: {key}: {value}') |             log.info(f'settings change: {key}: {value}') | ||||||
| 
 | 
 | ||||||
|         except ValueError: |         # TODO: maybe return a diff of settings so if we can an error we | ||||||
|             log.error(f'Invalid value for `{key}`: {value}') |         # can have general input handling code to report it through the | ||||||
|  |         # UI in some way? | ||||||
|  |         return True | ||||||
|  | 
 | ||||||
|  |     def update_status_ui( | ||||||
|  |         self, | ||||||
|  |         pp: PositionTracker, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  | 
 | ||||||
|  |         alloc = pp.alloc | ||||||
|  |         slots = alloc.slots | ||||||
|  |         used = alloc.slots_used(pp.live_pp) | ||||||
| 
 | 
 | ||||||
|         # READ out settings and update the status UI / settings widgets |         # READ out settings and update the status UI / settings widgets | ||||||
|         suffix = {'currency': ' $', 'units': ' u'}[size_unit] |         suffix = {'currency': ' $', 'units': ' u'}[alloc.size_unit] | ||||||
|         limit = alloc.limit() |         limit = alloc.limit() | ||||||
| 
 | 
 | ||||||
|         # TODO: a reverse look up from the position to the equivalent |  | ||||||
|         # account(s), if none then look to user config for default? |  | ||||||
|         self.update_status_ui(pp=tracker) |  | ||||||
| 
 |  | ||||||
|         step_size, currency_per_slot = alloc.step_sizes() |         step_size, currency_per_slot = alloc.step_sizes() | ||||||
| 
 | 
 | ||||||
|         if size_unit == 'currency': |         if alloc.size_unit == 'currency': | ||||||
|             step_size = currency_per_slot |             step_size = currency_per_slot | ||||||
| 
 | 
 | ||||||
|         self.step_label.format( |         self.step_label.format( | ||||||
|  | @ -286,23 +320,7 @@ class SettingsPane: | ||||||
|         self.form.fields['limit'].setText(str(limit)) |         self.form.fields['limit'].setText(str(limit)) | ||||||
| 
 | 
 | ||||||
|         # update of level marker size label based on any new settings |         # update of level marker size label based on any new settings | ||||||
|         tracker.update_from_pp() |         pp.update_from_pp() | ||||||
| 
 |  | ||||||
|         # TODO: maybe return a diff of settings so if we can an error we |  | ||||||
|         # can have general input handling code to report it through the |  | ||||||
|         # UI in some way? |  | ||||||
|         return True |  | ||||||
| 
 |  | ||||||
|     def update_status_ui( |  | ||||||
|         self, |  | ||||||
| 
 |  | ||||||
|         pp: PositionTracker, |  | ||||||
| 
 |  | ||||||
|     ) -> None: |  | ||||||
| 
 |  | ||||||
|         alloc = pp.alloc |  | ||||||
|         slots = alloc.slots |  | ||||||
|         used = alloc.slots_used(pp.live_pp) |  | ||||||
| 
 | 
 | ||||||
|         # calculate proportion of position size limit |         # calculate proportion of position size limit | ||||||
|         # that exists and display in fill bar |         # that exists and display in fill bar | ||||||
|  | @ -356,7 +374,7 @@ class SettingsPane: | ||||||
|             # last historical close price |             # last historical close price | ||||||
|             last = feed.shm.array[-1][['close']][0] |             last = feed.shm.array[-1][['close']][0] | ||||||
|             pnl_value = copysign(1, size) * pnl( |             pnl_value = copysign(1, size) * pnl( | ||||||
|                 tracker.live_pp.avg_price, |                 tracker.live_pp.ppu, | ||||||
|                 last, |                 last, | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|  | @ -440,6 +458,14 @@ def position_line( | ||||||
|     return line |     return line | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | _derivs = ( | ||||||
|  |     'future', | ||||||
|  |     'continuous_future', | ||||||
|  |     'option', | ||||||
|  |     'futures_option', | ||||||
|  | ) | ||||||
|  | 
 | ||||||
|  | 
 | ||||||
| class PositionTracker: | class PositionTracker: | ||||||
|     ''' |     ''' | ||||||
|     Track and display real-time positions for a single symbol |     Track and display real-time positions for a single symbol | ||||||
|  | @ -476,7 +502,7 @@ class PositionTracker: | ||||||
| 
 | 
 | ||||||
|         self.alloc = alloc |         self.alloc = alloc | ||||||
|         self.startup_pp = startup_pp |         self.startup_pp = startup_pp | ||||||
|         self.live_pp = startup_pp.copy() |         self.live_pp = copy(startup_pp) | ||||||
| 
 | 
 | ||||||
|         view = chart.getViewBox() |         view = chart.getViewBox() | ||||||
| 
 | 
 | ||||||
|  | @ -546,31 +572,71 @@ class PositionTracker: | ||||||
|     def update_from_pp( |     def update_from_pp( | ||||||
|         self, |         self, | ||||||
|         position: Optional[Position] = None, |         position: Optional[Position] = None, | ||||||
|  |         set_as_startup: bool = False, | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
|         '''Update graphics and data from average price and size passed in our |         ''' | ||||||
|         EMS ``BrokerdPosition`` msg. |         Update graphics and data from average price and size passed in | ||||||
|  |         our EMS ``BrokerdPosition`` msg. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|         # live pp updates |         # live pp updates | ||||||
|         pp = position or self.live_pp |         pp = position or self.live_pp | ||||||
|  |         if set_as_startup: | ||||||
|  |             startup_pp = pp | ||||||
|  |         else: | ||||||
|  |             startup_pp = self.startup_pp | ||||||
|  |         alloc = self.alloc | ||||||
|  | 
 | ||||||
|  |         # update allocator settings | ||||||
|  |         asset_type = pp.symbol.type_key | ||||||
|  | 
 | ||||||
|  |         # specific configs by asset class / type | ||||||
|  |         if asset_type in _derivs: | ||||||
|  |             # since it's harder to know how currency "applies" in this case | ||||||
|  |             # given leverage properties | ||||||
|  |             alloc.size_unit = '# units' | ||||||
|  | 
 | ||||||
|  |             # set units limit to slots size thus making make the next | ||||||
|  |             # entry step 1.0 | ||||||
|  |             alloc.units_limit = alloc.slots | ||||||
|  | 
 | ||||||
|  |         else: | ||||||
|  |             alloc.size_unit = 'currency' | ||||||
|  | 
 | ||||||
|  |         # if the current position is already greater then the limit | ||||||
|  |         # settings, increase the limit to the current position | ||||||
|  |         if alloc.size_unit == 'currency': | ||||||
|  |             startup_size = self.startup_pp.size * startup_pp.ppu | ||||||
|  | 
 | ||||||
|  |             if startup_size > alloc.currency_limit: | ||||||
|  |                 alloc.currency_limit = round(startup_size, ndigits=2) | ||||||
|  | 
 | ||||||
|  |         else: | ||||||
|  |             startup_size = abs(startup_pp.size) | ||||||
|  | 
 | ||||||
|  |             if startup_size > alloc.units_limit: | ||||||
|  |                 alloc.units_limit = startup_size | ||||||
|  | 
 | ||||||
|  |                 if asset_type in _derivs: | ||||||
|  |                     alloc.slots = alloc.units_limit | ||||||
| 
 | 
 | ||||||
|         self.update_line( |         self.update_line( | ||||||
|             pp.avg_price, |             pp.ppu, | ||||||
|             pp.size, |             pp.size, | ||||||
|             self.chart.linked.symbol.lot_size_digits, |             self.chart.linked.symbol.lot_size_digits, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # label updates |         # label updates | ||||||
|         self.size_label.fields['slots_used'] = round( |         self.size_label.fields['slots_used'] = round( | ||||||
|             self.alloc.slots_used(pp), ndigits=1) |             alloc.slots_used(pp), ndigits=1) | ||||||
|         self.size_label.render() |         self.size_label.render() | ||||||
| 
 | 
 | ||||||
|         if pp.size == 0: |         if pp.size == 0: | ||||||
|             self.hide() |             self.hide() | ||||||
| 
 | 
 | ||||||
|         else: |         else: | ||||||
|             self._level_marker.level = pp.avg_price |             self._level_marker.level = pp.ppu | ||||||
| 
 | 
 | ||||||
|             # these updates are critical to avoid lag on view/scene changes |             # these updates are critical to avoid lag on view/scene changes | ||||||
|             self._level_marker.update()  # trigger paint |             self._level_marker.update()  # trigger paint | ||||||
|  |  | ||||||
|  | @ -27,20 +27,20 @@ import time | ||||||
| from typing import Optional, Dict, Callable, Any | from typing import Optional, Dict, Callable, Any | ||||||
| import uuid | import uuid | ||||||
| 
 | 
 | ||||||
| from pydantic import BaseModel |  | ||||||
| import tractor | import tractor | ||||||
| import trio | import trio | ||||||
| from PyQt5.QtCore import Qt | from PyQt5.QtCore import Qt | ||||||
| 
 | 
 | ||||||
| from .. import config | from .. import config | ||||||
|  | from ..pp import Position | ||||||
| from ..clearing._client import open_ems, OrderBook | from ..clearing._client import open_ems, OrderBook | ||||||
| from ..clearing._allocate import ( | from ..clearing._allocate import ( | ||||||
|     mk_allocator, |     mk_allocator, | ||||||
|     Position, |  | ||||||
| ) | ) | ||||||
| from ._style import _font | from ._style import _font | ||||||
| from ..data._source import Symbol | from ..data._source import Symbol | ||||||
| from ..data.feed import Feed | from ..data.feed import Feed | ||||||
|  | from ..data.types import Struct | ||||||
| from ..log import get_logger | from ..log import get_logger | ||||||
| from ._editors import LineEditor, ArrowEditor | from ._editors import LineEditor, ArrowEditor | ||||||
| from ._lines import order_line, LevelLine | from ._lines import order_line, LevelLine | ||||||
|  | @ -49,17 +49,23 @@ from ._position import ( | ||||||
|     SettingsPane, |     SettingsPane, | ||||||
| ) | ) | ||||||
| from ._forms import FieldsForm | from ._forms import FieldsForm | ||||||
| # from ._label import FormatLabel |  | ||||||
| from ._window import MultiStatus | from ._window import MultiStatus | ||||||
| from ..clearing._messages import Order, BrokerdPosition | from ..clearing._messages import ( | ||||||
|  |     Order, | ||||||
|  |     Status, | ||||||
|  |     # BrokerdOrder, | ||||||
|  |     # BrokerdStatus, | ||||||
|  |     BrokerdPosition, | ||||||
|  | ) | ||||||
| from ._forms import open_form_input_handling | from ._forms import open_form_input_handling | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| log = get_logger(__name__) | log = get_logger(__name__) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| class OrderDialog(BaseModel): | class Dialog(Struct): | ||||||
|     '''Trade dialogue meta-data describing the lifetime |     ''' | ||||||
|  |     Trade dialogue meta-data describing the lifetime | ||||||
|     of an order submission to ``emsd`` from a chart. |     of an order submission to ``emsd`` from a chart. | ||||||
| 
 | 
 | ||||||
|     ''' |     ''' | ||||||
|  | @ -72,41 +78,6 @@ class OrderDialog(BaseModel): | ||||||
|     msgs: dict[str, dict] = {} |     msgs: dict[str, dict] = {} | ||||||
|     fills: Dict[str, Any] = {} |     fills: Dict[str, Any] = {} | ||||||
| 
 | 
 | ||||||
|     class Config: |  | ||||||
|         arbitrary_types_allowed = True |  | ||||||
|         underscore_attrs_are_private = False |  | ||||||
| 
 |  | ||||||
| 
 |  | ||||||
| def on_level_change_update_next_order_info( |  | ||||||
| 
 |  | ||||||
|     level: float, |  | ||||||
| 
 |  | ||||||
|     # these are all ``partial``-ed in at callback assignment time. |  | ||||||
|     line: LevelLine, |  | ||||||
|     order: Order, |  | ||||||
|     tracker: PositionTracker, |  | ||||||
| 
 |  | ||||||
| ) -> None: |  | ||||||
|     '''A callback applied for each level change to the line |  | ||||||
|     which will recompute the order size based on allocator |  | ||||||
|     settings. this is assigned inside |  | ||||||
|     ``OrderMode.line_from_order()`` |  | ||||||
| 
 |  | ||||||
|     ''' |  | ||||||
|     # NOTE: the ``Order.account`` is set at order stage time |  | ||||||
|     # inside ``OrderMode.line_from_order()``. |  | ||||||
|     order_info = tracker.alloc.next_order_info( |  | ||||||
|         startup_pp=tracker.startup_pp, |  | ||||||
|         live_pp=tracker.live_pp, |  | ||||||
|         price=level, |  | ||||||
|         action=order.action, |  | ||||||
|     ) |  | ||||||
|     line.update_labels(order_info) |  | ||||||
| 
 |  | ||||||
|     # update bound-in staged order |  | ||||||
|     order.price = level |  | ||||||
|     order.size = order_info['size'] |  | ||||||
| 
 |  | ||||||
| 
 | 
 | ||||||
| @dataclass | @dataclass | ||||||
| class OrderMode: | class OrderMode: | ||||||
|  | @ -143,7 +114,7 @@ class OrderMode: | ||||||
|     current_pp: Optional[PositionTracker] = None |     current_pp: Optional[PositionTracker] = None | ||||||
|     active: bool = False |     active: bool = False | ||||||
|     name: str = 'order' |     name: str = 'order' | ||||||
|     dialogs: dict[str, OrderDialog] = field(default_factory=dict) |     dialogs: dict[str, Dialog] = field(default_factory=dict) | ||||||
| 
 | 
 | ||||||
|     _colors = { |     _colors = { | ||||||
|         'alert': 'alert_yellow', |         'alert': 'alert_yellow', | ||||||
|  | @ -152,12 +123,45 @@ class OrderMode: | ||||||
|     } |     } | ||||||
|     _staged_order: Optional[Order] = None |     _staged_order: Optional[Order] = None | ||||||
| 
 | 
 | ||||||
|  |     def on_level_change_update_next_order_info( | ||||||
|  |         self, | ||||||
|  |         level: float, | ||||||
|  | 
 | ||||||
|  |         # these are all ``partial``-ed in at callback assignment time. | ||||||
|  |         line: LevelLine, | ||||||
|  |         order: Order, | ||||||
|  |         tracker: PositionTracker, | ||||||
|  | 
 | ||||||
|  |     ) -> None: | ||||||
|  |         ''' | ||||||
|  |         A callback applied for each level change to the line | ||||||
|  |         which will recompute the order size based on allocator | ||||||
|  |         settings. this is assigned inside | ||||||
|  |         ``OrderMode.line_from_order()`` | ||||||
|  | 
 | ||||||
|  |         ''' | ||||||
|  |         # NOTE: the ``Order.account`` is set at order stage time inside | ||||||
|  |         # ``OrderMode.line_from_order()`` or is inside ``Order`` msg | ||||||
|  |         # field for loaded orders. | ||||||
|  |         order_info = tracker.alloc.next_order_info( | ||||||
|  |             startup_pp=tracker.startup_pp, | ||||||
|  |             live_pp=tracker.live_pp, | ||||||
|  |             price=level, | ||||||
|  |             action=order.action, | ||||||
|  |         ) | ||||||
|  |         line.update_labels(order_info) | ||||||
|  | 
 | ||||||
|  |         # update bound-in staged order | ||||||
|  |         order.price = level | ||||||
|  |         order.size = order_info['size'] | ||||||
|  | 
 | ||||||
|  |         # when an order is changed we flip the settings side-pane to | ||||||
|  |         # reflect the corresponding account and pos info. | ||||||
|  |         self.pane.on_ui_settings_change('account', order.account) | ||||||
|  | 
 | ||||||
|     def line_from_order( |     def line_from_order( | ||||||
|         self, |         self, | ||||||
| 
 |  | ||||||
|         order: Order, |         order: Order, | ||||||
|         symbol: Symbol, |  | ||||||
| 
 |  | ||||||
|         **line_kwargs, |         **line_kwargs, | ||||||
| 
 | 
 | ||||||
|     ) -> LevelLine: |     ) -> LevelLine: | ||||||
|  | @ -175,8 +179,8 @@ class OrderMode: | ||||||
|             color=self._colors[order.action], |             color=self._colors[order.action], | ||||||
| 
 | 
 | ||||||
|             dotted=True if ( |             dotted=True if ( | ||||||
|                 order.exec_mode == 'dark' and |                 order.exec_mode == 'dark' | ||||||
|                 order.action != 'alert' |                 and order.action != 'alert' | ||||||
|             ) else False, |             ) else False, | ||||||
| 
 | 
 | ||||||
|             **line_kwargs, |             **line_kwargs, | ||||||
|  | @ -186,10 +190,12 @@ class OrderMode: | ||||||
|         # immediately |         # immediately | ||||||
|         if order.action != 'alert': |         if order.action != 'alert': | ||||||
|             line._on_level_change = partial( |             line._on_level_change = partial( | ||||||
|                 on_level_change_update_next_order_info, |                 self.on_level_change_update_next_order_info, | ||||||
|                 line=line, |                 line=line, | ||||||
|                 order=order, |                 order=order, | ||||||
|                 tracker=self.current_pp, |                 # use the corresponding position tracker for the | ||||||
|  |                 # order's account. | ||||||
|  |                 tracker=self.trackers[order.account], | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|         else: |         else: | ||||||
|  | @ -238,8 +244,6 @@ class OrderMode: | ||||||
| 
 | 
 | ||||||
|         line = self.line_from_order( |         line = self.line_from_order( | ||||||
|             order, |             order, | ||||||
|             symbol, |  | ||||||
| 
 |  | ||||||
|             show_markers=True, |             show_markers=True, | ||||||
|             # just for the stage line to avoid |             # just for the stage line to avoid | ||||||
|             # flickering while moving the cursor |             # flickering while moving the cursor | ||||||
|  | @ -251,7 +255,6 @@ class OrderMode: | ||||||
|             # prevent flickering of marker while moving/tracking cursor |             # prevent flickering of marker while moving/tracking cursor | ||||||
|             only_show_markers_on_hover=False, |             only_show_markers_on_hover=False, | ||||||
|         ) |         ) | ||||||
| 
 |  | ||||||
|         line = self.lines.stage_line(line) |         line = self.lines.stage_line(line) | ||||||
| 
 | 
 | ||||||
|         # hide crosshair y-line and label |         # hide crosshair y-line and label | ||||||
|  | @ -264,28 +267,26 @@ class OrderMode: | ||||||
| 
 | 
 | ||||||
|     def submit_order( |     def submit_order( | ||||||
|         self, |         self, | ||||||
|  |         send_msg: bool = True, | ||||||
|  |         order: Optional[Order] = None, | ||||||
| 
 | 
 | ||||||
|     ) -> OrderDialog: |     ) -> Dialog: | ||||||
|         '''Send execution order to EMS return a level line to |         ''' | ||||||
|  |         Send execution order to EMS return a level line to | ||||||
|         represent the order on a chart. |         represent the order on a chart. | ||||||
| 
 | 
 | ||||||
|         ''' |         ''' | ||||||
|  |         if not order: | ||||||
|             staged = self._staged_order |             staged = self._staged_order | ||||||
|         symbol: Symbol = staged.symbol |             # apply order fields for ems | ||||||
|             oid = str(uuid.uuid4()) |             oid = str(uuid.uuid4()) | ||||||
|  |             order = staged.copy() | ||||||
|  |             order.oid = oid | ||||||
| 
 | 
 | ||||||
|         # format order data for ems |         order.symbol = order.symbol.front_fqsn() | ||||||
|         fqsn = symbol.front_fqsn() |  | ||||||
|         order = staged.copy( |  | ||||||
|             update={ |  | ||||||
|                 'symbol': fqsn, |  | ||||||
|                 'oid': oid, |  | ||||||
|             } |  | ||||||
|         ) |  | ||||||
| 
 | 
 | ||||||
|         line = self.line_from_order( |         line = self.line_from_order( | ||||||
|             order, |             order, | ||||||
|             symbol, |  | ||||||
| 
 | 
 | ||||||
|             show_markers=True, |             show_markers=True, | ||||||
|             only_show_markers_on_hover=True, |             only_show_markers_on_hover=True, | ||||||
|  | @ -303,17 +304,17 @@ class OrderMode: | ||||||
|         # color once the submission ack arrives. |         # color once the submission ack arrives. | ||||||
|         self.lines.submit_line( |         self.lines.submit_line( | ||||||
|             line=line, |             line=line, | ||||||
|             uuid=oid, |             uuid=order.oid, | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         dialog = OrderDialog( |         dialog = Dialog( | ||||||
|             uuid=oid, |             uuid=order.oid, | ||||||
|             order=order, |             order=order, | ||||||
|             symbol=symbol, |             symbol=order.symbol, | ||||||
|             line=line, |             line=line, | ||||||
|             last_status_close=self.multistatus.open_status( |             last_status_close=self.multistatus.open_status( | ||||||
|                 f'submitting {self._trigger_type}-{order.action}', |                 f'submitting {order.exec_mode}-{order.action}', | ||||||
|                 final_msg=f'submitted {self._trigger_type}-{order.action}', |                 final_msg=f'submitted {order.exec_mode}-{order.action}', | ||||||
|                 clear_on_next=True, |                 clear_on_next=True, | ||||||
|             ) |             ) | ||||||
|         ) |         ) | ||||||
|  | @ -323,14 +324,21 @@ class OrderMode: | ||||||
| 
 | 
 | ||||||
|         # enter submission which will be popped once a response |         # enter submission which will be popped once a response | ||||||
|         # from the EMS is received to move the order to a different# status |         # from the EMS is received to move the order to a different# status | ||||||
|         self.dialogs[oid] = dialog |         self.dialogs[order.oid] = dialog | ||||||
| 
 | 
 | ||||||
|         # hook up mouse drag handlers |         # hook up mouse drag handlers | ||||||
|         line._on_drag_start = self.order_line_modify_start |         line._on_drag_start = self.order_line_modify_start | ||||||
|         line._on_drag_end = self.order_line_modify_complete |         line._on_drag_end = self.order_line_modify_complete | ||||||
| 
 | 
 | ||||||
|         # send order cmd to ems |         # send order cmd to ems | ||||||
|  |         if send_msg: | ||||||
|             self.book.send(order) |             self.book.send(order) | ||||||
|  |         else: | ||||||
|  |             # just register for control over this order | ||||||
|  |             # TODO: some kind of mini-perms system here based on | ||||||
|  |             # an out-of-band tagging/auth sub-sys for multiplayer | ||||||
|  |             # order control? | ||||||
|  |             self.book._sent_orders[order.oid] = order | ||||||
| 
 | 
 | ||||||
|         return dialog |         return dialog | ||||||
| 
 | 
 | ||||||
|  | @ -368,7 +376,7 @@ class OrderMode: | ||||||
|         self, |         self, | ||||||
|         uuid: str |         uuid: str | ||||||
| 
 | 
 | ||||||
|     ) -> OrderDialog: |     ) -> Dialog: | ||||||
|         ''' |         ''' | ||||||
|         Order submitted status event handler. |         Order submitted status event handler. | ||||||
| 
 | 
 | ||||||
|  | @ -423,7 +431,7 @@ class OrderMode: | ||||||
|         self, |         self, | ||||||
| 
 | 
 | ||||||
|         uuid: str, |         uuid: str, | ||||||
|         msg: Dict[str, Any], |         msg: Status, | ||||||
| 
 | 
 | ||||||
|     ) -> None: |     ) -> None: | ||||||
| 
 | 
 | ||||||
|  | @ -447,7 +455,7 @@ class OrderMode: | ||||||
| 
 | 
 | ||||||
|                 # TODO: add in standard fill/exec info that maybe we |                 # TODO: add in standard fill/exec info that maybe we | ||||||
|                 # pack in a broker independent way? |                 # pack in a broker independent way? | ||||||
|                 f'{msg["resp"]}: {msg["trigger_price"]}', |                 f'{msg.resp}: {msg.req.price}', | ||||||
|             ], |             ], | ||||||
|         ) |         ) | ||||||
|         log.runtime(result) |         log.runtime(result) | ||||||
|  | @ -507,7 +515,7 @@ class OrderMode: | ||||||
|                     oid = dialog.uuid |                     oid = dialog.uuid | ||||||
| 
 | 
 | ||||||
|                     cancel_status_close = self.multistatus.open_status( |                     cancel_status_close = self.multistatus.open_status( | ||||||
|                         f'cancelling order {oid[:6]}', |                         f'cancelling order {oid}', | ||||||
|                         group_key=key, |                         group_key=key, | ||||||
|                     ) |                     ) | ||||||
|                     dialog.last_status_close = cancel_status_close |                     dialog.last_status_close = cancel_status_close | ||||||
|  | @ -517,6 +525,44 @@ class OrderMode: | ||||||
| 
 | 
 | ||||||
|         return ids |         return ids | ||||||
| 
 | 
 | ||||||
|  |     def load_unknown_dialog_from_msg( | ||||||
|  |         self, | ||||||
|  |         msg: Status, | ||||||
|  | 
 | ||||||
|  |     ) -> Dialog: | ||||||
|  |         # NOTE: the `.order` attr **must** be set with the | ||||||
|  |         # equivalent order msg in order to be loaded. | ||||||
|  |         order = msg.req | ||||||
|  |         oid = str(msg.oid) | ||||||
|  |         symbol = order.symbol | ||||||
|  | 
 | ||||||
|  |         # TODO: MEGA UGGG ZONEEEE! | ||||||
|  |         src = msg.src | ||||||
|  |         if ( | ||||||
|  |             src | ||||||
|  |             and src not in ('dark', 'paperboi') | ||||||
|  |             and src not in symbol | ||||||
|  |         ): | ||||||
|  |             fqsn = symbol + '.' + src | ||||||
|  |             brokername = src | ||||||
|  |         else: | ||||||
|  |             fqsn = symbol | ||||||
|  |             *head, brokername = fqsn.rsplit('.') | ||||||
|  | 
 | ||||||
|  |         # fill out complex fields | ||||||
|  |         order.oid = str(order.oid) | ||||||
|  |         order.brokers = [brokername] | ||||||
|  |         order.symbol = Symbol.from_fqsn( | ||||||
|  |             fqsn=fqsn, | ||||||
|  |             info={}, | ||||||
|  |         ) | ||||||
|  |         dialog = self.submit_order( | ||||||
|  |             send_msg=False, | ||||||
|  |             order=order, | ||||||
|  |         ) | ||||||
|  |         assert self.dialogs[oid] == dialog | ||||||
|  |         return dialog | ||||||
|  | 
 | ||||||
| 
 | 
 | ||||||
| @asynccontextmanager | @asynccontextmanager | ||||||
| async def open_order_mode( | async def open_order_mode( | ||||||
|  | @ -554,6 +600,7 @@ async def open_order_mode( | ||||||
|             trades_stream, |             trades_stream, | ||||||
|             position_msgs, |             position_msgs, | ||||||
|             brokerd_accounts, |             brokerd_accounts, | ||||||
|  |             ems_dialog_msgs, | ||||||
|         ), |         ), | ||||||
|         trio.open_nursery() as tn, |         trio.open_nursery() as tn, | ||||||
| 
 | 
 | ||||||
|  | @ -577,9 +624,9 @@ async def open_order_mode( | ||||||
|             providers=symbol.brokers |             providers=symbol.brokers | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         # XXX: ``brokerd`` delivers a set of account names that it allows |         # XXX: ``brokerd`` delivers a set of account names that it | ||||||
|         # use of but the user also can define the accounts they'd like |         # allows use of but the user also can define the accounts they'd | ||||||
|         # to use, in order, in their `brokers.toml` file. |         # like to use, in order, in their `brokers.toml` file. | ||||||
|         accounts = {} |         accounts = {} | ||||||
|         for name in brokerd_accounts: |         for name in brokerd_accounts: | ||||||
|             # ensure name is in ``brokers.toml`` |             # ensure name is in ``brokers.toml`` | ||||||
|  | @ -592,11 +639,6 @@ async def open_order_mode( | ||||||
|             iter(accounts.keys()) |             iter(accounts.keys()) | ||||||
|         ) if accounts else 'paper' |         ) if accounts else 'paper' | ||||||
| 
 | 
 | ||||||
|         # NOTE: requires the backend exactly specifies |  | ||||||
|         # the expected symbol key in its positions msg. |  | ||||||
|         pp_msgs = position_msgs.get(symkey, ()) |  | ||||||
|         pps_by_account = {msg['account']: msg for msg in pp_msgs} |  | ||||||
| 
 |  | ||||||
|         # update pp trackers with data relayed from ``brokerd``. |         # update pp trackers with data relayed from ``brokerd``. | ||||||
|         for account_name in accounts: |         for account_name in accounts: | ||||||
| 
 | 
 | ||||||
|  | @ -604,12 +646,11 @@ async def open_order_mode( | ||||||
|             startup_pp = Position( |             startup_pp = Position( | ||||||
|                 symbol=symbol, |                 symbol=symbol, | ||||||
|                 size=0, |                 size=0, | ||||||
|                 avg_price=0, |                 ppu=0, | ||||||
|  | 
 | ||||||
|  |                 # XXX: BLEH, do we care about this on the client side? | ||||||
|  |                 bsuid=symbol, | ||||||
|             ) |             ) | ||||||
|             msg = pps_by_account.get(account_name) |  | ||||||
|             if msg: |  | ||||||
|                 log.info(f'Loading pp for {symkey}:\n{pformat(msg)}') |  | ||||||
|                 startup_pp.update_from_msg(msg) |  | ||||||
| 
 | 
 | ||||||
|             # allocator config |             # allocator config | ||||||
|             alloc = mk_allocator( |             alloc = mk_allocator( | ||||||
|  | @ -644,7 +685,7 @@ async def open_order_mode( | ||||||
|         # setup order mode sidepane widgets |         # setup order mode sidepane widgets | ||||||
|         form: FieldsForm = chart.sidepane |         form: FieldsForm = chart.sidepane | ||||||
|         form.vbox.setSpacing( |         form.vbox.setSpacing( | ||||||
|             int((1 + 5/8)*_font.px_size) |             int((1 + 5 / 8) * _font.px_size) | ||||||
|         ) |         ) | ||||||
| 
 | 
 | ||||||
|         from ._feedstatus import mk_feed_label |         from ._feedstatus import mk_feed_label | ||||||
|  | @ -694,7 +735,7 @@ async def open_order_mode( | ||||||
|         order_pane.order_mode = mode |         order_pane.order_mode = mode | ||||||
| 
 | 
 | ||||||
|         # select a pp to track |         # select a pp to track | ||||||
|         tracker = trackers[pp_account] |         tracker: PositionTracker = trackers[pp_account] | ||||||
|         mode.current_pp = tracker |         mode.current_pp = tracker | ||||||
|         tracker.show() |         tracker.show() | ||||||
|         tracker.hide_info() |         tracker.hide_info() | ||||||
|  | @ -705,7 +746,6 @@ async def open_order_mode( | ||||||
|         # to order sync pane handler |         # to order sync pane handler | ||||||
|         for key in ('account', 'size_unit',): |         for key in ('account', 'size_unit',): | ||||||
|             w = form.fields[key] |             w = form.fields[key] | ||||||
| 
 |  | ||||||
|             w.currentTextChanged.connect( |             w.currentTextChanged.connect( | ||||||
|                 partial( |                 partial( | ||||||
|                     order_pane.on_selection_change, |                     order_pane.on_selection_change, | ||||||
|  | @ -728,6 +768,18 @@ async def open_order_mode( | ||||||
|         # Begin order-response streaming |         # Begin order-response streaming | ||||||
|         done() |         done() | ||||||
| 
 | 
 | ||||||
|  |         # Pack position messages by account, should only be one-to-one. | ||||||
|  |         # NOTE: requires the backend exactly specifies | ||||||
|  |         # the expected symbol key in its positions msg. | ||||||
|  |         for (broker, acctid), msgs in position_msgs.items(): | ||||||
|  |             for msg in msgs: | ||||||
|  |                 log.info(f'Loading pp for {symkey}:\n{pformat(msg)}') | ||||||
|  |                 await process_trade_msg( | ||||||
|  |                     mode, | ||||||
|  |                     book, | ||||||
|  |                     msg, | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|         # start async input handling for chart's view |         # start async input handling for chart's view | ||||||
|         async with ( |         async with ( | ||||||
| 
 | 
 | ||||||
|  | @ -746,38 +798,61 @@ async def open_order_mode( | ||||||
|             # to handle input since the ems connection is ready |             # to handle input since the ems connection is ready | ||||||
|             started.set() |             started.set() | ||||||
| 
 | 
 | ||||||
|  |             for oid, msg in ems_dialog_msgs.items(): | ||||||
|  | 
 | ||||||
|  |                 # HACK ALERT: ensure a resp field is filled out since | ||||||
|  |                 # techincally the call below expects a ``Status``. TODO: | ||||||
|  |                 # parse into proper ``Status`` equivalents ems-side? | ||||||
|  |                 # msg.setdefault('resp', msg['broker_details']['resp']) | ||||||
|  |                 # msg.setdefault('oid', msg['broker_details']['oid']) | ||||||
|  |                 msg['brokerd_msg'] = msg | ||||||
|  | 
 | ||||||
|  |                 await process_trade_msg( | ||||||
|  |                     mode, | ||||||
|  |                     book, | ||||||
|  |                     msg, | ||||||
|  |                 ) | ||||||
|  | 
 | ||||||
|             tn.start_soon( |             tn.start_soon( | ||||||
|                 process_trades_and_update_ui, |                 process_trades_and_update_ui, | ||||||
|                 tn, |  | ||||||
|                 feed, |  | ||||||
|                 mode, |  | ||||||
|                 trades_stream, |                 trades_stream, | ||||||
|  |                 mode, | ||||||
|                 book, |                 book, | ||||||
|             ) |             ) | ||||||
|  | 
 | ||||||
|             yield mode |             yield mode | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| async def process_trades_and_update_ui( | async def process_trades_and_update_ui( | ||||||
| 
 | 
 | ||||||
|     n: trio.Nursery, |  | ||||||
|     feed: Feed, |  | ||||||
|     mode: OrderMode, |  | ||||||
|     trades_stream: tractor.MsgStream, |     trades_stream: tractor.MsgStream, | ||||||
|  |     mode: OrderMode, | ||||||
|     book: OrderBook, |     book: OrderBook, | ||||||
| 
 | 
 | ||||||
| ) -> None: | ) -> None: | ||||||
| 
 | 
 | ||||||
|     get_index = mode.chart.get_index |  | ||||||
|     global _pnl_tasks |  | ||||||
| 
 |  | ||||||
|     # this is where we receive **back** messages |     # this is where we receive **back** messages | ||||||
|     # about executions **from** the EMS actor |     # about executions **from** the EMS actor | ||||||
|     async for msg in trades_stream: |     async for msg in trades_stream: | ||||||
|  |         await process_trade_msg( | ||||||
|  |             mode, | ||||||
|  |             book, | ||||||
|  |             msg, | ||||||
|  |         ) | ||||||
| 
 | 
 | ||||||
|  | 
 | ||||||
|  | async def process_trade_msg( | ||||||
|  |     mode: OrderMode, | ||||||
|  |     book: OrderBook, | ||||||
|  |     msg: dict, | ||||||
|  | 
 | ||||||
|  | ) -> tuple[Dialog, Status]: | ||||||
|  | 
 | ||||||
|  |     get_index = mode.chart.get_index | ||||||
|     fmsg = pformat(msg) |     fmsg = pformat(msg) | ||||||
|         log.info(f'Received order msg:\n{fmsg}') |     log.debug(f'Received order msg:\n{fmsg}') | ||||||
| 
 |  | ||||||
|     name = msg['name'] |     name = msg['name'] | ||||||
|  | 
 | ||||||
|     if name in ( |     if name in ( | ||||||
|         'position', |         'position', | ||||||
|     ): |     ): | ||||||
|  | @ -785,19 +860,14 @@ async def process_trades_and_update_ui( | ||||||
|         pp_msg_symbol = msg['symbol'].lower() |         pp_msg_symbol = msg['symbol'].lower() | ||||||
|         fqsn = sym.front_fqsn() |         fqsn = sym.front_fqsn() | ||||||
|         broker, key = sym.front_feed() |         broker, key = sym.front_feed() | ||||||
|             # print( |  | ||||||
|             #     f'pp msg symbol: {pp_msg_symbol}\n', |  | ||||||
|             #     f'fqsn: {fqsn}\n', |  | ||||||
|             #     f'front key: {key}\n', |  | ||||||
|             # ) |  | ||||||
| 
 |  | ||||||
|         if ( |         if ( | ||||||
|                 pp_msg_symbol == fqsn.replace(f'.{broker}', '') |             pp_msg_symbol == fqsn | ||||||
|  |             or pp_msg_symbol == fqsn.removesuffix(f'.{broker}') | ||||||
|         ): |         ): | ||||||
|  |             log.info(f'{fqsn} matched pp msg: {fmsg}') | ||||||
|             tracker = mode.trackers[msg['account']] |             tracker = mode.trackers[msg['account']] | ||||||
|             tracker.live_pp.update_from_msg(msg) |             tracker.live_pp.update_from_msg(msg) | ||||||
|                 # update order pane widgets |             tracker.update_from_pp(set_as_startup=True)  # status/pane UI | ||||||
|                 tracker.update_from_pp() |  | ||||||
|             mode.pane.update_status_ui(tracker) |             mode.pane.update_status_ui(tracker) | ||||||
| 
 | 
 | ||||||
|             if tracker.live_pp.size: |             if tracker.live_pp.size: | ||||||
|  | @ -806,84 +876,107 @@ async def process_trades_and_update_ui( | ||||||
| 
 | 
 | ||||||
|         # short circuit to next msg to avoid |         # short circuit to next msg to avoid | ||||||
|         # unnecessary msg content lookups |         # unnecessary msg content lookups | ||||||
|             continue |         return | ||||||
| 
 | 
 | ||||||
|         resp = msg['resp'] |     msg = Status(**msg) | ||||||
|         oid = msg['oid'] |     resp = msg.resp | ||||||
|  |     oid = msg.oid | ||||||
|  |     dialog: Dialog = mode.dialogs.get(oid) | ||||||
| 
 | 
 | ||||||
|         dialog = mode.dialogs.get(oid) |     match msg: | ||||||
|         if dialog is None: |         case Status(resp='dark_open' | 'open'): | ||||||
|             log.warning(f'received msg for untracked dialog:\n{fmsg}') |  | ||||||
| 
 |  | ||||||
|             # TODO: enable pure tracking / mirroring of dialogs |  | ||||||
|             # is desired. |  | ||||||
|             continue |  | ||||||
| 
 |  | ||||||
|         # record message to dialog tracking |  | ||||||
|         dialog.msgs[oid] = msg |  | ||||||
| 
 |  | ||||||
|         # response to 'action' request (buy/sell) |  | ||||||
|         if resp in ( |  | ||||||
|             'dark_submitted', |  | ||||||
|             'broker_submitted' |  | ||||||
|         ): |  | ||||||
| 
 | 
 | ||||||
|  |             if dialog is not None: | ||||||
|                 # show line label once order is live |                 # show line label once order is live | ||||||
|                 mode.on_submit(oid) |                 mode.on_submit(oid) | ||||||
| 
 | 
 | ||||||
|         # resp to 'cancel' request or error condition |             else: | ||||||
|         # for action request |                 log.warning( | ||||||
|         elif resp in ( |                     f'received msg for untracked dialog:\n{fmsg}' | ||||||
|             'broker_cancelled', |                 ) | ||||||
|             'broker_inactive', |                 assert msg.resp in ('open', 'dark_open'), f'Unknown msg: {msg}' | ||||||
|             'broker_errored', | 
 | ||||||
|             'dark_cancelled' |                 sym = mode.chart.linked.symbol | ||||||
|  |                 fqsn = sym.front_fqsn() | ||||||
|  |                 order = Order(**msg.req) | ||||||
|  |                 if ( | ||||||
|  |                     ((order.symbol + f'.{msg.src}') == fqsn) | ||||||
|  | 
 | ||||||
|  |                     # a existing dark order for the same symbol | ||||||
|  |                     or ( | ||||||
|  |                         order.symbol == fqsn | ||||||
|  |                         and ( | ||||||
|  |                             msg.src in ('dark', 'paperboi') | ||||||
|  |                             or (msg.src in fqsn) | ||||||
|  | 
 | ||||||
|  |                         ) | ||||||
|  |                     ) | ||||||
|                 ): |                 ): | ||||||
|  |                     msg.req = order | ||||||
|  |                     dialog = mode.load_unknown_dialog_from_msg(msg) | ||||||
|  |                     mode.on_submit(oid) | ||||||
|  |                     # return dialog, msg | ||||||
|  | 
 | ||||||
|  |         case Status(resp='error'): | ||||||
|             # delete level line from view |             # delete level line from view | ||||||
|             mode.on_cancel(oid) |             mode.on_cancel(oid) | ||||||
|             broker_msg = msg['brokerd_msg'] |             broker_msg = msg.brokerd_msg | ||||||
|             log.warning(f'Order {oid} failed with:\n{pformat(broker_msg)}') |             log.error( | ||||||
|  |                 f'Order {oid}->{resp} with:\n{pformat(broker_msg)}' | ||||||
|  |             ) | ||||||
| 
 | 
 | ||||||
|         elif resp in ( |         case Status(resp='canceled'): | ||||||
|             'dark_triggered' |             # delete level line from view | ||||||
|  |             mode.on_cancel(oid) | ||||||
|  |             req = Order(**msg.req) | ||||||
|  |             log.cancel(f'Canceled {req.action}:{oid}') | ||||||
|  | 
 | ||||||
|  |         case Status( | ||||||
|  |             resp='triggered', | ||||||
|  |             # req=Order(exec_mode='dark')  # TODO: | ||||||
|  |             req={'exec_mode': 'dark'}, | ||||||
|         ): |         ): | ||||||
|  |             # TODO: UX for a "pending" clear/live order | ||||||
|             log.info(f'Dark order triggered for {fmsg}') |             log.info(f'Dark order triggered for {fmsg}') | ||||||
| 
 | 
 | ||||||
|         elif resp in ( |         case Status( | ||||||
|             'alert_triggered' |             resp='triggered', | ||||||
|  |             # req=Order(exec_mode='live', action='alert') as req, # TODO | ||||||
|  |             req={'exec_mode': 'live', 'action': 'alert'} as req, | ||||||
|         ): |         ): | ||||||
|             # should only be one "fill" for an alert |             # should only be one "fill" for an alert | ||||||
|             # add a triangle and remove the level line |             # add a triangle and remove the level line | ||||||
|  |             req = Order(**req) | ||||||
|             mode.on_fill( |             mode.on_fill( | ||||||
|                 oid, |                 oid, | ||||||
|                 price=msg['trigger_price'], |                 price=req.price, | ||||||
|                 arrow_index=get_index(time.time()), |                 arrow_index=get_index(time.time()), | ||||||
|             ) |             ) | ||||||
|             mode.lines.remove_line(uuid=oid) |             mode.lines.remove_line(uuid=oid) | ||||||
|  |             msg.req = req | ||||||
|             await mode.on_exec(oid, msg) |             await mode.on_exec(oid, msg) | ||||||
| 
 | 
 | ||||||
|         # response to completed 'action' request for buy/sell |         # response to completed 'dialog' for order request | ||||||
|         elif resp in ( |         case Status( | ||||||
|             'broker_executed', |             resp='closed', | ||||||
|  |             # req=Order() as req,  # TODO | ||||||
|  |             req=req, | ||||||
|         ): |         ): | ||||||
|             # right now this is just triggering a system alert |             msg.req = Order(**req) | ||||||
|             await mode.on_exec(oid, msg) |             await mode.on_exec(oid, msg) | ||||||
| 
 |  | ||||||
|             if msg['brokerd_msg']['remaining'] == 0: |  | ||||||
|             mode.lines.remove_line(uuid=oid) |             mode.lines.remove_line(uuid=oid) | ||||||
| 
 | 
 | ||||||
|         # each clearing tick is responded individually |         # each clearing tick is responded individually | ||||||
|         elif resp in ( |         case Status(resp='fill'): | ||||||
|             'broker_filled', |  | ||||||
|         ): |  | ||||||
| 
 | 
 | ||||||
|  |             # handle out-of-piker fills reporting? | ||||||
|             known_order = book._sent_orders.get(oid) |             known_order = book._sent_orders.get(oid) | ||||||
|             if not known_order: |             if not known_order: | ||||||
|                 log.warning(f'order {oid} is unknown') |                 log.warning(f'order {oid} is unknown') | ||||||
|                 continue |                 return | ||||||
| 
 | 
 | ||||||
|             action = known_order.action |             action = known_order.action | ||||||
|             details = msg['brokerd_msg'] |             details = msg.brokerd_msg | ||||||
| 
 | 
 | ||||||
|             # TODO: some kinda progress system |             # TODO: some kinda progress system | ||||||
|             mode.on_fill( |             mode.on_fill( | ||||||
|  | @ -892,9 +985,27 @@ async def process_trades_and_update_ui( | ||||||
|                 pointing='up' if action == 'buy' else 'down', |                 pointing='up' if action == 'buy' else 'down', | ||||||
| 
 | 
 | ||||||
|                 # TODO: put the actual exchange timestamp |                 # TODO: put the actual exchange timestamp | ||||||
|                 arrow_index=get_index(details['broker_time']), |                 arrow_index=get_index( | ||||||
|  |                     # TODO: note currently the ``kraken`` openOrders sub | ||||||
|  |                     # doesn't deliver their engine timestamp as part of | ||||||
|  |                     # it's schema, so this value is **not** from them | ||||||
|  |                     # (see our backend code). We should probably either | ||||||
|  |                     # include all provider-engine timestamps in the | ||||||
|  |                     # summary 'closed' status msg and/or figure out | ||||||
|  |                     # a way to indicate what is a `brokerd` stamp versus | ||||||
|  |                     # a true backend one? This will require finagling | ||||||
|  |                     # with how each backend tracks/summarizes time | ||||||
|  |                     # stamps for the downstream API. | ||||||
|  |                     details['broker_time'] | ||||||
|  |                 ), | ||||||
|             ) |             ) | ||||||
| 
 | 
 | ||||||
|             # TODO: how should we look this up? |             # TODO: how should we look this up? | ||||||
|             # tracker = mode.trackers[msg['account']] |             # tracker = mode.trackers[msg['account']] | ||||||
|             # tracker.live_pp.fills.append(msg) |             # tracker.live_pp.fills.append(msg) | ||||||
|  | 
 | ||||||
|  |     # record message to dialog tracking | ||||||
|  |     if dialog: | ||||||
|  |         dialog.msgs[oid] = msg | ||||||
|  | 
 | ||||||
|  |     return dialog, msg | ||||||
|  |  | ||||||
|  | @ -1,14 +1,13 @@ | ||||||
| # we require a pinned dev branch to get some edge features that | # we require a pinned dev branch to get some edge features that | ||||||
| # are often untested in tractor's CI and/or being tested by us | # are often untested in tractor's CI and/or being tested by us | ||||||
| # first before committing as core features in tractor's base. | # first before committing as core features in tractor's base. | ||||||
| -e git+https://github.com/goodboy/tractor.git@master#egg=tractor | -e git+https://github.com/goodboy/tractor.git@reentrant_moc#egg=tractor | ||||||
| 
 | 
 | ||||||
| # `pyqtgraph` peeps keep breaking, fixing, improving so might as well | # `pyqtgraph` peeps keep breaking, fixing, improving so might as well | ||||||
| # pin this to a dev branch that we have more control over especially | # pin this to a dev branch that we have more control over especially | ||||||
| # as more graphics stuff gets hashed out. | # as more graphics stuff gets hashed out. | ||||||
| -e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph | -e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph | ||||||
| 
 | 
 | ||||||
| 
 |  | ||||||
| # our async client for ``marketstore`` (the tsdb) | # our async client for ``marketstore`` (the tsdb) | ||||||
| -e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore | -e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore | ||||||
| 
 | 
 | ||||||
|  | @ -18,4 +17,7 @@ | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| # ``asyncvnc`` for sending interactions to ib-gw inside docker | # ``asyncvnc`` for sending interactions to ib-gw inside docker | ||||||
| -e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc | -e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc | ||||||
|  | 
 | ||||||
|  | # ``cryptofeed`` for connecting to various crypto exchanges + custom fixes | ||||||
|  | -e git+https://github.com/pikers/cryptofeed.git@date_parsing#egg=cryptofeed | ||||||
|  |  | ||||||
							
								
								
									
										5
									
								
								setup.py
								
								
								
								
							
							
						
						
									
										5
									
								
								setup.py
								
								
								
								
							|  | @ -41,23 +41,24 @@ setup( | ||||||
|     }, |     }, | ||||||
|     install_requires=[ |     install_requires=[ | ||||||
|         'toml', |         'toml', | ||||||
|  |         'tomli',  # fastest pure py reader | ||||||
|         'click', |         'click', | ||||||
|         'colorlog', |         'colorlog', | ||||||
|         'attrs', |         'attrs', | ||||||
|         'pygments', |         'pygments', | ||||||
|         'colorama',  # numba traceback coloring |         'colorama',  # numba traceback coloring | ||||||
|         'pydantic',  # structured data |         'msgspec',  # performant IPC messaging and structs | ||||||
| 
 | 
 | ||||||
|         # async |         # async | ||||||
|         'trio', |         'trio', | ||||||
|         'trio-websocket', |         'trio-websocket', | ||||||
|         'msgspec',  # performant IPC messaging |  | ||||||
|         'async_generator', |         'async_generator', | ||||||
| 
 | 
 | ||||||
|         # from github currently (see requirements.txt) |         # from github currently (see requirements.txt) | ||||||
|         # 'trimeter',  # not released yet.. |         # 'trimeter',  # not released yet.. | ||||||
|         # 'tractor', |         # 'tractor', | ||||||
|         # asyncvnc, |         # asyncvnc, | ||||||
|  |         # 'cryptofeed', | ||||||
| 
 | 
 | ||||||
|         # brokers |         # brokers | ||||||
|         'asks==2.4.8', |         'asks==2.4.8', | ||||||
|  |  | ||||||
		Loading…
	
		Reference in New Issue