Compare commits: 310_plus...misc_broke
631 commits
Author | SHA1 | Date |
---|---|---|
Tyler Goodlet | 090d1ba524 | |
Tyler Goodlet | afc45a8e16 | |
Tyler Goodlet | b89fd9652c | |
Tyler Goodlet | 94290c7d8b | |
Tyler Goodlet | 73379d3627 | |
Tyler Goodlet | 23835f2c08 | |
Tyler Goodlet | d2aee00a56 | |
Tyler Goodlet | cf6e44cb9c | |
Tyler Goodlet | a146ad9e69 | |
Tyler Goodlet | 70ad1a1860 | |
Tyler Goodlet | f3ef73ef41 | |
Tyler Goodlet | a9832dc0cb | |
Tyler Goodlet | 9be245e955 | |
Tyler Goodlet | 800773e585 | |
goodboy | 8d1eb81f16 | |
Tyler Goodlet | 963e5bdd62 | |
Tyler Goodlet | 55de9abc41 | |
Tyler Goodlet | 593db0ed0d | |
Tyler Goodlet | 06622105cd | |
Tyler Goodlet | 008ae47e14 | |
Tyler Goodlet | 81585d9e6e | |
Tyler Goodlet | f6b7057b0d | |
Tyler Goodlet | 76f920a16b | |
Tyler Goodlet | f232d6d4ee | |
Tyler Goodlet | b7e1443618 | |
Tyler Goodlet | 5d021ffb85 | |
Tyler Goodlet | 28fd795280 | |
Tyler Goodlet | c944db5f02 | |
Tyler Goodlet | 967e28b7ac | |
Tyler Goodlet | 2a158aea2c | |
Tyler Goodlet | 88870fdda7 | |
Tyler Goodlet | 326f153a47 | |
Tyler Goodlet | f5cd63ad35 | |
Tyler Goodlet | 1e96ca32df | |
Tyler Goodlet | c088963cf2 | |
Tyler Goodlet | 79fcbcc281 | |
Tyler Goodlet | ddbba76095 | |
Tyler Goodlet | 0a959c1c74 | |
Tyler Goodlet | e348968113 | |
Tyler Goodlet | 7bbe86d6fb | |
Tyler Goodlet | 7b9db86753 | |
Tyler Goodlet | 20a396270e | |
Tyler Goodlet | 81516c5204 | |
Tyler Goodlet | d6fb6fe3ae | |
Tyler Goodlet | 8476d8d056 | |
Tyler Goodlet | 36868bb86e | |
Tyler Goodlet | 29b6b3e54f | |
Tyler Goodlet | 8a01c9e42b | |
Tyler Goodlet | 2c4daf08e0 | |
Tyler Goodlet | 7daab6329d | |
Tyler Goodlet | bb6452b969 | |
Tyler Goodlet | 25bfe6f035 | |
Tyler Goodlet | 32b36aa042 | |
Tyler Goodlet | e7de5404d3 | |
Tyler Goodlet | 18dc8b08e4 | |
Tyler Goodlet | 5bf3cb8e4b | |
Tyler Goodlet | c7d5db5f90 | |
Tyler Goodlet | 1bf1965a8b | |
Tyler Goodlet | 051a8729b6 | |
Tyler Goodlet | 8e85ed92c8 | |
Tyler Goodlet | 2a9042b1b1 | |
Tyler Goodlet | 344a634cb6 | |
Tyler Goodlet | 508de6182a | |
Tyler Goodlet | 40000345a1 | |
goodboy | 220d38b4a9 | |
Esmeralda Gallardo | 888438ca25 | |
goodboy | d84bcf77c0 | |
Guillermo Rodriguez | 0474d66531 | |
algorandpa | f218b804b4 | |
Guillermo Rodriguez | 7b14f498a8 | |
Esmeralda Gallardo | 18e4352faf | |
Esmeralda Gallardo | a6e921548b | |
Esmeralda Gallardo | 3f5dec82ed | |
Esmeralda Gallardo | db0b59abaa | |
algorandpa | f5bcd1d91c | |
algorandpa | db11c3c0f8 | |
Tyler Goodlet | df6071ae9e | |
goodboy | cc1694760c | |
goodboy | 4d8b22dd8f | |
Tyler Goodlet | fd296a557e | |
Tyler Goodlet | 0de2f863bd | |
Tyler Goodlet | de93da202b | |
Tyler Goodlet | 5c459f21be | |
goodboy | 5915cf3acf | |
algorandpa | 997bf31bd4 | |
algorandpa | f3427bb13b | |
algorandpa | 6fa266e3e0 | |
Guillermo Rodriguez | 019a6432fb | |
goodboy | 209e1085ae | |
Tyler Goodlet | 0ef75e6aa6 | |
Tyler Goodlet | 243d0329f6 | |
Tyler Goodlet | a0ce9ecc0d | |
Tyler Goodlet | af9c30c3f5 | |
Zoltan | ebbfa47baf | |
Tyler Goodlet | 02fbc0a0ed | |
goodboy | 4729e4c6bc | |
goodboy | a44b8e3e22 | |
goodboy | 8a89303cb3 | |
Tyler Goodlet | e547b307f6 | |
Tyler Goodlet | 72ec9b1e10 | |
Tyler Goodlet | 40c70ae6d8 | |
Tyler Goodlet | d3fefdeaff | |
Tyler Goodlet | 8be005212f | |
Tyler Goodlet | 5a2795e76b | |
Tyler Goodlet | a987f0ab81 | |
Tyler Goodlet | d99b40317d | |
Tyler Goodlet | 9ae519f6fa | |
Tyler Goodlet | 8f3fe8e542 | |
Tyler Goodlet | 490d85aba5 | |
goodboy | ba2e1e04cd | |
Tyler Goodlet | 5d4929db9c | |
Tyler Goodlet | c41400ae18 | |
Tyler Goodlet | e71bd2cb1e | |
Tyler Goodlet | be24473fb4 | |
Tyler Goodlet | b524ea5c22 | |
Tyler Goodlet | d46945cb09 | |
Tyler Goodlet | 1d4fc6f327 | |
Tyler Goodlet | 5976acbe76 | |
goodboy | 11ecf9cb09 | |
goodboy | 2dac531729 | |
Tyler Goodlet | 1fadf58ab7 | |
Tyler Goodlet | ceca0d9fb7 | |
Tyler Goodlet | df16726211 | |
Tyler Goodlet | fb4f1732b6 | |
Tyler Goodlet | d5b357b69a | |
Tyler Goodlet | 610fb5f7c6 | |
Tyler Goodlet | 2b231ba631 | |
Tyler Goodlet | 286228c290 | |
Tyler Goodlet | a1a24da7b6 | |
Tyler Goodlet | 553d0557b6 | |
Tyler Goodlet | 2f7b272d8c | |
Tyler Goodlet | dc1edeecda | |
Tyler Goodlet | 4ca7817735 | |
Tyler Goodlet | 5b63585398 | |
Tyler Goodlet | 0000d9a314 | |
Tyler Goodlet | f7ec66362e | |
Tyler Goodlet | b7ef0596b9 | |
Tyler Goodlet | 143e86a80c | |
Tyler Goodlet | 956c7d3435 | |
Tyler Goodlet | 330d16262e | |
Tyler Goodlet | c7f57b940c | |
Tyler Goodlet | 27bd3c07af | |
Tyler Goodlet | 55dc27a197 | |
Tyler Goodlet | a11f20fac2 | |
Tyler Goodlet | daebb78755 | |
Tyler Goodlet | 90a395a069 | |
Tyler Goodlet | 23d0353934 | |
Tyler Goodlet | ede67ed184 | |
Tyler Goodlet | 811d21e111 | |
Tyler Goodlet | 54567d33da | |
Tyler Goodlet | 61ca5f7e19 | |
Tyler Goodlet | 7396624be0 | |
Tyler Goodlet | 25b90afbdb | |
Tyler Goodlet | 72dfeb2b4e | |
Tyler Goodlet | 6b34c9e866 | |
Tyler Goodlet | e7ec01b8e6 | |
Tyler Goodlet | fce7055c62 | |
Tyler Goodlet | bf7d5e9a71 | |
Tyler Goodlet | 2a866dde65 | |
Tyler Goodlet | 220981e718 | |
Tyler Goodlet | 8537a4091b | |
Tyler Goodlet | 71a11a23bd | |
Tyler Goodlet | fa368b1263 | |
Tyler Goodlet | e6dd1458f8 | |
Tyler Goodlet | 9486d993ce | |
Tyler Goodlet | 30994dac10 | |
Tyler Goodlet | 8a61211c8c | |
Tyler Goodlet | c43f7eb656 | |
goodboy | d05caa4b02 | |
Tyler Goodlet | 63e9af002d | |
goodboy | 5144299f4f | |
Tyler Goodlet | c437f9370a | |
Tyler Goodlet | 94f81587ab | |
Tyler Goodlet | 2bc25e3593 | |
Tyler Goodlet | 1d9ab7b0de | |
Tyler Goodlet | 4c96a4878e | |
Tyler Goodlet | 8cd56cb6d3 | |
Tyler Goodlet | c246dcef6f | |
Tyler Goodlet | 26d6e10ad7 | |
Tyler Goodlet | 3924c66bd0 | |
Tyler Goodlet | 2fbfe583dd | |
Tyler Goodlet | 525f805cdb | |
Tyler Goodlet | b65c02336d | |
Tyler Goodlet | d3abfce540 | |
Tyler Goodlet | 49433ea87d | |
goodboy | 31b0d8cee8 | |
Tyler Goodlet | 35871d0213 | |
Tyler Goodlet | 4877af9bc3 | |
Tyler Goodlet | 909e068121 | |
Tyler Goodlet | cf835b97ca | |
Tyler Goodlet | 30bce42c0b | |
Tyler Goodlet | 48ff4859e6 | |
Tyler Goodlet | 887583d27f | |
Tyler Goodlet | 45b97bf6c3 | |
Tyler Goodlet | 91397b85a4 | |
Tyler Goodlet | 47f81b31af | |
goodboy | 30c452cfd0 | |
Tyler Goodlet | fda1c5b554 | |
goodboy | d6c9834a9a | |
Tyler Goodlet | 41b0c11aaa | |
Tyler Goodlet | cc67d23eee | |
Tyler Goodlet | 4818af1445 | |
Tyler Goodlet | 2cf1742999 | |
Tyler Goodlet | 25ac6e6665 | |
Tyler Goodlet | 90754f979b | |
Tyler Goodlet | c0d490ed63 | |
Tyler Goodlet | 7c6d12d982 | |
Tyler Goodlet | fd8c05e024 | |
Tyler Goodlet | 5d65c86c84 | |
Tyler Goodlet | cf11e8d7d8 | |
Tyler Goodlet | ed868f6246 | |
goodboy | 5d371ad80e | |
Tyler Goodlet | 6897aed6b6 | |
Tyler Goodlet | a61a11f86b | |
Tyler Goodlet | 286f620f8e | |
Tyler Goodlet | b7e60b9653 | |
Tyler Goodlet | df42e7acc4 | |
Tyler Goodlet | e492e9ca0c | |
Tyler Goodlet | 44c6f6dfda | |
Tyler Goodlet | ad2100fe3f | |
Tyler Goodlet | ae64ac79a6 | |
Tyler Goodlet | 20663dfa1c | |
Tyler Goodlet | 70f2241d22 | |
Tyler Goodlet | b3fcc25e21 | |
Tyler Goodlet | 4f15ce346b | |
Tyler Goodlet | 445849337f | |
Tyler Goodlet | 3fd7107e08 | |
Tyler Goodlet | 73a02d54b7 | |
Tyler Goodlet | b734af6dd0 | |
Tyler Goodlet | f7c0ee930a | |
Tyler Goodlet | ead426abc4 | |
Tyler Goodlet | bcd6bbb7ca | |
Tyler Goodlet | 80929d080f | |
Tyler Goodlet | eed47b3733 | |
Tyler Goodlet | d5f0c59b57 | |
Tyler Goodlet | d11dc787a1 | |
Tyler Goodlet | 1e81feee46 | |
Tyler Goodlet | 40a9761943 | |
Tyler Goodlet | 256bcf36d3 | |
Tyler Goodlet | 9944277096 | |
Tyler Goodlet | f9dc5637fa | |
Tyler Goodlet | addedc20f1 | |
Tyler Goodlet | 1fa6e8d9ba | |
Tyler Goodlet | 2a06dc997f | |
Tyler Goodlet | 6b93eedcda | |
Tyler Goodlet | a786df65de | |
Tyler Goodlet | 8f2823d5f0 | |
Tyler Goodlet | 58fe220fde | |
Tyler Goodlet | 161448c31a | |
Tyler Goodlet | 1c685189d1 | |
Tyler Goodlet | ceac3f2ee4 | |
Tyler Goodlet | a07367fae2 | |
Tyler Goodlet | 006190d227 | |
Tyler Goodlet | 412197019e | |
Tyler Goodlet | 271e378ce3 | |
Tyler Goodlet | 8e07fda88f | |
Tyler Goodlet | a4935b8fa8 | |
Tyler Goodlet | 2b76baeb10 | |
Tyler Goodlet | 2dfa8976a0 | |
Tyler Goodlet | d3402f715b | |
Tyler Goodlet | f070f9a984 | |
Tyler Goodlet | 416270ee6c | |
Tyler Goodlet | 14bee778ec | |
Tyler Goodlet | 10c1944de5 | |
Tyler Goodlet | 7958d8ad4f | |
Tyler Goodlet | 50c5dc255c | |
Tyler Goodlet | 31735f26d3 | |
Tyler Goodlet | 2ef6460853 | |
Tyler Goodlet | 5e98a30537 | |
Tyler Goodlet | dd03ef42ac | |
Tyler Goodlet | 59884d251e | |
Tyler Goodlet | e06e257a81 | |
Tyler Goodlet | 6e574835c8 | |
Tyler Goodlet | 49ccfdd673 | |
Tyler Goodlet | 3a434f312b | |
Tyler Goodlet | bb4dc448b3 | |
Tyler Goodlet | 9846396df2 | |
Tyler Goodlet | f0d417ce42 | |
Tyler Goodlet | 55fc4114b4 | |
Tyler Goodlet | 97b074365b | |
Tyler Goodlet | f79c3617d6 | |
Tyler Goodlet | 861fe791eb | |
Tyler Goodlet | 60052ff73a | |
Tyler Goodlet | 4d2708cd42 | |
Tyler Goodlet | d1cc52dff5 | |
Tyler Goodlet | 4fa901dbcb | |
goodboy | f2c488c1e0 | |
Tyler Goodlet | 4a9c16d298 | |
Tyler Goodlet | b9d5b904f4 | |
Tyler Goodlet | 0aef762d9a | |
goodboy | c724117c1a | |
Tyler Goodlet | cc3bb85c66 | |
goodboy | 20817313b1 | |
Tyler Goodlet | 23d0b8a7ac | |
goodboy | 087a34f061 | |
Tyler Goodlet | 653f5c824b | |
Tyler Goodlet | f9217570ab | |
Tyler Goodlet | 7f224f0342 | |
Tyler Goodlet | 75a5f3795a | |
Tyler Goodlet | de9f215c83 | |
Tyler Goodlet | 848e345364 | |
Tyler Goodlet | 38b190e598 | |
Tyler Goodlet | 3a9bc8058f | |
Guillermo Rodriguez | 739a231afc | |
Tyler Goodlet | 7dfa4c3cde | |
Tyler Goodlet | 7b653fe4f4 | |
goodboy | 77a687bced | |
Tyler Goodlet | d5c1cdd91d | |
Tyler Goodlet | 46d3fe88ca | |
Tyler Goodlet | 5c8c5d8fbf | |
goodboy | 71412310c4 | |
Guillermo Rodriguez | 0c323fdc0b | |
Tyler Goodlet | 02f53d0c13 | |
Tyler Goodlet | 8792c97de6 | |
Tyler Goodlet | 980815d075 | |
Tyler Goodlet | 4cedfedc21 | |
Tyler Goodlet | fe3d0c6fdd | |
Tyler Goodlet | 9200e8da57 | |
Tyler Goodlet | 430d065da6 | |
Tyler Goodlet | ecd93cb05a | |
Guillermo Rodriguez | 4facd161a9 | |
goodboy | c5447fda06 | |
Guillermo Rodriguez | 0447612b34 | |
goodboy | b5499b8225 | |
Guillermo Rodriguez | 00aabddfe8 | |
Guillermo Rodriguez | 43fb720877 | |
Guillermo Rodriguez | 9626dbd7ac | |
Guillermo Rodriguez | f286c79a03 | |
Guillermo Rodriguez | accb0eee6c | |
Guillermo Rodriguez | e97dd1cbdb | |
Guillermo Rodriguez | 34fb497eb4 | |
Guillermo Rodriguez | 6669ba6590 | |
Guillermo Rodriguez | cb8099bb8c | |
Guillermo Rodriguez | 80a1a58bfc | |
Guillermo Rodriguez | d60f222bb7 | |
Guillermo Rodriguez | 2c2e43d8ac | |
Guillermo Rodriguez | 212b3d620d | |
Guillermo Rodriguez | 92090b01b8 | |
Guillermo Rodriguez | 9073fbc317 | |
Guillermo Rodriguez | f55f56a29f | |
Guillermo Rodriguez | 28e025d02e | |
Guillermo Rodriguez | e558e5837e | |
Guillermo Rodriguez | a0b415095a | |
Guillermo Rodriguez | 6df181c233 | |
Guillermo Rodriguez | 7acc4e3208 | |
Guillermo Rodriguez | 10ea242143 | |
Tyler Goodlet | eda6ecd529 | |
goodboy | cf5b0bf9c6 | |
Tyler Goodlet | b9dba48306 | |
Tyler Goodlet | 4d2e23b5ce | |
Tyler Goodlet | 973bf87e67 | |
Tyler Goodlet | 5861839783 | |
Tyler Goodlet | 06845e5504 | |
Tyler Goodlet | 43bdd4d022 | |
Tyler Goodlet | bafd2cb44f | |
Tyler Goodlet | be8fd32e7d | |
Tyler Goodlet | ee8c00684b | |
Tyler Goodlet | 7379dc03af | |
Tyler Goodlet | a602c47d47 | |
Tyler Goodlet | 317610e00a | |
Tyler Goodlet | c4af706d51 | |
Tyler Goodlet | 665bb183f7 | |
Tyler Goodlet | f6ba95a6c7 | |
Tyler Goodlet | e2cd8c4aef | |
Tyler Goodlet | c8bff81220 | |
Tyler Goodlet | 2aec1c5f1d | |
Tyler Goodlet | bec32956a8 | |
Tyler Goodlet | 91fdc7c5c7 | |
Tyler Goodlet | b59ed74bc1 | |
Tyler Goodlet | 16012f6f02 | |
Tyler Goodlet | 2b61672723 | |
Tyler Goodlet | 176b230a46 | |
Tyler Goodlet | 7fa9dbf869 | |
Tyler Goodlet | 87ed9abefa | |
Tyler Goodlet | 2548aae73d | |
Tyler Goodlet | 1cfa04927d | |
Tyler Goodlet | e34ea94f9f | |
Tyler Goodlet | 1510383738 | |
Tyler Goodlet | 016b669d63 | |
Tyler Goodlet | 682a0191ef | |
Tyler Goodlet | 9e36dbe47f | |
goodboy | 8bef67642e | |
Tyler Goodlet | 52febac6ae | |
Tyler Goodlet | f202699c25 | |
Tyler Goodlet | 0fb07670d2 | |
Tyler Goodlet | 73d2e7716f | |
Tyler Goodlet | 999ae5a1c6 | |
Tyler Goodlet | 23ba0e5e69 | |
Tyler Goodlet | 941a2196b3 | |
Tyler Goodlet | 0cf4e07b84 | |
Tyler Goodlet | 7bec989eed | |
Tyler Goodlet | 6856ca207f | |
Guillermo Rodriguez | 2e5616850c | |
Tyler Goodlet | a83bd9c608 | |
goodboy | 9651ca84bf | |
Tyler Goodlet | 109b35f6eb | |
Tyler Goodlet | e28c1748fc | |
Tyler Goodlet | 72889b4d1f | |
Tyler Goodlet | ae001c3dd7 | |
Tyler Goodlet | 2309e7ab05 | |
Tyler Goodlet | 46c51b55f7 | |
goodboy | a9185e7d6f | |
Tyler Goodlet | 3a0987e0be | |
Tyler Goodlet | d280a592b1 | |
goodboy | ef5829a6b7 | |
Tyler Goodlet | 30bcfdcc83 | |
Tyler Goodlet | 1a291939c3 | |
Tyler Goodlet | 69e501764a | |
goodboy | 7f3f7f0372 | |
Tyler Goodlet | 1cbf45b4c4 | |
Tyler Goodlet | 227a80469e | |
Tyler Goodlet | dc8072c6db | |
Tyler Goodlet | 808dbb12e6 | |
Tyler Goodlet | 44e21b1de9 | |
Tyler Goodlet | b3058b8c78 | |
Tyler Goodlet | db564d7977 | |
Tyler Goodlet | e6a3e8b65a | |
Tyler Goodlet | d43ba47ebe | |
Tyler Goodlet | 168c9863cb | |
Tyler Goodlet | 0fb31586fd | |
Tyler Goodlet | 8b609f531b | |
Tyler Goodlet | d502274eb9 | |
Tyler Goodlet | b1419c850d | |
Tyler Goodlet | aa7f24b6db | |
Tyler Goodlet | 319e68c855 | |
Tyler Goodlet | 64f920d7e5 | |
Tyler Goodlet | 3b79743c7b | |
Tyler Goodlet | 54008a1976 | |
Tyler Goodlet | b96b7a8b9c | |
Tyler Goodlet | 0fca1b3e1a | |
Tyler Goodlet | 2386270cad | |
Tyler Goodlet | 5b135fad61 | |
Tyler Goodlet | abb6854e74 | |
Tyler Goodlet | 22f9b2552c | |
Tyler Goodlet | 57f2478dc7 | |
Tyler Goodlet | 5dc9a61ec4 | |
Tyler Goodlet | b0d3d9bb01 | |
Tyler Goodlet | caecbaa231 | |
Tyler Goodlet | a20a8d95d5 | |
Tyler Goodlet | ba93f96c71 | |
Tyler Goodlet | 804e9afdde | |
Tyler Goodlet | 89bcaed15e | |
Tyler Goodlet | bb2f8e4304 | |
Tyler Goodlet | 8ab8268edc | |
Tyler Goodlet | bbcc55b24c | |
Tyler Goodlet | 9fa9c27e4d | |
Tyler Goodlet | d9b4c4a413 | |
Tyler Goodlet | 84cab1327d | |
Tyler Goodlet | df4cec930b | |
Tyler Goodlet | ab08dc582d | |
Tyler Goodlet | f79d9865a0 | |
Tyler Goodlet | 00378c330c | |
goodboy | 180b97b180 | |
Tyler Goodlet | f0b3a4d5c0 | |
goodboy | e2e66324cc | |
Tyler Goodlet | d950c78b81 | |
Tyler Goodlet | 7dbcbfdcd5 | |
Tyler Goodlet | 279c899de5 | |
Tyler Goodlet | db5aacdb9c | |
Tyler Goodlet | c7b84ab500 | |
Tyler Goodlet | 9967adb371 | |
Tyler Goodlet | 30ff793a22 | |
Tyler Goodlet | 666587991a | |
goodboy | 01005e40a8 | |
goodboy | d81e629c29 | |
Tyler Goodlet | 2766fad719 | |
Tyler Goodlet | ae71168216 | |
Tyler Goodlet | a0c238daa7 | |
Tyler Goodlet | 7cbdc6a246 | |
Tyler Goodlet | 2ff8be71aa | |
Tyler Goodlet | ddffaa952d | |
Tyler Goodlet | 5520e9ef21 | |
Tyler Goodlet | 958e542f7d | |
goodboy | 927bbc7258 | |
Tyler Goodlet | 45bef0cea9 | |
goodboy | a3d46f713e | |
Tyler Goodlet | 5684120c11 | |
Tyler Goodlet | ddb195ed2c | |
Tyler Goodlet | 6747831677 | |
Tyler Goodlet | 9326379b04 | |
Tyler Goodlet | 09d9a7ea2b | |
Tyler Goodlet | 45871d5846 | |
goodboy | bf7a49c19b | |
goodboy | 0a7fce087c | |
Tyler Goodlet | d3130ca04c | |
Tyler Goodlet | e30a3c5b54 | |
Tyler Goodlet | 2393965e83 | |
Tyler Goodlet | fb39da19f4 | |
Tyler Goodlet | a27431c34f | |
Tyler Goodlet | 070b9f3dc1 | |
goodboy | f2dba44169 | |
Tyler Goodlet | 0ef5da0881 | |
Tyler Goodlet | 0580b204a3 | |
Tyler Goodlet | 6ce699ae1f | |
Tyler Goodlet | 3aa72abacf | |
Tyler Goodlet | 04004525c1 | |
Tyler Goodlet | a7f0adf1cf | |
Tyler Goodlet | cef511092d | |
Tyler Goodlet | 4e5df973a9 | |
Tyler Goodlet | 6a1a62d8c0 | |
Tyler Goodlet | e0491cf2e7 | |
Tyler Goodlet | 90bc9b9730 | |
goodboy | f449672c68 | |
Tyler Goodlet | fd22f45178 | |
goodboy | 37f634a2ed | |
Tyler Goodlet | dfee9dd97e | |
Tyler Goodlet | 2a99f7a4d7 | |
Tyler Goodlet | b44e2d9ed9 | |
Tyler Goodlet | 795d4d76f4 | |
Tyler Goodlet | c26acb1fa8 | |
Tyler Goodlet | 11b6699a54 | |
Tyler Goodlet | f9bdd643cf | |
Tyler Goodlet | 2baea21c7d | |
Tyler Goodlet | bea0111753 | |
Tyler Goodlet | c870665be0 | |
Tyler Goodlet | 4ff1090284 | |
Tyler Goodlet | f22461a844 | |
Tyler Goodlet | 458c7211ee | |
Tyler Goodlet | 5cc4b19a7c | |
goodboy | f5236f658b | |
goodboy | a360b66cc0 | |
Tyler Goodlet | 4bcb791161 | |
Tyler Goodlet | 4c7c78c815 | |
Tyler Goodlet | 019867b413 | |
Tyler Goodlet | f356fb0a68 | |
goodboy | 756249ff70 | |
goodboy | 419ebebe72 | |
goodboy | a229996ebe | |
Tyler Goodlet | af01e89612 | |
Tyler Goodlet | 609034c634 | |
Tyler Goodlet | 95dd0e6bd6 | |
goodboy | 479ad1bb15 | |
Tyler Goodlet | d506235a8b | |
Tyler Goodlet | 7846446a44 | |
Tyler Goodlet | 214f864dcf | |
Tyler Goodlet | 4c0f2099aa | |
Tyler Goodlet | aea7bec2c3 | |
Tyler Goodlet | 47777e4192 | |
Tyler Goodlet | f6888057c3 | |
Tyler Goodlet | f65f56ec75 | |
Tyler Goodlet | 5d39b04552 | |
Tyler Goodlet | 735fbc6259 | |
Tyler Goodlet | fcd7e0f3f3 | |
Tyler Goodlet | 9106d13dfe | |
Tyler Goodlet | d3caad6e11 | |
Tyler Goodlet | f87a2a810a | |
Tyler Goodlet | 208e2e9e97 | |
Tyler Goodlet | 90cc6eb317 | |
Tyler Goodlet | b118becc84 | |
Tyler Goodlet | 7442d68ecf | |
Tyler Goodlet | 076c167d6e | |
Tyler Goodlet | 64d8cd448f | |
Tyler Goodlet | ec6a28a8b1 | |
Tyler Goodlet | cc15d02488 | |
goodboy | d5bc43e8dd | |
Tyler Goodlet | 287a2c8396 | |
Tyler Goodlet | 453ebdfe30 | |
Tyler Goodlet | 2b1fb90e03 | |
Tyler Goodlet | 695ba5288d | |
Tyler Goodlet | d6c32bba86 | |
Tyler Goodlet | fa89207583 | |
Tyler Goodlet | 557562e25c | |
Tyler Goodlet | c6efa2641b | |
Tyler Goodlet | 8a7e391b4e | |
Tyler Goodlet | aec48a1dd5 | |
Tyler Goodlet | 87f301500d | |
Tyler Goodlet | 566a54ffb6 | |
Tyler Goodlet | f9c4b3cc96 | |
Tyler Goodlet | a12e6800ff | |
Tyler Goodlet | cc68501c7a | |
Tyler Goodlet | 7ebf8a8dc0 | |
Tyler Goodlet | 4475823e48 | |
Tyler Goodlet | 3713288b48 | |
Tyler Goodlet | 4fdfb81876 | |
Tyler Goodlet | f32b4d37cb | |
Tyler Goodlet | 2063b9d8bb | |
Tyler Goodlet | fe14605034 | |
Tyler Goodlet | 68b32208de | |
Tyler Goodlet | f1fe369bbf | |
Tyler Goodlet | 16b2937d23 | |
Tyler Goodlet | bfad676b7c | |
Tyler Goodlet | c617a06905 | |
Tyler Goodlet | ff74f4302a | |
Tyler Goodlet | 21153a0e1e | |
Tyler Goodlet | b6f344f34a | |
Tyler Goodlet | ecdc747ced | |
Tyler Goodlet | 5147cd7be0 | |
Tyler Goodlet | 3dcb72d429 | |
Tyler Goodlet | fbee33b00d | |
Tyler Goodlet | 3991d8f911 | |
Tyler Goodlet | 7b2e8f1ba5 | |
Tyler Goodlet | cbcbb2b243 | |
Tyler Goodlet | cd3bfb1ea4 | |
Tyler Goodlet | 82b718d5a3 | |
Tyler Goodlet | 05a1a4e3d8 | |
Tyler Goodlet | 412138a75b | |
Tyler Goodlet | c1b63f4757 | |
Tyler Goodlet | 5d774bef90 | |
Tyler Goodlet | de77c7d209 | |
Tyler Goodlet | ce1eb11b59 | |
Tyler Goodlet | b629ce177d | |
Tyler Goodlet | 73fa320917 | |
Tyler Goodlet | dd05ed1371 | |
Tyler Goodlet | 2a641ab8b4 | |
Tyler Goodlet | f8f7ca350c | |
Tyler Goodlet | 88b4ccc768 | |
Tyler Goodlet | eb2bad5138 | |
Tyler Goodlet | f768576060 | |
Tyler Goodlet | add0e92335 | |
Tyler Goodlet | 1eb7e109e6 | |
Tyler Goodlet | 725909a94c | |
Tyler Goodlet | 050aa7594c | |
Tyler Goodlet | 450009ff9c | |
goodboy | b2d5892010 | |
goodboy | 5a3b465ac0 | |
Tyler Goodlet | be7afdaa89 | |
Tyler Goodlet | 1c561207f5 | |
Tyler Goodlet | ed2c962bb9 | |
Tyler Goodlet | 147ceca016 | |
Tyler Goodlet | 03a7940f83 | |
Tyler Goodlet | dd2a9f74f1 | |
Tyler Goodlet | 49c720af3c | |
Tyler Goodlet | c620517543 | |
Tyler Goodlet | a425c29ef1 | |
Tyler Goodlet | 783914c7fe | |
Tyler Goodlet | 920a394539 | |
Tyler Goodlet | e977597cd0 | |
Tyler Goodlet | 7a33ba64f1 | |
Tyler Goodlet | 191b94b67c | |
Tyler Goodlet | 4ad7b073c3 | |
Tyler Goodlet | d92ff9c7a0 |
@@ -3,9 +3,8 @@ name: CI

 on:
   # Triggers the workflow on push or pull request events but only for the master branch
-  push:
-    branches: [ master ]
   pull_request:
+  push:
     branches: [ master ]

   # Allows you to run this workflow manually from the Actions tab

@@ -14,6 +13,27 @@ on:

 jobs:

+  # test that we can generate a software distribution and install it
+  # thus avoid missing file issues after packaging.
+  sdist-linux:
+    name: 'sdist'
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Setup python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.10'
+
+      - name: Build sdist
+        run: python setup.py sdist --formats=zip
+
+      - name: Install sdist from .zips
+        run: python -m pip install dist/*.zip
+
   testing:
     name: 'install + test-suite'
     runs-on: ubuntu-latest

@@ -50,3 +50,8 @@ prefer_data_account = [
 paper = "XX0000000"
 margin = "X0000000"
 ira = "X0000000"
+
+
+[deribit]
+key_id = 'XXXXXXXX'
+key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'

@@ -3,11 +3,12 @@
 version: "3.5"

 services:
-  ib-gateway:
+  ib_gw_paper:
     # other image tags available:
     # https://github.com/waytrade/ib-gateway-docker#supported-tags
-    image: waytrade/ib-gateway:981.3j
-    restart: always
+    # image: waytrade/ib-gateway:981.3j
+    image: waytrade/ib-gateway:1012.2i
+    restart: 'no'  # restart on boot whenev there's a crash or user clicsk
     network_mode: 'host'

     volumes:

@@ -39,14 +40,12 @@ services:
     # this compose file which looks something like:
     # TWS_USERID='myuser'
     # TWS_PASSWORD='guest'
-    # TRADING_MODE=paper (or live)
-    # VNC_SERVER_PASSWORD='diggity'
-
     environment:
       TWS_USERID: ${TWS_USERID}
       TWS_PASSWORD: ${TWS_PASSWORD}
-      TRADING_MODE: ${TRADING_MODE:-paper}
-      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}
+      TRADING_MODE: 'paper'
+      VNC_SERVER_PASSWORD: 'doggy'
+      VNC_SERVER_PORT: '3003'

     # ports:
     #   - target: 4002

@@ -62,3 +61,40 @@ services:
     #   - "127.0.0.1:4001:4001"
     #   - "127.0.0.1:4002:4002"
     #   - "127.0.0.1:5900:5900"
+
+  # ib_gw_live:
+  #   image: waytrade/ib-gateway:1012.2i
+  #   restart: no
+  #   network_mode: 'host'
+
+  #   volumes:
+  #     - type: bind
+  #       source: ./jts_live.ini
+  #       target: /root/jts/jts.ini
+  #       # don't let ibc clobber this file for
+  #       # the main reason of not having a stupid
+  #       # timezone set..
+  #       read_only: true
+
+  #     # force our own ibc config
+  #     - type: bind
+  #       source: ./ibc.ini
+  #       target: /root/ibc/config.ini
+
+  #     # force our noop script - socat isn't needed in host mode.
+  #     - type: bind
+  #       source: ./fork_ports_delayed.sh
+  #       target: /root/scripts/fork_ports_delayed.sh
+
+  #     # force our noop script - socat isn't needed in host mode.
+  #     - type: bind
+  #       source: ./run_x11_vnc.sh
+  #       target: /root/scripts/run_x11_vnc.sh
+  #       read_only: true
+
+  #   # NOTE: to fill these out, define an `.env` file in the same dir as
+  #   # this compose file which looks something like:
+  #   environment:
+  #     TRADING_MODE: 'live'
+  #     VNC_SERVER_PASSWORD: 'doggy'
+  #     VNC_SERVER_PORT: '3004'

@@ -188,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes
 #
 # The default value is 60.

-LoginDialogDisplayTimeout = 60
+LoginDialogDisplayTimeout=20



@@ -292,7 +292,7 @@ ExistingSessionDetectedAction=primary
 # be set dynamically at run-time: most users will never need it,
 # so don't use it unless you know you need it.

-OverrideTwsApiPort=4002
+; OverrideTwsApiPort=4002


 # Read-only Login

@@ -0,0 +1,33 @@
+[IBGateway]
+ApiOnly=true
+LocalServerPort=4001
+# NOTE: must be set if using IBC's "reject" mode
+TrustedIPs=127.0.0.1
+; RemoteHostOrderRouting=ndc1.ibllc.com
+; WriteDebug=true
+; RemotePortOrderRouting=4001
+; useRemoteSettings=false
+; tradingMode=p
+; Steps=8
+; colorPalletName=dark
+
+# window geo, this may be useful for sending `xdotool` commands?
+; MainWindow.Width=1986
+; screenHeight=3960
+
+
+[Logon]
+Locale=en
+# most markets are oriented around this zone
+# so might as well hard code it.
+TimeZone=America/New_York
+UseSSL=true
+displayedproxymsg=1
+os_titlebar=true
+s3store=true
+useRemoteSettings=false
+
+[Communication]
+ctciAutoEncrypt=true
+Region=usr
+; Peer=cdc1.ibllc.com:4001

@@ -1,16 +1,35 @@
 #!/bin/sh
+# start vnc server and listen for connections
+# on port specced in `$VNC_SERVER_PORT`

-# start VNC server
 x11vnc \
-    -ncache_cr \
-    -listen localhost \
+    -listen 127.0.0.1 \
+    -allow 127.0.0.1 \
+    -rfbport "${VNC_SERVER_PORT}" \
     -display :1 \
     -forever \
     -shared \
-    -logappend /var/log/x11vnc.log \
     -bg \
+    -nowf \
+    -noxdamage \
+    -noxfixes \
+    -no6 \
     -noipv6 \
-    -autoport 3003 \
-    # can't use this because of ``asyncvnc`` issue:
+    # -nowcr \
+    # TODO: can't use this because of ``asyncvnc`` issue:
     # https://github.com/barneygale/asyncvnc/issues/1
     # -passwd 'ibcansmbz'
+
+    # XXX: optional graphics caching flags that seem to rekt the overlay
+    # of the 2 gw windows? When running a single gateway
+    # this seems to maybe optimize some memory usage?
+    # -ncache_cr \
+    # -ncache \
+
+    # NOTE: this will prevent logs from going to the console.
+    # -logappend /var/log/x11vnc.log \
+
+    # where to start allocating ports
+    # -autoport "${VNC_SERVER_PORT}" \

@@ -18,3 +18,10 @@
 piker: trading gear for hackers.

 """
+from ._daemon import open_piker_runtime
+from .data.feed import open_feed
+
+__all__ = [
+    'open_piker_runtime',
+    'open_feed',
+]

@@ -22,10 +22,10 @@ from typing import Optional, Union, Callable, Any
 from contextlib import asynccontextmanager as acm
 from collections import defaultdict

-from pydantic import BaseModel
+from msgspec import Struct
+import tractor
 import trio
 from trio_typing import TaskStatus
-import tractor

 from .log import get_logger, get_console_log
 from .brokers import get_brokermod

@@ -35,7 +35,17 @@ log = get_logger(__name__)

 _root_dname = 'pikerd'

-_registry_addr = ('127.0.0.1', 6116)
+_default_registry_host: str = '127.0.0.1'
+_default_registry_port: int = 6116
+_default_reg_addr: tuple[str, int] = (
+    _default_registry_host,
+    _default_registry_port,
+)
+
+# NOTE: this value is set as an actor-global once the first endpoint
+# who is capable, spawns a `pikerd` service tree.
+_registry_addr: tuple[str, int] | None = None

 _tractor_kwargs: dict[str, Any] = {
     # use a different registry addr then tractor's default
     'arbiter_addr': _registry_addr

@@ -47,16 +57,13 @@ _root_modules = [
 ]


-class Services(BaseModel):
+class Services(Struct):

     actor_n: tractor._supervise.ActorNursery
     service_n: trio.Nursery
     debug_mode: bool  # tractor sub-actor debug mode flag
     service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}

-    class Config:
-        arbitrary_types_allowed = True
-
     async def start_service_task(
         self,
         name: str,

@@ -138,6 +145,7 @@ async def open_pikerd(
     # XXX: you should pretty much never want debug mode
     # for data daemons when running in production.
     debug_mode: bool = False,
+    registry_addr: None | tuple[str, int] = None,

 ) -> Optional[tractor._portal.Portal]:
     '''

@@ -149,7 +157,13 @@ async def open_pikerd(

     '''
     global _services
-    assert _services is None
+    global _registry_addr
+
+    if (
+        _registry_addr is None
+        or registry_addr
+    ):
+        _registry_addr = registry_addr or _default_reg_addr

     # XXX: this may open a root actor as well
     async with (

@@ -195,16 +209,22 @@ async def open_piker_runtime(
     # XXX: you should pretty much never want debug mode
     # for data daemons when running in production.
     debug_mode: bool = False,
+    registry_addr: None | tuple[str, int] = None,

-) -> Optional[tractor._portal.Portal]:
+) -> tractor.Actor:
     '''
-    Start a piker actor who's runtime will automatically
-    sync with existing piker actors in local network
-    based on configuration.
+    Start a piker actor who's runtime will automatically sync with
+    existing piker actors on the local link based on configuration.

     '''
     global _services
-    assert _services is None
+    global _registry_addr
+
+    if (
+        _registry_addr is None
+        or registry_addr
+    ):
+        _registry_addr = registry_addr or _default_reg_addr

     # XXX: this may open a root actor as well
     async with (

@@ -220,7 +240,7 @@ async def open_piker_runtime(
             # TODO: eventually we should be able to avoid
             # having the root have more then permissions to
             # spawn other specialized daemons I think?
-            enable_modules=_root_modules,
+            enable_modules=_root_modules + enable_modules,
         ) as _,
     ):
         yield tractor.current_actor()

@@ -252,6 +272,7 @@ async def maybe_open_runtime(
 @acm
 async def maybe_open_pikerd(
     loglevel: Optional[str] = None,
+    registry_addr: None | tuple = None,
     **kwargs,

 ) -> Union[tractor._portal.Portal, Services]:

|
@ -264,11 +285,19 @@ async def maybe_open_pikerd(
|
||||||
get_console_log(loglevel)
|
get_console_log(loglevel)
|
||||||
|
|
||||||
# subtle, we must have the runtime up here or portal lookup will fail
|
# subtle, we must have the runtime up here or portal lookup will fail
|
||||||
async with maybe_open_runtime(loglevel, **kwargs):
|
async with (
|
||||||
|
maybe_open_runtime(loglevel, **kwargs),
|
||||||
async with tractor.find_actor(_root_dname) as portal:
|
tractor.find_actor(_root_dname) as portal
|
||||||
# assert portal is not None
|
):
|
||||||
if portal is not None:
|
# connect to any existing daemon presuming
|
||||||
|
# its registry socket was selected.
|
||||||
|
if (
|
||||||
|
portal is not None
|
||||||
|
and (
|
||||||
|
registry_addr is None
|
||||||
|
or portal.channel.raddr == registry_addr
|
||||||
|
)
|
||||||
|
):
|
||||||
yield portal
|
yield portal
|
||||||
return
|
return
|
||||||
|
|
||||||
|
@@ -278,6 +307,7 @@ async def maybe_open_pikerd(

             loglevel=loglevel,
             debug_mode=kwargs.get('debug_mode', False),
+            registry_addr=registry_addr,

         ) as _:
             # in the case where we're starting up the

@@ -18,7 +18,10 @@
 Profiling wrappers for internal libs.

 """
+import os
+import sys
 import time
+from time import perf_counter
 from functools import wraps

 # NOTE: you can pass a flag to enable this:

@@ -44,3 +47,184 @@ def timeit(fn):
         return res

     return wrapper
+
+
+# Modified version of ``pyqtgraph.debug.Profiler`` that
+# core seems hesitant to land in:
+# https://github.com/pyqtgraph/pyqtgraph/pull/2281
+class Profiler(object):
+    '''
+    Simple profiler allowing measurement of multiple time intervals.
+
+    By default, profilers are disabled. To enable profiling, set the
+    environment variable `PYQTGRAPHPROFILE` to a comma-separated list of
+    fully-qualified names of profiled functions.
+
+    Calling a profiler registers a message (defaulting to an increasing
+    counter) that contains the time elapsed since the last call. When the
+    profiler is about to be garbage-collected, the messages are passed to the
+    outer profiler if one is running, or printed to stdout otherwise.
+
+    If `delayed` is set to False, messages are immediately printed instead.
+
+    Example:
+        def function(...):
+            profiler = Profiler()
+            ... do stuff ...
+            profiler('did stuff')
+            ... do other stuff ...
+            profiler('did other stuff')
+            # profiler is garbage-collected and flushed at function end
+
+    If this function is a method of class C, setting `PYQTGRAPHPROFILE` to
+    "C.function" (without the module name) will enable this profiler.
+
+    For regular functions, use the qualified name of the function, stripping
+    only the initial "pyqtgraph." prefix from the module.
+    '''
+
+    _profilers = os.environ.get("PYQTGRAPHPROFILE", None)
+    _profilers = _profilers.split(",") if _profilers is not None else []
+
+    _depth = 0
+
+    # NOTE: without this defined at the class level
+    # you won't see apprpriately "nested" sub-profiler
+    # instance calls.
+    _msgs = []
+
+    # set this flag to disable all or individual profilers at runtime
+    disable = False
+
+    class DisabledProfiler(object):
+        def __init__(self, *args, **kwds):
+            pass
+
+        def __call__(self, *args):
+            pass
+
+        def finish(self):
+            pass
+
+        def mark(self, msg=None):
+            pass
+
+    _disabledProfiler = DisabledProfiler()
+
+    def __new__(
+        cls,
+        msg=None,
+        disabled='env',
+        delayed=True,
+        ms_threshold: float = 0.0,
+    ):
+        """Optionally create a new profiler based on caller's qualname.
+
+        ``ms_threshold`` can be set to value in ms for which, if the
+        total measured time of the lifetime of this profiler is **less
+        than** this value, then no profiling messages will be printed.
+        Setting ``delayed=False`` disables this feature since messages
+        are emitted immediately.
+
+        """
+        if (
+            disabled is True
+            or (
+                disabled == 'env'
+                and len(cls._profilers) == 0
+            )
+        ):
+            return cls._disabledProfiler
+
+        # determine the qualified name of the caller function
+        caller_frame = sys._getframe(1)
+        try:
+            caller_object_type = type(caller_frame.f_locals["self"])
+
+        except KeyError:  # we are in a regular function
+            qualifier = caller_frame.f_globals["__name__"].split(".", 1)[-1]
+
+        else:  # we are in a method
+            qualifier = caller_object_type.__name__
+        func_qualname = qualifier + "." + caller_frame.f_code.co_name
+
+        if disabled == 'env' and func_qualname not in cls._profilers:
+            # don't do anything
+            return cls._disabledProfiler
+
+        # create an actual profiling object
+        cls._depth += 1
+        obj = super(Profiler, cls).__new__(cls)
+        obj._name = msg or func_qualname
+        obj._delayed = delayed
+        obj._markCount = 0
+        obj._finished = False
+        obj._firstTime = obj._lastTime = perf_counter()
+        obj._mt = ms_threshold
+        obj._newMsg("> Entering " + obj._name)
+        return obj
+
+    def __call__(self, msg=None):
+        """Register or print a new message with timing information.
+        """
+        if self.disable:
+            return
+        if msg is None:
+            msg = str(self._markCount)
+
+        self._markCount += 1
+        newTime = perf_counter()
+        ms = (newTime - self._lastTime) * 1000
+        self._newMsg("  %s: %0.4f ms", msg, ms)
+        self._lastTime = newTime
+
+    def mark(self, msg=None):
+        self(msg)
+
+    def _newMsg(self, msg, *args):
+        msg = "  " * (self._depth - 1) + msg
+        if self._delayed:
+            self._msgs.append((msg, args))
+        else:
+            print(msg % args)
+
+    def __del__(self):
+        self.finish()
+
+    def finish(self, msg=None):
+        """Add a final message; flush the message list if no parent profiler.
+        """
+        if self._finished or self.disable:
+            return
+
+        self._finished = True
+        if msg is not None:
+            self(msg)
+
+        tot_ms = (perf_counter() - self._firstTime) * 1000
+        self._newMsg(
+            "< Exiting %s, total time: %0.4f ms",
+            self._name,
+            tot_ms,
+        )
+
+        if tot_ms < self._mt:
+            # print(f'{tot_ms} < {self._mt}, clearing')
+            # NOTE: this list **must** be an instance var to avoid
+            # deleting common messages during GC I think?
+            self._msgs.clear()
+        # else:
+        #     print(f'{tot_ms} > {self._mt}, not clearing')
+
+        # XXX: why is this needed?
+        # don't we **want to show** nested profiler messages?
+        if self._msgs:  # and self._depth < 1:
+
+            # if self._msgs:
+            print("\n".join([m[0] % m[1] for m in self._msgs]))
+
+            # clear all entries
+            self._msgs.clear()
+            # type(self)._msgs = []
+
+        type(self)._depth -= 1

@@ -26,10 +26,21 @@ asks.init('trio')

 __brokers__ = [
     'binance',
-    'questrade',
-    'robinhood',
     'ib',
     'kraken',
+
+    # broken but used to work
+    # 'questrade',
+    # 'robinhood',
+
+    # TODO: we should get on these stat!
+    # alpaca
+    # wstrade
+    # iex
+
+    # deribit
+    # kucoin
+    # bitso
 ]


@@ -33,15 +33,23 @@ import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np
 import tractor
-from pydantic.dataclasses import dataclass
-from pydantic import BaseModel
 import wsproto

 from .._cacheables import open_cached_client
-from ._util import resproc, SymbolNotFound
-from ..log import get_logger, get_console_log
-from ..data import ShmArray
-from ..data._web_bs import open_autorecon_ws, NoBsWs
+from ._util import (
+    resproc,
+    SymbolNotFound,
+    DataUnavailable,
+)
+from ..log import (
+    get_logger,
+    get_console_log,
+)
+from ..data.types import Struct
+from ..data._web_bs import (
+    open_autorecon_ws,
+    NoBsWs,
+)

 log = get_logger(__name__)

@@ -79,12 +87,14 @@ _show_wap_in_history = False


 # https://binance-docs.github.io/apidocs/spot/en/#exchange-information
-class Pair(BaseModel):
+class Pair(Struct, frozen=True):
     symbol: str
     status: str

     baseAsset: str
     baseAssetPrecision: int
+    cancelReplaceAllowed: bool
+    allowTrailingStop: bool
     quoteAsset: str
     quotePrecision: int
     quoteAssetPrecision: int

@@ -100,18 +110,21 @@ class Pair(BaseModel):
     isSpotTradingAllowed: bool
     isMarginTradingAllowed: bool

+    defaultSelfTradePreventionMode: str
+    allowedSelfTradePreventionModes: list[str]
+
     filters: list[dict[str, Union[str, int, float]]]
     permissions: list[str]


-@dataclass
-class OHLC:
-    """Description of the flattened OHLC quote format.
+class OHLC(Struct):
+    '''
+    Description of the flattened OHLC quote format.

     For schema details see:
     https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

-    """
+    '''
     time: int

     open: float

@@ -134,7 +147,9 @@ class OHLC:


 # convert datetime obj timestamp to unixtime in milliseconds
-def binance_timestamp(when):
+def binance_timestamp(
+    when: datetime
+) -> int:
     return int((when.timestamp() * 1000) + (when.microsecond / 1000))


@@ -173,7 +188,7 @@ class Client:
         params = {}

         if sym is not None:
-            sym = sym.upper()
+            sym = sym.lower()
             params = {'symbol': sym}

         resp = await self._api(

@@ -230,7 +245,7 @@ class Client:
     ) -> dict:

         if end_dt is None:
-            end_dt = pendulum.now('UTC')
+            end_dt = pendulum.now('UTC').add(minutes=1)

         if start_dt is None:
             start_dt = end_dt.start_of(

@@ -260,6 +275,7 @@ class Client:
         for i, bar in enumerate(bars):

             bar = OHLC(*bar)
+            bar.typecast()

             row = []
             for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):

@@ -287,7 +303,7 @@ async def get_client() -> Client:


 # validation type
-class AggTrade(BaseModel):
+class AggTrade(Struct):
     e: str  # Event type
     E: int  # Event time
     s: str  # Symbol

@@ -341,7 +357,9 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

         elif msg.get('e') == 'aggTrade':

-            # validate
+            # NOTE: this is purely for a definition, ``msgspec.Struct``
+            # does not runtime-validate until you decode/encode.
+            # see: https://jcristharif.com/msgspec/structs.html#type-validation
             msg = AggTrade(**msg)

             # TODO: type out and require this quote format

@@ -352,8 +370,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
                 'brokerd_ts': time.time(),
                 'ticks': [{
                     'type': 'trade',
-                    'price': msg.p,
-                    'size': msg.q,
+                    'price': float(msg.p),
+                    'size': float(msg.q),
                     'broker_ts': msg.T,
                 }],
             }

@@ -384,41 +402,39 @@ async def open_history_client(
     async with open_cached_client('binance') as client:

         async def get_ohlc(
-            end_dt: Optional[datetime] = None,
-            start_dt: Optional[datetime] = None,
+            timeframe: float,
+            end_dt: datetime | None = None,
+            start_dt: datetime | None = None,

         ) -> tuple[
             np.ndarray,
             datetime,  # start
             datetime,  # end
         ]:
+            if timeframe != 60:
+                raise DataUnavailable('Only 1m bars are supported')
+
             array = await client.bars(
                 symbol,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
-            start_dt = pendulum.from_timestamp(array[0]['time'])
-            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            times = array['time']
+            if (
+                end_dt is None
+            ):
+                inow = round(time.time())
+                if (inow - times[-1]) > 60:
+                    await tractor.breakpoint()
+
+            start_dt = pendulum.from_timestamp(times[0])
+            end_dt = pendulum.from_timestamp(times[-1])
+
             return array, start_dt, end_dt

         yield get_ohlc, {'erlangs': 3, 'rate': 3}


-async def backfill_bars(
-    sym: str,
-    shm: ShmArray,  # type: ignore # noqa
-    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
-) -> None:
-    """Fill historical bars into shared mem / storage afap.
-    """
-    with trio.CancelScope() as cs:
-        async with open_cached_client('binance') as client:
-            bars = await client.bars(symbol=sym)
-            shm.push(bars)
-            task_status.started(cs)
-
-
 async def stream_quotes(

     send_chan: trio.abc.SendChannel,

@@ -448,12 +464,20 @@ async def stream_quotes(
         d = cache[sym.upper()]
         syminfo = Pair(**d)  # validation

-        si = sym_infos[sym] = syminfo.dict()
+        si = sym_infos[sym] = syminfo.to_dict()
+        filters = {}
+        for entry in syminfo.filters:
+            ftype = entry['filterType']
+            filters[ftype] = entry
+
         # XXX: after manually inspecting the response format we
         # just directly pick out the info we need
-        si['price_tick_size'] = float(syminfo.filters[0]['tickSize'])
-        si['lot_tick_size'] = float(syminfo.filters[2]['stepSize'])
+        si['price_tick_size'] = float(
+            filters['PRICE_FILTER']['tickSize']
+        )
+        si['lot_tick_size'] = float(
+            filters['LOT_SIZE']['stepSize']
+        )
         si['asset_type'] = 'crypto'

         symbol = symbols[0]

@@ -495,6 +519,7 @@ async def stream_quotes(
             subs.append("{sym}@bookTicker")

         # unsub from all pairs on teardown
+        if ws.connected():
             await ws.send_msg({
                 "method": "UNSUBSCRIBE",
                 "params": subs,

@@ -39,6 +39,148 @@ _config_dir = click.get_app_dir('piker')
 _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')


+OK = '\033[92m'
+WARNING = '\033[93m'
+FAIL = '\033[91m'
+ENDC = '\033[0m'
+
+
+def print_ok(s: str, **kwargs):
+    print(OK + s + ENDC, **kwargs)
+
+
+def print_error(s: str, **kwargs):
+    print(FAIL + s + ENDC, **kwargs)
+
+
+def get_method(client, meth_name: str):
+    print(f'checking client for method \'{meth_name}\'...', end='', flush=True)
+    method = getattr(client, meth_name, None)
+    assert method
+    print_ok('found!.')
+    return method
+
+
+async def run_method(client, meth_name: str, **kwargs):
+    method = get_method(client, meth_name)
+    print('running...', end='', flush=True)
+    result = await method(**kwargs)
+    print_ok(f'done! result: {type(result)}')
+    return result
+
+
+async def run_test(broker_name: str):
+    brokermod = get_brokermod(broker_name)
+    total = 0
+    passed = 0
+    failed = 0
+
+    print(f'getting client...', end='', flush=True)
+    if not hasattr(brokermod, 'get_client'):
+        print_error('fail! no \'get_client\' context manager found.')
+        return
+
+    async with brokermod.get_client(is_brokercheck=True) as client:
+        print_ok(f'done! inside client context.')
+
+        # check for methods present on brokermod
+        method_list = [
+            'backfill_bars',
+            'get_client',
+            'trades_dialogue',
+            'open_history_client',
+            'open_symbol_search',
+            'stream_quotes',
+
+        ]
+
+        for method in method_list:
+            print(
+                f'checking brokermod for method \'{method}\'...',
+                end='', flush=True)
+            if not hasattr(brokermod, method):
+                print_error(f'fail! method \'{method}\' not found.')
+                failed += 1
+            else:
+                print_ok('done!')
+                passed += 1
+
+            total += 1
+
+        # check for methods present con brokermod.Client and their
+        # results
+
+        # for private methods only check is present
+        method_list = [
+            'get_balances',
+            'get_assets',
+            'get_trades',
+            'get_xfers',
+            'submit_limit',
+            'submit_cancel',
+            'search_symbols',
+        ]
+
+        for method_name in method_list:
+            try:
+                get_method(client, method_name)
+                passed += 1
+
+            except AssertionError:
+                print_error(f'fail! method \'{method_name}\' not found.')
+                failed += 1
+
+            total += 1
+
+
+        # check for methods present con brokermod.Client and their
+        # results
+
+        syms = await run_method(client, 'symbol_info')
+        total += 1
+
+        if len(syms) == 0:
+            raise BaseException('Empty Symbol list?')
+
+        passed += 1
+
+        first_sym = tuple(syms.keys())[0]
+
+        method_list = [
+            ('cache_symbols', {}),
+            ('search_symbols', {'pattern': first_sym[:-1]}),
+            ('bars', {'symbol': first_sym})
+        ]
+
+        for method_name, method_kwargs in method_list:
+            try:
+                await run_method(client, method_name, **method_kwargs)
+                passed += 1
+
+            except AssertionError:
+                print_error(f'fail! method \'{method_name}\' not found.')
+                failed += 1
+
+            total += 1
+
+        print(f'total: {total}, passed: {passed}, failed: {failed}')
+
+
+@cli.command()
+@click.argument('broker', nargs=1, required=True)
+@click.pass_obj
+def brokercheck(config, broker):
+    '''
+    Test broker apis for completeness.
+
+    '''
+    async def bcheck_main():
+        async with maybe_spawn_brokerd(broker) as portal:
+            await portal.run(run_test, broker)
+            await portal.cancel_actor()
+
+    trio.run(run_test, broker)
+
+
 @cli.command()
 @click.option('--keys', '-k', multiple=True,
               help='Return results only for these keys')

@@ -193,6 +335,8 @@ def contracts(ctx, loglevel, broker, symbol, ids):
     brokermod = get_brokermod(broker)
     get_console_log(loglevel)

+
+
     contracts = trio.run(partial(core.contracts, brokermod, symbol))
     if not ids:
         # just print out expiry dates which can be used with

@@ -0,0 +1,70 @@
``deribit`` backend
-------------------
pretty good liquidity crypto derivatives exchange; uses a custom json rpc
over ws for client methods, then `cryptofeed` for data streams.

status
******
- supports option charts
- no order support yet


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [deribit]
    key_id = 'XXXXXXXX'
    key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'

To obtain an api id and secret you need to create an account, which can be a
real market account over at:

- deribit.com (requires KYC for deposit address)

Or a testnet account over at:

- test.deribit.com

For testnet, once the account is created here is how you deposit fake crypto
to try it out:

1) Go to Wallet:

.. figure:: assets/0_wallet.png
    :align: center
    :target: assets/0_wallet.png
    :alt: wallet page

2) Then click on the ellipsis menu and select deposit

.. figure:: assets/1_wallet_select_deposit.png
    :align: center
    :target: assets/1_wallet_select_deposit.png
    :alt: wallet deposit page

3) This will take you to the deposit address page

.. figure:: assets/2_gen_deposit_addr.png
    :align: center
    :target: assets/2_gen_deposit_addr.png
    :alt: generate deposit address page

4) After clicking generate you should see the address; copy it and go to the
   `coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send
   fake coins to that address.

.. figure:: assets/3_deposit_address.png
    :align: center
    :target: assets/3_deposit_address.png
    :alt: generated address

5) Back in the deposit address page you should see the deposit in your history

.. figure:: assets/4_wallet_deposit_history.png
    :align: center
    :target: assets/4_wallet_deposit_history.png
    :alt: wallet deposit history
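To sanity check the section outside of piker, the file parses with the
stdlib TOML reader (python 3.11+); the config path below is an assumption,
piker's own ``config.load()`` is the authoritative resolver:

.. code:: python

    import tomllib
    from pathlib import Path

    # NOTE: assumed default config location
    path = Path('~/.config/piker/brokers.toml').expanduser()
    with path.open('rb') as f:
        conf = tomllib.load(f)

    assert {'key_id', 'key_secret'} <= conf['deribit'].keys()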
@@ -0,0 +1,65 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''
from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
    backfill_bars,
)
# from .broker import (
#     trades_dialogue,
#     norm_trade_records,
# )

__all__ = [
    'get_client',
    # 'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    # 'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    # 'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True
@@ -0,0 +1,672 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''
import json
import time
import asyncio

from contextlib import asynccontextmanager as acm, AsyncExitStack
from functools import partial
from datetime import datetime
from typing import Any, Optional, Iterable, Callable

import pendulum
import asks
import trio
from trio_typing import Nursery, TaskStatus
from fuzzywuzzy import process as fuzzy
import numpy as np

from piker.data.types import Struct
from piker.data._web_bs import (
    NoBsWs,
    open_autorecon_ws,
    open_jsonrpc_session
)

from .._util import resproc

from piker import config
from piker.log import get_logger

from tractor.trionics import (
    broadcast_receiver,
    BroadcastReceiver,
    maybe_open_context
)
from tractor import to_asyncio

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT,
    L1_BOOK, TRADES,
    OPTION, CALL, PUT,
    FILLS, ORDER_INFO,  # used by the order feed relay below
)
from cryptofeed.symbols import Symbol

log = get_logger(__name__)
_spawn_kwargs = {
    'infect_asyncio': True,
}


_url = 'https://www.deribit.com'
_ws_url = 'wss://www.deribit.com/ws/api/v2'
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'


# Broker specific ohlc schema (rest)
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),  # will be zeroed by sampler if not filled
]
class JSONRPCResult(Struct):
    # NOTE: required fields must precede fields with defaults
    # for a valid ``Struct`` definition.
    id: int
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
    jsonrpc: str = '2.0'
    result: Optional[dict] = None
    error: Optional[dict] = None


class JSONRPCChannel(Struct):
    method: str
    params: dict
    jsonrpc: str = '2.0'


class KLinesResult(Struct):
    close: list[float]
    cost: list[float]
    high: list[float]
    low: list[float]
    open: list[float]
    status: str
    ticks: list[int]
    volume: list[float]


class Trade(Struct):
    trade_seq: int
    trade_id: str
    timestamp: int
    tick_direction: int
    price: float
    mark_price: float
    iv: float
    instrument_name: str
    index_price: float
    direction: str
    amount: float
    combo_trade_id: Optional[int] = 0
    combo_id: Optional[str] = ''


class LastTradesResult(Struct):
    trades: list[Trade]
    has_more: bool
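# a minimal decode sketch (assuming ``Struct`` above wraps
# ``msgspec.Struct``, which is what makes typed decoding work):
#
#     import msgspec
#
#     raw = (
#         b'{"jsonrpc": "2.0", "id": 1, "result": {},'
#         b' "usIn": 1, "usOut": 3, "usDiff": 2, "testnet": true}'
#     )
#     msg = msgspec.json.decode(raw, type=JSONRPCResult)
#     assert msg.usDiff == msg.usOut - msg.usIn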
# convert datetime obj timestamp to unixtime in milliseconds
def deribit_timestamp(when):
    # NOTE: ``.timestamp()`` already includes fractional seconds so
    # scaling by 1e3 alone yields milliseconds.
    return int(when.timestamp() * 1000)
def str_to_cb_sym(name: str) -> Symbol:
    base, strike_price, expiry_date, option_type = name.split('-')

    quote = base

    if option_type == 'put':
        option_type = PUT
    elif option_type == 'call':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date,
        expiry_normalize=False)


def piker_sym_to_cb_sym(name: str) -> Symbol:
    base, expiry_date, strike_price, option_type = tuple(
        name.upper().split('-'))

    quote = base

    if option_type == 'P':
        option_type = PUT
    elif option_type == 'C':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date.upper())


def cb_sym_to_deribit_inst(sym: Symbol):
    # cryptofeed normalized month codes
    cb_norm = [
        'F', 'G', 'H', 'J', 'K', 'M',
        'N', 'Q', 'U', 'V', 'X', 'Z',
    ]

    # deribit specific month names
    months = [
        'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
        'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC',
    ]

    exp = sym.expiry_date

    # YYMDD
    # 01234
    year, month, day = (
        exp[:2], months[cb_norm.index(exp[2:3])], exp[3:])

    otype = 'C' if sym.option_type == CALL else 'P'

    return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'
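# for orientation, a sketch of the naming layers the helpers above
# translate between (strings are illustrative assumptions, not
# verified output):
#
#   piker/fqsn style (lowercase, dash-delimited):
#       'btc-13jan23-17000-c'
#   cryptofeed normalized expiry ('YYMDD' with a futures month
#   code, e.g. 'F' == JAN):
#       '23F13'
#   deribit native instrument name:
#       'BTC-13JAN23-17000-C'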
def get_config() -> dict[str, Any]:

    conf, path = config.load()

    section = conf.get('deribit')

    # TODO: document why we send this; these are the logging
    # params consumed by ``cryptofeed``.
    conf['log'] = {}
    conf['log']['disabled'] = True

    if section is None:
        log.warning(f'No config section found for deribit in {path}')

    return conf
class Client:

    def __init__(self, json_rpc: Callable) -> None:
        self._pairs: dict[str, Any] = None

        config = get_config().get('deribit', {})

        if ('key_id' in config) and ('key_secret' in config):
            self._key_id = config['key_id']
            self._key_secret = config['key_secret']

        else:
            self._key_id = None
            self._key_secret = None

        self.json_rpc = json_rpc

    @property
    def currencies(self):
        return ['btc', 'eth', 'sol', 'usd']

    async def get_balances(self, kind: str = 'option') -> dict[str, float]:
        """Return the set of positions for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_positions', params={
                    'currency': currency.upper(),
                    'kind': kind})

            balances[currency] = resp.result

        return balances

    async def get_assets(self) -> dict[str, float]:
        """Return the set of asset balances for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_account_summary', params={
                    'currency': currency.upper()})

            balances[currency] = resp.result['balance']

        return balances

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float
    ) -> dict:
        """Place an order
        """
        params = {
            'instrument_name': symbol.upper(),
            'amount': size,
            'type': 'limit',
            'price': price,
        }
        resp = await self.json_rpc(
            f'private/{action}', params)

        return resp.result

    async def submit_cancel(self, oid: str):
        """Send cancel request for order id
        """
        resp = await self.json_rpc(
            'private/cancel', {'order_id': oid})
        return resp.result
    async def symbol_info(
        self,
        instrument: Optional[str] = None,
        currency: str = 'btc',  # BTC, ETH, SOL, USDC
        kind: str = 'option',
        expired: bool = False
    ) -> dict[str, Any]:
        """Get symbol info for the exchange.

        """
        if self._pairs:
            return self._pairs

        # will retrieve all symbols by default
        params = {
            'currency': currency.upper(),
            'kind': kind,
            'expired': str(expired).lower()
        }

        resp = await self.json_rpc('public/get_instruments', params)
        results = resp.result

        instruments = {
            item['instrument_name'].lower(): item
            for item in results
        }

        if instrument is not None:
            return instruments[instrument]
        else:
            return instruments

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs
    async def search_symbols(
        self,
        pattern: str,
        limit: int = 30,
    ) -> dict[str, Any]:
        data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=35,
            limit=limit
        )
        # repack in dict form
        return {
            item[0]['instrument_name'].lower(): item[0]
            for item in matches
        }
    async def bars(
        self,
        symbol: str,
        start_dt: Optional[datetime] = None,
        end_dt: Optional[datetime] = None,
        limit: int = 1000,
        as_np: bool = True,
    ) -> dict:
        instrument = symbol

        if end_dt is None:
            end_dt = pendulum.now('UTC')

        if start_dt is None:
            start_dt = end_dt.start_of(
                'minute').subtract(minutes=limit)

        start_time = deribit_timestamp(start_dt)
        end_time = deribit_timestamp(end_dt)

        # https://docs.deribit.com/#public-get_tradingview_chart_data
        resp = await self.json_rpc(
            'public/get_tradingview_chart_data',
            params={
                'instrument_name': instrument.upper(),
                'start_timestamp': start_time,
                'end_timestamp': end_time,
                'resolution': '1'
            })

        result = KLinesResult(**resp.result)
        new_bars = []
        for i in range(len(result.close)):

            row = [
                (start_time + (i * (60 * 1000))) / 1000.0,  # time
                result.open[i],
                result.high[i],
                result.low[i],
                result.close[i],
                result.volume[i],
                0,  # bar_wap, zeroed by sampler if not filled
            ]

            new_bars.append((i,) + tuple(row))

        return np.array(new_bars, dtype=_ohlc_dtype) if as_np else new_bars
    async def last_trades(
        self,
        instrument: str,
        count: int = 10
    ):
        resp = await self.json_rpc(
            'public/get_last_trades_by_instrument',
            params={
                'instrument_name': instrument,
                'count': count
            })

        return LastTradesResult(**resp.result)
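# read-back sketch for the struct array returned by ``Client.bars()``
# (field names per ``_ohlc_dtype`` above):
#
#     async def show_last_close(client: Client, instrument: str) -> None:
#         array = await client.bars(instrument)
#         print(array['time'][-1], array['close'][-1])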
@acm
async def get_client(
    is_brokercheck: bool = False
) -> Client:

    async with (
        trio.open_nursery() as n,
        open_jsonrpc_session(
            _testnet_ws_url, dtype=JSONRPCResult) as json_rpc
    ):
        client = Client(json_rpc)

        _refresh_token: Optional[str] = None
        _access_token: Optional[str] = None

        async def _auth_loop(
            task_status: TaskStatus = trio.TASK_STATUS_IGNORED
        ):
            """Background task that acquires a first access token and then
            refreshes the access token while the nursery isn't cancelled.

            https://docs.deribit.com/?python#authentication-2
            """
            renew_time = 10
            access_scope = 'trade:read_write'
            _expiry_time = time.time()
            got_access = False
            nonlocal _refresh_token
            nonlocal _access_token

            while True:
                if _expiry_time - time.time() < renew_time:
                    # if we are close to token expiry time

                    if _refresh_token is not None:
                        # if we have a refresh token already dont need to send
                        # secret
                        params = {
                            'grant_type': 'refresh_token',
                            'refresh_token': _refresh_token,
                            'scope': access_scope
                        }

                    else:
                        # we don't have refresh token, send secret to initialize
                        params = {
                            'grant_type': 'client_credentials',
                            'client_id': client._key_id,
                            'client_secret': client._key_secret,
                            'scope': access_scope
                        }

                    resp = await json_rpc('public/auth', params)
                    result = resp.result

                    _expiry_time = time.time() + result['expires_in']
                    _refresh_token = result['refresh_token']

                    if 'access_token' in result:
                        _access_token = result['access_token']

                    if not got_access:
                        # first time this loop runs we must indicate task is
                        # started, we have auth
                        got_access = True
                        task_status.started()

                else:
                    await trio.sleep(renew_time / 2)

        # if we have client creds launch auth loop
        if client._key_id is not None:
            await n.start(_auth_loop)

        await client.cache_symbols()
        yield client
        n.cancel_scope.cancel()
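# usage sketch, e.g. from inside a trio task:
#
#     async def demo() -> None:
#         async with get_client() as client:
#             matches = await client.search_symbols('btc')
#             name = next(iter(matches))
#             last = await client.last_trades(name.upper(), count=1)
#             print(last.trades[0])
#
# note the session above currently always connects to the testnet url.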
@acm
async def open_feed_handler():
    fh = FeedHandler(config=get_config())
    yield fh
    await to_asyncio.run_task(fh.stop_async)


@acm
async def maybe_open_feed_handler() -> trio.abc.ReceiveStream:
    async with maybe_open_context(
        acm_func=open_feed_handler,
        key='feedhandler',
    ) as (cache_hit, fh):
        yield fh
async def aio_price_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    async def _trade(data: dict, receipt_timestamp):
        to_trio.send_nowait(('trade', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'last': data,
            'broker_ts': time.time(),
            'data': data.to_dict(),
            'receipt': receipt_timestamp
        }))

    async def _l1(data: dict, receipt_timestamp):
        to_trio.send_nowait(('l1', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'ticks': [
                {'type': 'bid',
                 'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'bsize',
                 'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'ask',
                 'price': float(data.ask_price), 'size': float(data.ask_size)},
                {'type': 'asize',
                 'price': float(data.ask_price), 'size': float(data.ask_size)}
            ]
        }))

    fh.add_feed(
        DERIBIT,
        channels=[TRADES, L1_BOOK],
        symbols=[piker_sym_to_cb_sym(instrument)],
        callbacks={
            TRADES: _trade,
            L1_BOOK: _l1
        })

    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    await asyncio.sleep(float('inf'))
@acm
async def open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    async with maybe_open_feed_handler() as fh:
        async with to_asyncio.open_channel_from(
            partial(
                aio_price_feed_relay,
                fh,
                instrument
            )
        ) as (first, chan):
            yield chan
@acm
async def maybe_open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:

    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_price_feed,
        kwargs={
            'instrument': instrument
        },
        key=f'{instrument}-price',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed
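# design note: on a ``cache_hit`` the underlying feed already has at
# least one consumer, so new callers get a ``broadcast_receiver`` which
# fans the single ws relay out to multiple subscribers instead of
# spawning a second relay task.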
async def aio_order_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    async def _fill(data: dict, receipt_timestamp):
        breakpoint()

    async def _order_info(data: dict, receipt_timestamp):
        breakpoint()

    fh.add_feed(
        DERIBIT,
        channels=[FILLS, ORDER_INFO],
        symbols=[instrument.upper()],
        callbacks={
            FILLS: _fill,
            ORDER_INFO: _order_info,
        })

    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    await asyncio.sleep(float('inf'))
@acm
async def open_order_feed(
    instrument: list[str]
) -> trio.abc.ReceiveStream:
    async with maybe_open_feed_handler() as fh:
        async with to_asyncio.open_channel_from(
            partial(
                aio_order_feed_relay,
                fh,
                instrument
            )
        ) as (first, chan):
            yield chan
@acm
async def maybe_open_order_feed(
    instrument: str
) -> trio.abc.ReceiveStream:

    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_order_feed,
        kwargs={
            'instrument': instrument,
        },
        key=f'{instrument}-order',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed
(five binary screenshot assets added under ``assets/`` — the wallet
walkthrough figures referenced in the deribit README above; 59–169 KiB each,
not shown)
@@ -0,0 +1,185 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import Any, Optional, Callable
import time

import trio
from trio_typing import TaskStatus
import pendulum
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor

from piker._cacheables import open_cached_client
from piker.log import get_logger, get_console_log
from piker.data import ShmArray
from piker.brokers._util import (
    BrokerError,
    DataUnavailable,
)

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
)
from cryptofeed.symbols import Symbol

from .api import (
    Client, Trade,
    get_config,
    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
    maybe_open_price_feed
)

_spawn_kwargs = {
    'infect_asyncio': True,
}


log = get_logger(__name__)
@acm
async def open_history_client(
    instrument: str,
) -> tuple[Callable, int]:

    # TODO: implement history getter for the new storage layer.
    async with open_cached_client('deribit') as client:

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            array = await client.bars(
                instrument,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            if len(array) == 0:
                raise DataUnavailable

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])

            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}
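# NOTE: the dict yielded alongside ``get_ohlc`` appears to act as a
# throttle hint for the history backfiller (concurrent request depth
# and request rate); the exact semantics are defined by piker's feed
# layer, not this backend.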
async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    sym = symbols[0]

    async with (
        open_cached_client('deribit') as client,
        send_chan as send_chan
    ):

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            sym: {
                'symbol_info': {
                    'asset_type': 'option',
                    'price_tick_size': 0.0005
                },
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        nsym = piker_sym_to_cb_sym(sym)

        async with maybe_open_price_feed(sym) as stream:

            cache = await client.cache_symbols()

            last_trades = (await client.last_trades(
                cb_sym_to_deribit_inst(nsym), count=1)).trades

            if len(last_trades) == 0:
                last_trade = None
                async for typ, quote in stream:
                    if typ == 'trade':
                        last_trade = Trade(**(quote['data']))
                        break

            else:
                last_trade = Trade(**(last_trades[0]))

            first_quote = {
                'symbol': sym,
                'last': last_trade.price,
                'brokerd_ts': last_trade.timestamp,
                'ticks': [{
                    'type': 'trade',
                    'price': last_trade.price,
                    'size': last_trade.amount,
                    'broker_ts': last_trade.timestamp
                }]
            }
            task_status.started((init_msgs, first_quote))

            feed_is_live.set()

            async for typ, quote in stream:
                topic = quote['symbol']
                await send_chan.send({topic: quote})
@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> Client:
    async with open_cached_client('deribit') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started()

        async with ctx.open_stream() as stream:

            async for pattern in stream:
                # repack in dict form
                await stream.send(
                    await client.search_symbols(pattern))
@@ -0,0 +1,134 @@
``ib`` backend
--------------
more or less the "everything broker" for traditional and international
markets. they are the "go to" provider for automatic retail trading
and we interface to their APIs using the `ib_insync` project.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.

currently there is not yet full support for:
- options charting and trading
- paxos based crypto rt feeds and trading


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [ib]
    hosts = [
        "127.0.0.1",
    ]
    # TODO: when we eventually spawn gateways in our
    # container, we can just dynamically allocate these
    # using IBC.
    ports = [
        4002,
        4003,
        4006,
        4001,
        7497,
    ]

    # XXX: for a paper account the flex web query service
    # is not supported so you have to manually download
    # an XML report and put it in a location that can be
    # accessed by the ``brokerd.ib`` backend code for parsing.
    flex_token = '1111111111111111'
    flex_trades_query_id = '6969696'  # live accounts only?

    # 3rd party web-api token
    # (XXX: not sure if this works yet)
    trade_log_token = '111111111111111'

    # when clients are being scanned this determines
    # which clients are preferred to be used for data feeds
    # based on account names which are detected as active
    # on each client.
    prefer_data_account = [
        # this has to be first in order to make data work with dual paper + live
        'main',
        'algopaper',
    ]

    [ib.accounts]
    main = 'U69696969'
    algopaper = 'DU9696969'


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <pikerk_conf_dir>/ledgers/trades_ib_algopaper.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    ["0000e1a7.630f5e5a.01.01"]
    secType = "FUT"
    conId = 515416577
    symbol = "MNQ"
    lastTradeDateOrContractMonth = "20221216"
    strike = 0.0
    right = ""
    multiplier = "2"
    exchange = "GLOBEX"
    primaryExchange = ""
    currency = "USD"
    localSymbol = "MNQZ2"
    tradingClass = "MNQ"
    includeExpired = false
    secIdType = ""
    secId = ""
    comboLegsDescrip = ""
    comboLegs = []
    execId = "0000e1a7.630f5e5a.01.01"
    time = 1661972086.0
    acctNumber = "DU69696969"
    side = "BOT"
    shares = 1.0
    price = 12372.75
    permId = 441472655
    clientId = 6116
    orderId = 985
    liquidation = 0
    cumQty = 1.0
    avgPrice = 12372.75
    orderRef = ""
    evRule = ""
    evMultiplier = 0.0
    modelCode = ""
    lastLiquidity = 1
    broker_time = 1661972086.0
    name = "ib"
    commission = 0.57
    realizedPNL = 243.41
    yield_ = 0.0
    yieldRedemptionDate = 0
    listingExchange = "GLOBEX"
    date = "2022-08-31T18:54:46+00:00"


your ``pps.toml`` file will have position entries like,

.. code:: toml

    [ib.algopaper."mnq.globex.20221216"]
    size = -1.0
    ppu = 12423.630576923071
    bsuid = 515416577
    expiry = "2022-12-16T00:00:00+00:00"
    clears = [
        { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
    ]
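For offline inspection of such a ledger, the file parses with the stdlib
TOML reader (python 3.11+); the concrete path below is an assumption for
``<pikerk_conf_dir>``:

.. code:: python

    import tomllib
    from pathlib import Path

    path = Path(
        '~/.config/piker/ledgers/trades_ib_algopaper.toml').expanduser()
    with path.open('rb') as f:
        ledger = tomllib.load(f)

    # one entry per fill, keyed by execution id
    for tid, fill in ledger.items():
        print(tid, fill['symbol'], fill['side'], fill['shares'], fill['price'])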
@@ -20,15 +20,10 @@ Interactive Brokers API backend.
 Sub-modules within break into the core functionalities:

 - ``broker.py`` part for orders / trading endpoints
-- ``data.py`` for real-time data feed endpoints
+- ``feed.py`` for real-time data feed endpoints
-- ``client.py`` for the core API machinery which is ``trio``-ized
+- ``api.py`` for the core API machinery which is ``trio``-ized
   wrapping around ``ib_insync``.
-- ``report.py`` for the hackery to build manual pp calcs
-  to avoid ib's absolute bullshit FIFO style position
-  tracking..

 """
 from .api import (
     get_client,
@@ -38,7 +33,10 @@ from .feed import (
     open_symbol_search,
     stream_quotes,
 )
-from .broker import trades_dialogue
+from .broker import (
+    trades_dialogue,
+    norm_trade_records,
+)

 __all__ = [
     'get_client',
@@ -29,6 +29,7 @@ import itertools
 from math import isnan
 from typing import (
     Any,
+    Optional,
     Union,
 )
 import asyncio
@@ -38,16 +39,30 @@ import time
 from types import SimpleNamespace


+from bidict import bidict
 import trio
 import tractor
 from tractor import to_asyncio
-from ib_insync.wrapper import RequestError
-from ib_insync.contract import Contract, ContractDetails
+import pendulum
+import ib_insync as ibis
+from ib_insync.contract import (
+    Contract,
+    ContractDetails,
+    Option,
+)
 from ib_insync.order import Order
 from ib_insync.ticker import Ticker
-from ib_insync.objects import Position
-import ib_insync as ibis
-from ib_insync.wrapper import Wrapper
+from ib_insync.objects import (
+    BarDataList,
+    Position,
+    Fill,
+    Execution,
+    CommissionReport,
+)
+from ib_insync.wrapper import (
+    Wrapper,
+    RequestError,
+)
 from ib_insync.client import Client as ib_Client
 import numpy as np
@@ -65,26 +80,11 @@ _time_units = {
     'h': ' hours',
 }

-_time_frames = {
-    '1s': '1 Sec',
-    '5s': '5 Sec',
-    '30s': '30 Sec',
-    '1m': 'OneMinute',
-    '2m': 'TwoMinutes',
-    '3m': 'ThreeMinutes',
-    '4m': 'FourMinutes',
-    '5m': 'FiveMinutes',
-    '10m': 'TenMinutes',
-    '15m': 'FifteenMinutes',
-    '20m': 'TwentyMinutes',
-    '30m': 'HalfHour',
-    '1h': 'OneHour',
-    '2h': 'TwoHours',
-    '4h': 'FourHours',
-    'D': 'OneDay',
-    'W': 'OneWeek',
-    'M': 'OneMonth',
-    'Y': 'OneYear',
-}
+_bar_sizes = {
+    1: '1 Sec',
+    60: '1 min',
+    60*60: '1 hour',
+    24*60*60: '1 day',
+}

 _show_wap_in_history: bool = False
@@ -155,70 +155,118 @@ class NonShittyIB(ibis.IB):
         self.client.apiEnd += self.disconnectedEvent


+_futes_venues = (
+    'GLOBEX',
+    'NYMEX',
+    'CME',
+    'CMECRYPTO',
+    'COMEX',
+    'CMDTY',  # special name case..
+    'CBOT',  # (treasury) yield futures
+)
+
+_adhoc_futes_set = {
+
+    # equities
+    'nq.cme',
+    'mnq.cme',  # micro
+
+    'es.cme',
+    'mes.cme',  # micro
+
+    # cypto$
+    'brr.cme',
+    'ethusdrr.cme',
+
+    # agriculture
+    'he.comex',  # lean hogs
+    'le.comex',  # live cattle (geezers)
+    'gf.comex',  # feeder cattle (younguns)
+
+    # raw
+    'lb.comex',  # random len lumber
+
+    # metals
+    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
+    'xauusd.cmdty',  # london gold spot ^
+    'gc.comex',
+    'mgc.comex',  # micro
+
+    # oil & gas
+    'cl.comex',
+
+    'xagusd.cmdty',  # silver spot
+    'ni.comex',  # silver futes
+    'qi.comex',  # mini-silver futes
+
+    # treasury yields
+    # etfs by duration:
+    # SHY -> IEI -> IEF -> TLT
+    'zt.cbot',  # 2y
+    'z3n.cbot',  # 3y
+    'zf.cbot',  # 5y
+    'zn.cbot',  # 10y
+    'zb.cbot',  # 30y
+
+    # (micros of above)
+    '2yy.cbot',
+    '5yy.cbot',
+    '10y.cbot',
+    '30y.cbot',
+}
+
+
+# taken from list here:
+# https://www.interactivebrokers.com/en/trading/products-spot-currencies.php
+_adhoc_fiat_set = set((
+    'USD, AED, AUD, CAD, '
+    'CHF, CNH, CZK, DKK, '
+    'EUR, GBP, HKD, HUF, '
+    'ILS, JPY, MXN, NOK, '
+    'NZD, PLN, RUB, SAR, '
+    'SEK, SGD, TRY, ZAR'
+    ).split(', ')
+)
+
+
 # map of symbols to contract ids
-_adhoc_cmdty_data_map = {
+_adhoc_symbol_map = {
     # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924

     # NOTE: some cmdtys/metals don't have trade data like gold/usd:
     # https://groups.io/g/twsapi/message/44174
     'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
 }
+for qsn in _adhoc_futes_set:
+    sym, venue = qsn.split('.')
+    assert venue.upper() in _futes_venues, f'{venue}'
+    _adhoc_symbol_map[sym.upper()] = (
+        {'exchange': venue},
+        {},
+    )

-_futes_venues = (
-    'GLOBEX',
-    'NYMEX',
-    'CME',
-    'CMECRYPTO',
-)
-
-_adhoc_futes_set = {
-
-    # equities
-    'nq.globex',
-    'mnq.globex',
-
-    'es.globex',
-    'mes.globex',
-
-    # cypto$
-    'brr.cmecrypto',
-    'ethusdrr.cmecrypto',
-
-    # agriculture
-    'he.globex',  # lean hogs
-    'le.globex',  # live cattle (geezers)
-    'gf.globex',  # feeder cattle (younguns)
-
-    # raw
-    'lb.globex',  # random len lumber
-
-    # metals
-    'xauusd.cmdty',  # gold spot
-    'gc.nymex',
-    'mgc.nymex',
-
-    'xagusd.cmdty',  # silver spot
-    'ni.nymex',  # silver futes
-    'qi.comex',  # mini-silver futes
-}

 # exchanges we don't support at the moment due to not knowing
 # how to do symbol-contract lookup correctly likely due
 # to not having the data feeds subscribed.
 _exch_skip_list = {

     'ASX',  # aussie stocks
     'MEXI',  # mexican stocks
-    'VALUE',  # no idea
-}
-
-# https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
+
+    # no idea
+    'VALUE',
+    'FUNDSERV',
+    'SWB2',
+    'PSE',
+}

 _enters = 0


 def bars_to_np(bars: list) -> np.ndarray:
     '''
-    Convert a "bars list thing" (``BarsList`` type from ibis)
+    Convert a "bars list thing" (``BarDataList`` type from ibis)
     into a numpy struct array.

     '''
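# sketch of what the loop above produces, e.g. for 'mnq.cme' from
# ``_adhoc_futes_set`` (note the venue string keeps the fqsn's
# lowercase casing):
#
#     con_kwargs, bars_kwargs = _adhoc_symbol_map['MNQ']
#     assert con_kwargs == {'exchange': 'cme'}
#     assert bars_kwargs == {}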
@@ -238,6 +286,27 @@ def bars_to_np(bars: list) -> np.ndarray:
     return nparr


+# NOTE: pacing violations exist for higher sample rates:
+# https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations
+# Also see note on duration limits being lifted on 1m+ periods,
+# but they say "use with discretion":
+# https://interactivebrokers.github.io/tws-api/historical_limitations.html#non-available_hd
+_samplings: dict[int, tuple[str, str, pendulum.Duration]] = {
+    1: (
+        '1 secs',
+        f'{int(2e3)} S',
+        pendulum.duration(seconds=2e3),
+    ),
+    # TODO: benchmark >1 D duration on query to see if
+    # throughput can be made faster during backfilling.
+    60: (
+        '1 min',
+        '1 D',
+        pendulum.duration(days=1),
+    ),
+}
+
+
 class Client:
     '''
     IB wrapped for our broker backend API.
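# to make the ``_samplings`` mapping above concrete, a 1m-resolution
# history request resolves as:
#
#     bar_size, duration, dt_duration = _samplings[60]
#     # -> ('1 min', '1 D', pendulum.duration(days=1))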
@@ -261,27 +330,29 @@ class Client:

     # NOTE: the ib.client here is "throttled" to 45 rps by default

-    async def trades(
-        self,
-        # api_only: bool = False,
-
-    ) -> dict[str, Any]:
-
-        # orders = await self.ib.reqCompletedOrdersAsync(
-        #     apiOnly=api_only
-        # )
-        fills = await self.ib.reqExecutionsAsync()
-        norm_fills = []
+    async def trades(self) -> dict[str, Any]:
+        '''
+        Return list of trade-fills from current session in ``dict``.
+
+        '''
+        fills: list[Fill] = self.ib.fills()
+        norm_fills: list[dict] = []
         for fill in fills:
             fill = fill._asdict()  # namedtuple
-            for key, val in fill.copy().items():
-                if isinstance(val, Contract):
+            for key, val in fill.items():
+                match val:
+                    case Contract() | Execution() | CommissionReport():
                         fill[key] = asdict(val)

             norm_fills.append(fill)

         return norm_fills

+    async def orders(self) -> list[Order]:
+        return await self.ib.reqAllOpenOrdersAsync(
+            apiOnly=False,
+        )
+
     async def bars(
         self,
         fqsn: str,
@@ -290,52 +361,55 @@ class Client:
         start_dt: Union[datetime, str] = "1970-01-01T00:00:00.000000-05:00",
         end_dt: Union[datetime, str] = "",

-        sample_period_s: str = 1,  # ohlc sample period
-        period_count: int = int(2e3),  # <- max per 1s sample query
-
-    ) -> list[dict[str, Any]]:
+        # ohlc sample period in seconds
+        sample_period_s: int = 1,
+
+        # optional "duration of time" equal to the
+        # length of the returned history frame.
+        duration: Optional[str] = None,
+
+        **kwargs,
+
+    ) -> tuple[BarDataList, np.ndarray, pendulum.Duration]:
         '''
         Retrieve OHLCV bars for a fqsn over a range to the present.

         '''
+        # See API docs here:
+        # https://interactivebrokers.github.io/tws-api/historical_data.html
         bars_kwargs = {'whatToShow': 'TRADES'}
+        bars_kwargs.update(kwargs)
+        bar_size, duration, dt_duration = _samplings[sample_period_s]

         global _enters
         # log.info(f'REQUESTING BARS {_enters} @ end={end_dt}')
-        print(f'REQUESTING BARS {_enters} @ end={end_dt}')
+        print(
+            f"REQUESTING {duration}'s worth {bar_size} BARS\n"
+            f'{_enters} @ end={end_dt}'
+        )

         if not end_dt:
             end_dt = ''

         _enters += 1

-        contract = await self.find_contract(fqsn)
+        contract = (await self.find_contracts(fqsn))[0]
         bars_kwargs.update(getattr(contract, 'bars_kwargs', {}))

-        # _min = min(2000*100, count)
         bars = await self.ib.reqHistoricalDataAsync(
             contract,
             endDateTime=end_dt,
             formatDate=2,

             # OHLC sampling values:
             # 1 secs, 5 secs, 10 secs, 15 secs, 30 secs, 1 min, 2 mins,
             # 3 mins, 5 mins, 10 mins, 15 mins, 20 mins, 30 mins,
             # 1 hour, 2 hours, 3 hours, 4 hours, 8 hours, 1 day, 1W, 1M
-            # barSizeSetting='1 secs',
+            barSizeSetting=bar_size,

-            # durationStr='{count} S'.format(count=15000 * 5),
-            # durationStr='{count} D'.format(count=1),
-            # barSizeSetting='5 secs',
+            # time history length values format:
+            # ``durationStr=integer{SPACE}unit (S|D|W|M|Y)``
+            durationStr=duration,

-            durationStr='{count} S'.format(count=period_count),
-            # barSizeSetting='5 secs',
-            barSizeSetting='1 secs',
-
-            # barSizeSetting='1 min',
-
             # always use extended hours
             useRTH=False,
@@ -346,11 +420,21 @@ class Client:
             # whatToShow='TRADES',
         )
         if not bars:
-            # TODO: raise underlying error here
-            raise ValueError(f"No bars retreived for {fqsn}?")
+            # NOTE: there's 2 cases here to handle (and this should be
+            # read alongside the implementation of
+            # ``.reqHistoricalDataAsync()``):
+            # - no data is returned for the period likely due to
+            #   a weekend, holiday or other non-trading period prior to
+            #   ``end_dt`` which exceeds the ``duration``,
+            # - a timeout occurred in which case insync internals return
+            #   an empty list thing with bars.clear()...
+            return [], np.empty(0), dt_duration
+            # TODO: we could maybe raise ``NoData`` instead if we
+            # rewrite the method in the first case? right now there's no
+            # way to detect a timeout.

         nparr = bars_to_np(bars)
-        return bars, nparr
+        return bars, nparr, dt_duration

     async def con_deats(
         self,
@@ -364,7 +448,15 @@ class Client:
             futs.append(self.ib.reqContractDetailsAsync(con))

         # batch request all details
-        results = await asyncio.gather(*futs)
+        try:
+            results = await asyncio.gather(*futs)
+        except RequestError as err:
+            msg = err.message
+            if (
+                'No security definition' in msg
+            ):
+                log.warning(f'{msg}: {contracts}')
+                return {}

         # one set per future result
         details = {}
@@ -373,20 +465,11 @@ class Client:
             # XXX: if there is more than one entry in the details list
             # then the contract is so called "ambiguous".
             for d in details_set:
-                con = d.contract
-
-                key = '.'.join([
-                    con.symbol,
-                    con.primaryExchange or con.exchange,
-                ])
-                expiry = con.lastTradeDateOrContractMonth
-                if expiry:
-                    key += f'.{expiry}'
-
-                # nested dataclass we probably don't need and that
-                # won't IPC serialize..
+                # nested dataclass we probably don't need and that won't
+                # IPC serialize..
                 d.secIdList = ''
+                key, calc_price = con2fqsn(d.contract)
                 details[key] = d

         return details
@@ -416,17 +499,20 @@ class Client:
         self,
         pattern: str,
         # how many contracts to search "up to"
-        upto: int = 3,
+        upto: int = 16,
         asdicts: bool = True,

     ) -> dict[str, ContractDetails]:

         # TODO add search through our adhoc-locally defined symbol set
         # for futes/cmdtys/
-        results = await self.search_stocks(
-            pattern,
-            upto=upto,
-        )
+        try:
+            results = await self.search_stocks(
+                pattern,
+                upto=upto,
+            )
+        except ConnectionError:
+            return {}

         for key, deats in results.copy().items():
@@ -437,21 +523,54 @@ class Client:
             if sectype == 'IND':
                 results[f'{sym}.IND'] = tract
                 results.pop(key)
-                exch = tract.exchange
+                # exch = tract.exchange
 
+            # XXX: add back one of these to get the weird deadlock
+            # on the debugger from root without the latest
+            # maybe_wait_for_debugger() fix in the `open_context()`
+            # exit.
+            # assert 0
+            # if con.exchange not in _exch_skip_list:
+
+            exch = tract.exchange
+            if exch not in _exch_skip_list:
+
+                # try to lookup any contracts from our adhoc set
+                # since often the exchange/venue is named slightly
+                # different (eg. BRR.CMECRYPTO` instead of just
+                # `.CME`).
+                info = _adhoc_symbol_map.get(sym)
+                if info:
+                    con_kwargs, bars_kwargs = info
+                    exch = con_kwargs['exchange']
+
-            if exch in _futes_venues:
                 # try get all possible contracts for symbol as per,
                 # https://interactivebrokers.github.io/tws-api/basic_contracts.html#fut
                 con = ibis.Future(
                     symbol=sym,
                     exchange=exch,
                 )
-                try:
+                # TODO: make this work, think it's something to do
+                # with the qualify flag.
+                # cons = await self.find_contracts(
+                #     contract=con,
+                #     err_on_qualify=False,
+                # )
+                # if cons:
                 all_deats = await self.con_deats([con])
                 results |= all_deats
 
-                except RequestError as err:
-                    log.warning(err.message)
+            # forex pairs
+            elif sectype == 'CASH':
+                dst, src = tract.localSymbol.split('.')
+                pair_key = "/".join([dst, src])
+                exch = tract.exchange.lower()
+                results[f'{pair_key}.{exch}'] = tract
+                results.pop(key)
+
+                # XXX: again seems to trigger the weird tractor
+                # bug with the debugger..
+                # assert 0
 
         return results
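In the new ``CASH`` branch above, IB's dotted ``localSymbol`` (e.g. ``EUR.USD``) is re-keyed as a slash pair plus a lowercased venue suffix. A minimal sketch of that normalization, with a hypothetical helper name:

.. code:: python

    # sketch of the forex re-keying in the new `elif sectype == 'CASH'`
    # branch above; `forex_key` is a hypothetical name.
    def forex_key(local_symbol: str, exchange: str) -> str:
        dst, src = local_symbol.split('.')
        return f"{'/'.join([dst, src])}.{exchange.lower()}"

    assert forex_key('EUR.USD', 'IDEALPRO') == 'EUR/USD.idealpro'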
@@ -483,13 +602,19 @@ class Client:
 
         return con
 
-    async def find_contract(
+    async def get_con(
+        self,
+        conid: int,
+    ) -> Contract:
+        return await self.ib.qualifyContractsAsync(
+            ibis.Contract(conId=conid)
+        )
+
+    def parse_patt2fqsn(
         self,
         pattern: str,
-        currency: str = 'USD',
-        **kwargs,
 
-    ) -> Contract:
+    ) -> tuple[str, str, str, str]:
 
         # TODO: we can't use this currently because
         # ``wrapper.starTicker()`` currently caches ticker instances
@@ -502,12 +627,30 @@ class Client:
         # XXX UPDATE: we can probably do the tick/trades scraping
         # inside our eventkit handler instead to bypass this entirely?
 
+        currency = ''
+
+        # fqsn parsing stage
+        # ------------------
         if '.ib' in pattern:
             from ..data._source import unpack_fqsn
-            broker, symbol, expiry = unpack_fqsn(pattern)
+            _, symbol, expiry = unpack_fqsn(pattern)
 
         else:
             symbol = pattern
+            expiry = ''
+
+        # another hack for forex pairs lul.
+        if (
+            '.idealpro' in symbol
+            # or '/' in symbol
+        ):
+            exch = 'IDEALPRO'
+            symbol = symbol.removesuffix('.idealpro')
+            if '/' in symbol:
+                symbol, currency = symbol.split('/')
+
+        else:
+            # TODO: yes, a cache..
             # try:
             #     # give the cache a go
             #     return self._contracts[symbol]
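The parsing stage above strips an ``.idealpro`` suffix before splitting a slash pair, so both ``eurusd.idealpro`` and ``eur/usd.idealpro`` resolve to the same venue. A standalone sketch of that logic (helper name hypothetical; ``removesuffix`` requires Python 3.9+):

.. code:: python

    # sketch of the forex-pattern hack in `parse_patt2fqsn()` above.
    def parse_forex_patt(symbol: str) -> tuple[str, str, str]:
        currency = ''
        exch = ''
        if '.idealpro' in symbol:
            exch = 'IDEALPRO'
            symbol = symbol.removesuffix('.idealpro')
            if '/' in symbol:
                symbol, currency = symbol.split('/')
        return symbol.upper(), currency.upper(), exch

    assert parse_forex_patt('eur/usd.idealpro') == ('EUR', 'USD', 'IDEALPRO')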
@@ -518,45 +661,80 @@ class Client:
             symbol, _, expiry = symbol.rpartition('.')
 
         # use heuristics to figure out contract "type"
-        sym, exch = symbol.upper().rsplit('.', maxsplit=1)
+        symbol, exch = symbol.upper().rsplit('.', maxsplit=1)
 
-        qualify: bool = True
+        return symbol, currency, exch, expiry
+
+    async def find_contracts(
+        self,
+        pattern: Optional[str] = None,
+        contract: Optional[Contract] = None,
+        qualify: bool = True,
+        err_on_qualify: bool = True,
+
+    ) -> Contract:
+
+        if pattern is not None:
+            symbol, currency, exch, expiry = self.parse_patt2fqsn(
+                pattern,
+            )
+            sectype = ''
+
+        else:
+            assert contract
+            symbol = contract.symbol
+            sectype = contract.secType
+            exch = contract.exchange or contract.primaryExchange
+            expiry = contract.lastTradeDateOrContractMonth
+            currency = contract.currency
+
+        # contract searching stage
+        # ------------------------
 
         # futes
         if exch in _futes_venues:
             if expiry:
                 # get the "front" contract
-                contract = await self.get_fute(
-                    symbol=sym,
+                con = await self.get_fute(
+                    symbol=symbol,
                     exchange=exch,
                     expiry=expiry,
                 )
 
             else:
                 # get the "front" contract
-                contract = await self.get_fute(
-                    symbol=sym,
+                con = await self.get_fute(
+                    symbol=symbol,
                     exchange=exch,
                     front=True,
                 )
 
-            qualify = False
-
-        elif exch in ('FOREX'):
-            currency = ''
-            symbol, currency = sym.split('/')
+        elif (
+            exch in ('IDEALPRO')
+            or sectype == 'CASH'
+        ):
+            # if '/' in symbol:
+            #     currency = ''
+            #     symbol, currency = symbol.split('/')
             con = ibis.Forex(
-                symbol=symbol,
+                pair=''.join((symbol, currency)),
                 currency=currency,
             )
             con.bars_kwargs = {'whatToShow': 'MIDPOINT'}
 
         # commodities
         elif exch == 'CMDTY':  # eg. XAUUSD.CMDTY
-            con_kwargs, bars_kwargs = _adhoc_cmdty_data_map[sym]
+            con_kwargs, bars_kwargs = _adhoc_symbol_map[symbol]
             con = ibis.Commodity(**con_kwargs)
             con.bars_kwargs = bars_kwargs
 
+        # crypto$
+        elif exch == 'PAXOS':  # btc.paxos
+            con = ibis.Crypto(
+                symbol=symbol,
+                currency=currency,
+            )
+
         # stonks
         else:
             # TODO: metadata system for all these exchange rules..
@@ -569,41 +747,61 @@ class Client:
                 exch = 'SMART'
 
             else:
-                exch = 'SMART'
+                # XXX: order is super important here since
+                # a primary == 'SMART' won't ever work.
                 primaryExchange = exch
+                exch = 'SMART'
 
         con = ibis.Stock(
-            symbol=sym,
+            symbol=symbol,
             exchange=exch,
             primaryExchange=primaryExchange,
             currency=currency,
         )
-        try:
             exch = 'SMART' if not exch else exch
-            if qualify:
-                contract = (await self.ib.qualifyContractsAsync(con))[0]
-            else:
-                assert contract
 
-        except IndexError:
+        contracts = [con]
+        if qualify:
+            try:
+                contracts = await self.ib.qualifyContractsAsync(con)
+            except RequestError as err:
+                msg = err.message
+                if (
+                    'No security definition' in msg
+                    and not err_on_qualify
+                ):
+                    log.warning(
+                        f'Could not find def for {con}')
+                    return None
+
+                else:
+                    raise
+        if not contracts:
             raise ValueError(f"No contract could be found {con}")
 
-        self._contracts[pattern] = contract
-
-        # add an aditional entry with expiry suffix if available
-        conexp = contract.lastTradeDateOrContractMonth
-        if conexp:
-            self._contracts[pattern + f'.{conexp}'] = contract
+        # pack all contracts into cache
+        for tract in contracts:
+            exch: str = tract.primaryExchange or tract.exchange or exch
+            pattern = f'{symbol}.{exch}'
+            expiry = tract.lastTradeDateOrContractMonth
+            # add an entry with expiry suffix if available
+            if expiry:
+                pattern += f'.{expiry}'
+
+            self._contracts[pattern.lower()] = tract
 
-        return contract
+        return contracts
 
     async def get_head_time(
         self,
-        contract: Contract,
-    ) -> datetime:
-        """Return the first datetime stamp for ``contract``.
-
-        """
+        fqsn: str,
+
+    ) -> datetime:
+        '''
+        Return the first datetime stamp for ``contract``.
+
+        '''
+        contract = (await self.find_contracts(fqsn))[0]
         return await self.ib.reqHeadTimeStampAsync(
             contract,
             whatToShow='TRADES',
@@ -614,9 +812,10 @@ class Client:
     async def get_sym_details(
         self,
         symbol: str,
 
     ) -> tuple[Contract, Ticker, ContractDetails]:
 
-        contract = await self.find_contract(symbol)
+        contract = (await self.find_contracts(symbol))[0]
         ticker: Ticker = self.ib.reqMktData(
             contract,
             snapshot=True,
@@ -672,9 +871,7 @@ class Client:
     # async to be consistent for the client proxy, and cuz why not.
     def submit_limit(
         self,
-        # ignored since ib doesn't support defining your
-        # own order id
-        oid: str,
+        oid: str,  # ignored since doesn't support defining your own
         symbol: str,
         price: float,
         action: str,
@@ -690,6 +887,9 @@ class Client:
         '''
         Place an order and return integer request id provided by client.
 
+        Relevant docs:
+        - https://interactivebrokers.github.io/tws-api/order_limitations.html
+
         '''
         try:
             contract = self._contracts[symbol]
@@ -715,6 +915,9 @@ class Client:
                 optOutSmartRouting=True,
                 routeMarketableToBbo=True,
                 designatedLocation='SMART',
+                # TODO: make all orders GTC?
+                # https://interactivebrokers.github.io/tws-api/classIBApi_1_1Order.html#a95539081751afb9980f4c6bd1655a6ba
+                # goodTillDate=f"yyyyMMdd-HH:mm:ss",
             ),
         )
     except AssertionError:  # errrg insync..
@@ -804,6 +1007,76 @@ class Client:
         return self.ib.positions(account=account)
 
 
+def con2fqsn(
+    con: Contract,
+    _cache: dict[int, (str, bool)] = {}
+
+) -> tuple[str, bool]:
+    '''
+    Convert contracts to fqsn-style strings to be used both in symbol-search
+    matching and as feed tokens passed to the front end data feed layer.
+
+    Previously seen contracts are cached by id.
+
+    '''
+    # should be real volume for this contract by default
+    calc_price = False
+    if con.conId:
+        try:
+            return _cache[con.conId]
+        except KeyError:
+            pass
+
+    suffix = con.primaryExchange or con.exchange
+    symbol = con.symbol
+    expiry = con.lastTradeDateOrContractMonth or ''
+
+    match con:
+        case Option():
+            # TODO: option symbol parsing and sane display:
+            symbol = con.localSymbol.replace(' ', '')
+
+        case ibis.Commodity():
+            # commodities and forex don't have an exchange name and
+            # no real volume so we have to calculate the price
+            suffix = con.secType
+
+            # no real volume on this tract
+            calc_price = True
+
+        case ibis.Forex() | ibis.Contract(secType='CASH'):
+            dst, src = con.localSymbol.split('.')
+            symbol = ''.join([dst, src])
+            suffix = con.exchange or 'idealpro'
+
+            # no real volume on forex feeds..
+            calc_price = True
+
+    if not suffix:
+        entry = _adhoc_symbol_map.get(
+            con.symbol or con.localSymbol
+        )
+        if entry:
+            meta, kwargs = entry
+            cid = meta.get('conId')
+            if cid:
+                assert con.conId == meta['conId']
+            suffix = meta['exchange']
+
+    # append a `.<suffix>` to the returned symbol
+    # key for derivatives that normally is the expiry
+    # date key.
+    if expiry:
+        suffix += f'.{expiry}'
+
+    fqsn_key = symbol.lower()
+    if suffix:
+        fqsn_key = '.'.join((fqsn_key, suffix)).lower()
+
+    _cache[con.conId] = fqsn_key, calc_price
+    return fqsn_key, calc_price
+
+
 # per-actor API ep caching
 _client_cache: dict[tuple[str, int], Client] = {}
 _scan_ignore: set[tuple[str, int]] = set()
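``con2fqsn()`` above leans on Python's evaluate-once default arguments: the ``_cache`` dict is created at function definition time and therefore shared by every call in the process. A tiny sketch of that memoization idiom (names here are illustrative):

.. code:: python

    # the mutable-default-arg memo idiom used by `con2fqsn()` above.
    def keyify(
        con_id: int,
        symbol: str,
        _cache: dict[int, str] = {},  # created once, shared per-process
    ) -> str:
        try:
            return _cache[con_id]
        except KeyError:
            pass
        key = symbol.lower()
        _cache[con_id] = key
        return key

    assert keyify(1, 'MNQ') == 'mnq'
    assert keyify(1, 'IGNORED') == 'mnq'  # cache hit; 2nd arg unused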
@@ -811,10 +1084,23 @@ _scan_ignore: set[tuple[str, int]] = set()
 
 def get_config() -> dict[str, Any]:
 
-    conf, path = config.load()
+    conf, path = config.load('brokers')
 
     section = conf.get('ib')
 
+    accounts = section.get('accounts')
+    if not accounts:
+        raise ValueError(
+            'brokers.toml -> `ib.accounts` must be defined\n'
+            f'location: {path}'
+        )
+
+    names = list(accounts.keys())
+    accts = section['accounts'] = bidict(accounts)
+    log.info(
+        f'brokers.toml defines {len(accts)} accounts: '
+        f'{pformat(names)}'
+    )
+
     if section is None:
         log.warning(f'No config section found for ib in {path}')
         return {}
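``get_config()`` now wraps the account table in a ``bidict`` so both directions of the name/number mapping are O(1) lookups; later code already relies on this via ``accounts_def.inverse[pp.account]``. A sketch of the two-way lookup (account values below are made up):

.. code:: python

    from bidict import bidict

    # sketch of the `section['accounts'] = bidict(accounts)` step above.
    accounts = bidict({'margin': 'DU1234567', 'paper': 'DU7654321'})

    assert accounts['margin'] == 'DU1234567'          # name -> account no.
    assert accounts.inverse['DU1234567'] == 'margin'  # account no. -> name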
@@ -836,6 +1122,7 @@ async def load_aio_clients(
     # retry a few times to get the client going..
     connect_retries: int = 3,
     connect_timeout: float = 0.5,
+    disconnect_on_exit: bool = True,
 
 ) -> dict[str, Client]:
     '''
@@ -908,6 +1195,12 @@ async def load_aio_clients(
                     # careful.
                     timeout=connect_timeout,
                 )
+                # create and cache client
+                client = Client(ib)
+
+                # update all actor-global caches
+                log.info(f"Caching client for {sockaddr}")
+                _client_cache[sockaddr] = client
                 break
 
             except (
@@ -931,21 +1224,9 @@ async def load_aio_clients(
                 log.warning(
                     f'Failed to connect on {port} for {i} time, retrying...')
 
-        # create and cache client
-        client = Client(ib)
-
         # Pre-collect all accounts available for this
         # connection and map account names to this client
         # instance.
-        pps = ib.positions()
-        if pps:
-            for pp in pps:
-                accounts_found[
-                    accounts_def.inverse[pp.account]
-                ] = client
-
-        # if there are accounts without positions we should still
-        # register them for this client
         for value in ib.accountValues():
             acct_number = value.account
 
@@ -966,10 +1247,6 @@ async def load_aio_clients(
             f'{pformat(accounts_found)}'
         )
 
-        # update all actor-global caches
-        log.info(f"Caching client for {sockaddr}")
-        _client_cache[sockaddr] = client
-
         # XXX: why aren't we just updating this directly above
         # instead of using the intermediary `accounts_found`?
         _accounts2clients.update(accounts_found)
@@ -987,10 +1264,11 @@ async def load_aio_clients(
     finally:
         # TODO: for re-scans we'll want to not teardown clients which
         # are up and stable right?
+        if disconnect_on_exit:
             for acct, client in _accounts2clients.items():
                 log.info(f'Disconnecting {acct}@{client}')
                 client.ib.disconnect()
-                _client_cache.pop((host, port))
+                _client_cache.pop((host, port), None)
 
 
 async def load_clients_for_trio(
@@ -1019,9 +1297,6 @@ async def load_clients_for_trio(
         await asyncio.sleep(float('inf'))
 
 
-_proxies: dict[str, MethodProxy] = {}
-
-
 @acm
 async def open_client_proxies() -> tuple[
     dict[str, MethodProxy],
@@ -1029,7 +1304,6 @@ async def open_client_proxies() -> tuple[
 ]:
     async with (
         tractor.trionics.maybe_open_context(
-            # acm_func=open_client_proxies,
            acm_func=tractor.to_asyncio.open_channel_from,
            kwargs={'target': load_clients_for_trio},
 
@@ -1044,13 +1318,14 @@ async def open_client_proxies() -> tuple[
     if cache_hit:
         log.info(f'Re-using cached clients: {clients}')
 
+    proxies = {}
     for acct_name, client in clients.items():
         proxy = await stack.enter_async_context(
             open_client_proxy(client),
         )
-        _proxies[acct_name] = proxy
+        proxies[acct_name] = proxy
 
-    yield _proxies, clients
+    yield proxies, clients
 
 
 def get_preferred_data_client(
@@ -1199,11 +1474,13 @@ async def open_client_proxy(
     event_table = {}
 
     async with (
+
         to_asyncio.open_channel_from(
             open_aio_client_method_relay,
             client=client,
             event_consumers=event_table,
         ) as (first, chan),
+
         trio.open_nursery() as relay_n,
     ):
 
File diff suppressed because it is too large
@@ -22,6 +22,7 @@ import asyncio
 from contextlib import asynccontextmanager as acm
 from dataclasses import asdict
 from datetime import datetime
+from functools import partial
 from math import isnan
 import time
 from typing import (
@@ -38,10 +39,14 @@ import tractor
 import trio
 from trio_typing import TaskStatus
 
-from piker.data._sharedmem import ShmArray
-from .._util import SymbolNotFound, NoData
+from .._util import (
+    NoData,
+    DataUnavailable,
+    SymbolNotFound,
+)
 from .api import (
-    _adhoc_futes_set,
+    # _adhoc_futes_set,
+    con2fqsn,
     log,
     load_aio_clients,
     ibis,
@@ -102,7 +107,7 @@ async def open_data_client() -> MethodProxy:
 
 @acm
 async def open_history_client(
-    symbol: str,
+    fqsn: str,
 
 ) -> tuple[Callable, int]:
     '''
@@ -110,26 +115,75 @@ async def open_history_client(
     that takes in ``pendulum.datetime`` and returns ``numpy`` arrays.
 
     '''
+    # TODO:
+    # - add logic to handle tradable hours and only grab
+    #   valid bars in the range?
+    # - we want to avoid overrunning the underlying shm array buffer and
+    #   we should probably calc the number of calls to make depending on
+    #   that until we have the `marketstore` daemon in place in which case
+    #   the shm size will be driven by user config and available sys
+    #   memory.
+
     async with open_data_client() as proxy:
 
+        max_timeout: float = 2.
+        mean: float = 0
+        count: int = 0
+
+        head_dt: None | datetime = None
+        if (
+            # fx cons seem to not provide this endpoint?
+            'idealpro' not in fqsn
+        ):
+            try:
+                head_dt = await proxy.get_head_time(fqsn=fqsn)
+            except RequestError:
+                head_dt = None
+
         async def get_hist(
+            timeframe: float,
             end_dt: Optional[datetime] = None,
             start_dt: Optional[datetime] = None,
 
         ) -> tuple[np.ndarray, str]:
+            nonlocal max_timeout, mean, count
 
-            out, fails = await get_bars(proxy, symbol, end_dt=end_dt)
+            query_start = time.time()
+            out, timedout = await get_bars(
+                proxy,
+                fqsn,
+                timeframe,
+                end_dt=end_dt,
+            )
+            latency = time.time() - query_start
+            if (
+                not timedout
+                # and latency <= max_timeout
+            ):
+                count += 1
+                mean += latency / count
+                print(
+                    f'HISTORY FRAME QUERY LATENCY: {latency}\n'
+                    f'mean: {mean}'
+                )
 
-            # TODO: add logic here to handle tradable hours and only grab
-            # valid bars in the range
-            if out is None:
+            if (
+                out is None
+            ):
                 # could be trying to retrieve bars over weekend
                 log.error(f"Can't grab bars starting at {end_dt}!?!?")
                 raise NoData(
                     f'{end_dt}',
-                    frame_size=2000,
+                    # frame_size=2000,
                 )
 
+            if (
+                end_dt
+                and head_dt
+                and end_dt <= head_dt
+            ):
+                raise DataUnavailable(f'First timestamp is {head_dt}')
+
             bars, bars_array, first_dt, last_dt = out
 
             # volume cleaning since there's -ve entries,
@@ -144,7 +198,7 @@ async def open_history_client(
         # quite sure why.. needs some tinkering and probably
         # a lookthrough of the ``ib_insync`` machinery, for eg. maybe
         # we have to do the batch queries on the `asyncio` side?
-        yield get_hist, {'erlangs': 1, 'rate': 6}
+        yield get_hist, {'erlangs': 1, 'rate': 3}
 
 
 _pacing: str = (
@@ -153,96 +207,19 @@ _pacing: str = (
 )
 
 
-async def get_bars(
+async def wait_on_data_reset(
 
     proxy: MethodProxy,
-    fqsn: str,
+    reset_type: str = 'data',
+    timeout: float = 16,
 
-    # blank to start which tells ib to look up the latest datum
-    end_dt: str = '',
+    task_status: TaskStatus[
+        tuple[
+            trio.CancelScope,
+            trio.Event,
+        ]
+    ] = trio.TASK_STATUS_IGNORED,
 
-) -> (dict, np.ndarray):
-    '''
-    Retrieve historical data from a ``trio``-side task using
-    a ``MethoProxy``.
-
-    '''
-    fails = 0
-    bars: Optional[list] = None
-    first_dt: datetime = None
-    last_dt: datetime = None
-
-    if end_dt:
-        last_dt = pendulum.from_timestamp(end_dt.timestamp())
-
-    for _ in range(10):
-        try:
-            out = await proxy.bars(
-                fqsn=fqsn,
-                end_dt=end_dt,
-            )
-            if out:
-                bars, bars_array = out
-
-            else:
-                await tractor.breakpoint()
-
-            if bars_array is None:
-                raise SymbolNotFound(fqsn)
-
-            first_dt = pendulum.from_timestamp(
-                bars[0].date.timestamp())
-
-            last_dt = pendulum.from_timestamp(
-                bars[-1].date.timestamp())
-
-            time = bars_array['time']
-            assert time[-1] == last_dt.timestamp()
-            assert time[0] == first_dt.timestamp()
-            log.info(
-                f'{len(bars)} bars retreived for {first_dt} -> {last_dt}'
-            )
-
-            return (bars, bars_array, first_dt, last_dt), fails
-
-        except RequestError as err:
-            msg = err.message
-            # why do we always need to rebind this?
-            # _err = err
-
-            if 'No market data permissions for' in msg:
-                # TODO: signalling for no permissions searches
-                raise NoData(
-                    f'Symbol: {fqsn}',
-                )
-
-            elif (
-                err.code == 162
-                and 'HMDS query returned no data' in err.message
-            ):
-                # XXX: this is now done in the storage mgmt layer
-                # and we shouldn't implicitly decrement the frame dt
-                # index since the upper layer may be doing so
-                # concurrently and we don't want to be delivering frames
-                # that weren't asked for.
-                log.warning(
-                    f'NO DATA found ending @ {end_dt}\n'
-                )
-
-                # try to decrement start point and look further back
-                # end_dt = last_dt = last_dt.subtract(seconds=2000)
-
-                raise NoData(
-                    f'Symbol: {fqsn}',
-                    frame_size=2000,
-                )
-
-            elif _pacing in msg:
-
-                log.warning(
-                    'History throttle rate reached!\n'
-                    'Resetting farms with `ctrl-alt-f` hack\n'
-                )
+) -> bool:
 
     # TODO: we might have to put a task lock around this
     # method..
     hist_ev = proxy.status_event(
@@ -258,144 +235,259 @@ async def get_bars(
     # live_ev = proxy.status_event(
     #     'Market data farm connection is OK:usfuture'
    # )
 
     # try to wait on the reset event(s) to arrive, a timeout
     # will trigger a retry up to 6 times (for now).
-    tries: int = 2
-    timeout: float = 10
 
-    # try 3 time with a data reset then fail over to
-    # a connection reset.
-    for i in range(1, tries):
-
-        log.warning('Sending DATA RESET request')
-        await data_reset_hack(reset_type='data')
-
-        with trio.move_on_after(timeout) as cs:
-            for name, ev in [
-                # TODO: not sure if waiting on other events
-                # is all that useful here or not. in theory
-                # you could wait on one of the ones above
-                # first to verify the reset request was
-                # sent?
-                ('history', hist_ev),
-            ]:
-                await ev.wait()
-                log.info(f"{name} DATA RESET")
-                break
-
-        if cs.cancelled_caught:
-            fails += 1
-            log.warning(
-                f'Data reset {name} timeout, retrying {i}.'
-            )
-
-            continue
-        else:
-
-            log.warning('Sending CONNECTION RESET')
-            await data_reset_hack(reset_type='connection')
-
-            with trio.move_on_after(timeout) as cs:
-                for name, ev in [
-                    # TODO: not sure if waiting on other events
-                    # is all that useful here or not. in theory
-                    # you could wait on one of the ones above
-                    # first to verify the reset request was
-                    # sent?
-                    ('history', hist_ev),
-                ]:
-                    await ev.wait()
-                    log.info(f"{name} DATA RESET")
-
-            if cs.cancelled_caught:
-                fails += 1
-                log.warning('Data CONNECTION RESET timeout!?')
+    done = trio.Event()
+    with trio.move_on_after(timeout) as cs:
+
+        task_status.started((cs, done))
+
+        log.warning('Sending DATA RESET request')
+        res = await data_reset_hack(reset_type=reset_type)
+
+        if not res:
+            log.warning(
+                'NO VNC DETECTED!\n'
+                'Manually press ctrl-alt-f on your IB java app'
+            )
+            done.set()
+            return False
+
+        # TODO: not sure if waiting on other events
+        # is all that useful here or not.
+        # - in theory you could wait on one of the ones above first
+        #   to verify the reset request was sent?
+        # - we need the same for real-time quote feeds which can
+        #   sometimes flake out and stop delivering..
+        for name, ev in [
+            ('history', hist_ev),
+        ]:
+            await ev.wait()
+            log.info(f"{name} DATA RESET")
+            done.set()
+            return True
+
+    if cs.cancel_called:
+        log.warning(
+            'Data reset task canceled?'
+        )
+
+    done.set()
+    return False
+
+
+_data_resetter_task: trio.Task | None = None
+
+
+async def get_bars(
+
+    proxy: MethodProxy,
+    fqsn: str,
+    timeframe: int,
+
+    # blank to start which tells ib to look up the latest datum
+    end_dt: str = '',
+
+    # TODO: make this more dynamic based on measured frame rx latency?
+    # how long before we trigger a feed reset (seconds)
+    feed_reset_timeout: float = 3,
+
+    # how many days to subtract before giving up on further
+    # history queries for instrument, presuming that most don't
+    # not trade for a week XD
+    max_nodatas: int = 6,
+
+    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
+
+) -> (dict, np.ndarray):
+    '''
+    Retrieve historical data from a ``trio``-side task using
+    a ``MethodProxy``.
+
+    '''
+    global _data_resetter_task
+    nodatas_count: int = 0
+
+    data_cs: trio.CancelScope | None = None
+    result: tuple[
+        ibis.objects.BarDataList,
+        np.ndarray,
+        datetime,
+        datetime,
+    ] | None = None
+    result_ready = trio.Event()
+
+    async def query():
+        nonlocal result, data_cs, end_dt, nodatas_count
+        while True:
+            try:
+                out = await proxy.bars(
+                    fqsn=fqsn,
+                    end_dt=end_dt,
+                    sample_period_s=timeframe,
+
+                    # ideally we cancel the request just before we
+                    # cancel on the ``trio``-side and trigger a data
+                    # reset hack.. the problem is there's no way (with
+                    # current impl) to detect a cancel case.
+                    # timeout=timeout,
+                )
+                if out is None:
+                    raise NoData(f'{end_dt}')
+
+                bars, bars_array, dt_duration = out
+
+                if not bars:
+                    log.warning(
+                        f'History is blank for {dt_duration} from {end_dt}'
+                    )
+                    end_dt -= dt_duration
                     continue
 
+                if bars_array is None:
+                    raise SymbolNotFound(fqsn)
+
+                first_dt = pendulum.from_timestamp(
+                    bars[0].date.timestamp())
+
+                last_dt = pendulum.from_timestamp(
+                    bars[-1].date.timestamp())
+
+                time = bars_array['time']
+                assert time[-1] == last_dt.timestamp()
+                assert time[0] == first_dt.timestamp()
+                log.info(
+                    f'{len(bars)} bars retrieved {first_dt} -> {last_dt}'
+                )
+
+                if data_cs:
+                    data_cs.cancel()
+
+                result = (bars, bars_array, first_dt, last_dt)
+
+                # signal data reset loop parent task
+                result_ready.set()
+
+                return result
+
+            except RequestError as err:
+                msg = err.message
+
+                if 'No market data permissions for' in msg:
+                    # TODO: signalling for no permissions searches
+                    raise NoData(
+                        f'Symbol: {fqsn}',
+                    )
+
+                elif err.code == 162:
+                    if (
+                        'HMDS query returned no data' in msg
+                    ):
+                        # XXX: this is now done in the storage mgmt
+                        # layer and we shouldn't implicitly decrement
+                        # the frame dt index since the upper layer may
+                        # be doing so concurrently and we don't want to
+                        # be delivering frames that weren't asked for.
+                        # try to decrement start point and look further back
+                        # end_dt = end_dt.subtract(seconds=2000)
+                        logmsg = "SUBTRACTING DAY from DT index"
+                        if end_dt is not None:
+                            end_dt = end_dt.subtract(days=1)
+                        elif end_dt is None:
+                            end_dt = pendulum.now().subtract(days=1)
+
+                        log.warning(
+                            f'NO DATA found ending @ {end_dt}\n'
+                            + logmsg
+                        )
+
+                        if nodatas_count >= max_nodatas:
+                            raise DataUnavailable(
+                                f'Presuming {fqsn} has no further history '
+                                f'after {max_nodatas} tries..'
+                            )
+
+                        nodatas_count += 1
+                        continue
+
+                elif 'API historical data query cancelled' in err.message:
+                    log.warning(
+                        'Query cancelled by IB (:eyeroll:):\n'
+                        f'{err.message}'
+                    )
+                    continue
+                elif (
+                    'Trading TWS session is connected from a different IP'
+                    in err.message
+                ):
+                    log.warning("ignoring ip address warning")
+                    continue
+
+                # XXX: more or less same as above timeout case
+                elif _pacing in msg:
+                    log.warning(
+                        'History throttle rate reached!\n'
+                        'Resetting farms with `ctrl-alt-f` hack\n'
+                    )
+
+                    # cancel any existing reset task
+                    if data_cs:
+                        data_cs.cancel()
+
+                    # spawn new data reset task
+                    data_cs, reset_done = await nurse.start(
+                        partial(
+                            wait_on_data_reset,
+                            proxy,
+                            timeout=float('inf'),
+                            reset_type='connection'
+                        )
+                    )
+                    continue
 
                 else:
                     raise
 
-    return None, None
-    # else:  # throttle wasn't fixed so error out immediately
-    # raise _err
-
-
-async def backfill_bars(
-
-    fqsn: str,
-    shm: ShmArray,  # type: ignore # noqa
-
-    # TODO: we want to avoid overrunning the underlying shm array buffer
-    # and we should probably calc the number of calls to make depending
-    # on that until we have the `marketstore` daemon in place in which
-    # case the shm size will be driven by user config and available sys
-    # memory.
-    count: int = 16,
-
-    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
-
-) -> None:
-    '''
-    Fill historical bars into shared mem / storage afap.
-
-    TODO: avoid pacing constraints:
-    https://github.com/pikers/piker/issues/128
-
-    '''
-    # last_dt1 = None
-    last_dt = None
-
-    with trio.CancelScope() as cs:
-
-        async with open_data_client() as proxy:
-
-            out, fails = await get_bars(proxy, fqsn)
-
-            if out is None:
-                raise RuntimeError("Could not pull currrent history?!")
-
-            (first_bars, bars_array, first_dt, last_dt) = out
-            vlm = bars_array['volume']
-            vlm[vlm < 0] = 0
-            last_dt = first_dt
-
-            # write historical data to buffer
-            shm.push(bars_array)
-
-            task_status.started(cs)
-
-            i = 0
-            while i < count:
-
-                out, fails = await get_bars(proxy, fqsn, end_dt=first_dt)
-
-                if out is None:
-                    # could be trying to retreive bars over weekend
-                    # TODO: add logic here to handle tradable hours and
-                    # only grab valid bars in the range
-                    log.error(f"Can't grab bars starting at {first_dt}!?!?")
-
-                    # XXX: get_bars() should internally decrement dt by
-                    # 2k seconds and try again.
-                    continue
-
-                (first_bars, bars_array, first_dt, last_dt) = out
-                # last_dt1 = last_dt
-                # last_dt = first_dt
-
-                # volume cleaning since there's -ve entries,
-                # wood luv to know what crookery that is..
-                vlm = bars_array['volume']
-                vlm[vlm < 0] = 0
-
-                # TODO we should probably dig into forums to see what peeps
-                # think this data "means" and then use it as an indicator of
-                # sorts? dinkus has mentioned that $vlms for the day dont'
-                # match other platforms nor the summary stat tws shows in
-                # the monitor - it's probably worth investigating.
-
-                shm.push(bars_array, prepend=True)
-                i += 1
+    # TODO: make this global across all history task/requests
+    # such that simultaneous symbol queries don't try data resetting
+    # too fast..
+    unset_resetter: bool = False
+    async with trio.open_nursery() as nurse:
+
+        # start history request that we allow
+        # to run indefinitely until a result is acquired
+        nurse.start_soon(query)
+
+        # start history reset loop which waits up to the timeout
+        # for a result before triggering a data feed reset.
+        while not result_ready.is_set():
+
+            with trio.move_on_after(feed_reset_timeout):
+                await result_ready.wait()
+                break
+
+            if _data_resetter_task:
+                # don't double invoke the reset hack if another
+                # requester task already has it covered.
+                continue
+            else:
+                _data_resetter_task = trio.lowlevel.current_task()
+                unset_resetter = True
+
+            # spawn new data reset task
+            data_cs, reset_done = await nurse.start(
+                partial(
+                    wait_on_data_reset,
+                    proxy,
+                    timeout=float('inf'),
+                )
+            )
+            # sync wait on reset to complete
+            await reset_done.wait()
+
+    _data_resetter_task = None if unset_resetter else _data_resetter_task
+    return result, data_cs is not None
 
 
 asset_type_map = {
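The rewritten ``get_bars()`` above races an unbounded query task against a reset watchdog: the query sets ``result_ready`` on success while the parent loop re-fires the ctrl-alt-f hack every ``feed_reset_timeout`` seconds until that event trips. A stripped-down sketch of the same structure (names simplified, reset body stubbed):

.. code:: python

    import trio

    # minimal sketch of the query-vs-reset race in `get_bars()` above.
    async def fetch_with_resets(query, reset, timeout: float = 3):
        result = None
        ready = trio.Event()

        async def _query():
            nonlocal result
            result = await query()
            ready.set()

        async with trio.open_nursery() as nursery:
            nursery.start_soon(_query)
            while not ready.is_set():
                with trio.move_on_after(timeout):
                    await ready.wait()
                    break
                # timed out: trigger a (stubbed) data reset and retry
                await reset()
        return result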
@@ -413,6 +505,7 @@ asset_type_map = {
     'WAR': 'warrant',
     'IOPT': 'warran',
     'BAG': 'bag',
+    'CRYPTO': 'crypto',  # bc it's diff than fiat?
     # 'NEWS': 'news',
 }
@@ -452,7 +545,9 @@ async def _setup_quote_stream(
 
     to_trio.send_nowait(None)
 
-    async with load_aio_clients() as accts2clients:
+    async with load_aio_clients(
+        disconnect_on_exit=False,
+    ) as accts2clients:
         caccount_name, client = get_preferred_data_client(accts2clients)
         contract = contract or (await client.find_contract(symbol))
         ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts))
@@ -498,10 +593,11 @@ async def _setup_quote_stream(
                 # Manually do the dereg ourselves.
                 teardown()
             except trio.WouldBlock:
-                log.warning(
-                    f'channel is blocking symbol feed for {symbol}?'
-                    f'\n{to_trio.statistics}'
-                )
+                # log.warning(
+                #     f'channel is blocking symbol feed for {symbol}?'
+                #     f'\n{to_trio.statistics}'
+                # )
+                pass
 
             # except trio.WouldBlock:
             #     # for slow debugging purposes to avoid clobbering prompt
@@ -531,7 +627,8 @@ async def open_aio_quote_stream(
     from_aio = _quote_streams.get(symbol)
     if from_aio:
 
-        # if we already have a cached feed deliver a rx side clone to consumer
+        # if we already have a cached feed deliver a rx side clone
+        # to consumer
         async with broadcast_receiver(
             from_aio,
             2**6,
@@ -553,38 +650,17 @@ async def open_aio_quote_stream(
 
 
 # TODO: cython/mypyc/numba this!
+# or we can at least cache a majority of the values
+# except for the ones we expect to change?..
 def normalize(
     ticker: Ticker,
     calc_price: bool = False
 
 ) -> dict:
 
-    # should be real volume for this contract by default
-    calc_price = False
-
     # check for special contract types
     con = ticker.contract
-    if type(con) in (
-        ibis.Commodity,
-        ibis.Forex,
-    ):
-        # commodities and forex don't have an exchange name and
-        # no real volume so we have to calculate the price
-        suffix = con.secType
-        # no real volume on this tract
-        calc_price = True
-
-    else:
-        suffix = con.primaryExchange
-        if not suffix:
-            suffix = con.exchange
-
-        # append a `.<suffix>` to the returned symbol
-        # key for derivatives that normally is the expiry
-        # date key.
-        expiry = con.lastTradeDateOrContractMonth
-        if expiry:
-            suffix += f'.{expiry}'
+    fqsn, calc_price = con2fqsn(con)
 
     # convert named tuples to dicts so we send usable keys
     new_ticks = []
@@ -616,9 +692,7 @@ def normalize(
 
     # generate fqsn with possible specialized suffix
     # for derivatives, note the lowercase.
-    data['symbol'] = data['fqsn'] = '.'.join(
-        (con.symbol, suffix)
-    ).lower()
+    data['symbol'] = data['fqsn'] = fqsn
 
     # convert named tuples to dicts for transport
     tbts = data.get('tickByTicks')
@@ -683,6 +757,13 @@ async def stream_quotes(
         # TODO: more consistent field translation
         atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]
 
+        if atype in {
+            'forex',
+            'index',
+            'commodity',
+        }:
+            syminfo['no_vlm'] = True
+
         # for stocks it seems TWS reports too small a tick size
         # such that you can't submit orders with that granularity?
         min_tick = 0.01 if atype == 'stock' else 0
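The new ``no_vlm`` flag above marks asset classes which never report real volume, so consumers can skip the first-trade poll entirely. A trivial consumer-side sketch (helper name hypothetical):

.. code:: python

    # sketch: branch on the `syminfo['no_vlm']` flag set above.
    def wants_volume_poll(syminfo: dict) -> bool:
        return not syminfo.get('no_vlm', False)

    assert not wants_volume_poll({'asset_type': 'forex', 'no_vlm': True})
    assert wants_volume_poll({'asset_type': 'stock'})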
@@ -709,9 +790,9 @@ async def stream_quotes(
             },
 
         }
-        return init_msgs
+        return init_msgs, syminfo
 
-    init_msgs = mk_init_msgs()
+    init_msgs, syminfo = mk_init_msgs()
 
     # TODO: we should instead spawn a task that waits on a feed to start
     # and let it wait indefinitely..instead of this hard coded stuff.
@@ -720,7 +801,14 @@ async def stream_quotes(
 
     # it might be outside regular trading hours so see if we can at
     # least grab history.
-    if isnan(first_ticker.last):
+    if (
+        isnan(first_ticker.last)
+        and type(first_ticker.contract) not in (
+            ibis.Commodity,
+            ibis.Forex,
+            ibis.Crypto,
+        )
+    ):
         task_status.started((init_msgs, first_quote))
 
         # it's not really live but this will unblock
@@ -731,41 +819,77 @@ async def stream_quotes(
         await trio.sleep_forever()
         return  # we never expect feed to come up?
 
-    async with open_aio_quote_stream(
-        symbol=sym,
-        contract=con,
-    ) as stream:
+    cs: Optional[trio.CancelScope] = None
+    startup: bool = True
+    while (
+        startup
+        or cs.cancel_called
+    ):
+        with trio.CancelScope() as cs:
+            async with (
+                trio.open_nursery() as nurse,
+                open_aio_quote_stream(
+                    symbol=sym,
+                    contract=con,
+                ) as stream,
+            ):
                 # ugh, clear ticks since we've consumed them
                 # (ahem, ib_insync is stateful trash)
                 first_ticker.ticks = []
 
+                # only on first entry at feed boot up
+                if startup:
+                    startup = False
                     task_status.started((init_msgs, first_quote))
 
+                # start a stream restarter task which monitors the
+                # data feed event.
+                async def reset_on_feed():
+
+                    # TODO: this seems to be suppressed from the
+                    # traceback in ``tractor``?
+                    # assert 0
+
+                    rt_ev = proxy.status_event(
+                        'Market data farm connection is OK:usfarm'
+                    )
+                    await rt_ev.wait()
+                    cs.cancel()  # cancel called should now be set
+
+                nurse.start_soon(reset_on_feed)
 
                 async with aclosing(stream):
-                    if type(first_ticker.contract) not in (
-                        ibis.Commodity,
-                        ibis.Forex
-                    ):
-                        # wait for real volume on feed (trading might be closed)
+                    if syminfo.get('no_vlm', False):
+
+                        # generally speaking these feeds don't
+                        # include vlm data.
+                        atype = syminfo['asset_type']
+                        log.info(
+                            f'No-vlm {sym}@{atype}, skipping quote poll'
+                        )
+
+                    else:
+                        # wait for real volume on feed (trading might be
+                        # closed)
                         while True:
                             ticker = await stream.receive()
 
-                            # for a real volume contract we rait for the first
-                            # "real" trade to take place
+                            # for a real volume contract we wait for
+                            # the first "real" trade to take place
                             if (
                                 # not calc_price
                                 # and not ticker.rtTime
                                 not ticker.rtTime
                             ):
-                                # spin consuming tickers until we get a real
-                                # market datum
+                                # spin consuming tickers until we
+                                # get a real market datum
                                 log.debug(f"New unsent ticker: {ticker}")
                                 continue
                             else:
-                                log.debug("Received first real volume tick")
-                                # ugh, clear ticks since we've consumed them
-                                # (ahem, ib_insync is truly stateful trash)
+                                log.debug("Received first volume tick")
+                                # ugh, clear ticks since we've
+                                # consumed them (ahem, ib_insync is
+                                # truly stateful trash)
                                 ticker.ticks = []
 
                     # XXX: this works because we don't use
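The quote loop above restarts itself when ``reset_on_feed()`` cancels the enclosing scope: ``cs.cancel_called`` stays ``True`` after the scope exits, so the ``while`` re-enters, while a clean return leaves it ``False`` and exits. A reduced sketch of that restart-on-cancel shape:

.. code:: python

    import trio

    # sketch of the `while startup or cs.cancel_called` restart loop above.
    async def run_with_restarts(body, watchdog):
        cs: trio.CancelScope | None = None
        startup: bool = True
        while startup or (cs is not None and cs.cancel_called):
            startup = False
            with trio.CancelScope() as cs:
                async with trio.open_nursery() as nursery:

                    async def _watch():
                        await watchdog()  # eg. a feed-reset event
                        cs.cancel()       # flips `cancel_called`

                    nursery.start_soon(_watch)
                    await body()
                    # clean exit: kill the watchdog and leave
                    # `cancel_called` False
                    nursery.cancel_scope.cancel()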
@@ -781,7 +905,9 @@ async def stream_quotes(
                     # last = time.time()
                     async for ticker in stream:
                         quote = normalize(ticker)
-                        await send_chan.send({quote['fqsn']: quote})
+                        fqsn = quote['fqsn']
+                        # print(f'sending {fqsn}:\n{quote}')
+                        await send_chan.send({fqsn: quote})
 
                         # ugh, clear ticks since we've consumed them
                         ticker.ticks = []
@@ -805,6 +931,9 @@ async def data_reset_hack(
       successful.
     - other OS support?
    - integration with ``ib-gw`` run in docker + Xorg?
+    - is it possible to offer a local server that can be accessed by
+      a client? Would sure be handy for running native java blobs
+      that need to be wrangled.
 
     '''
 
@@ -835,7 +964,10 @@ async def data_reset_hack(
             client.mouse.click()
             client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked
 
-    await tractor.to_asyncio.run_task(vnc_click_hack)
+    try:
+        await tractor.to_asyncio.run_task(vnc_click_hack)
+    except OSError:
+        return False
 
     # we don't really need the ``xdotool`` approach any more B)
     return True
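``data_reset_hack()`` now degrades gracefully: an ``OSError`` from the VNC connection attempt maps to a ``False`` return (no VNC server listening) instead of crashing the history task. The same shape in isolation (the host/port probe below is illustrative only, not the diff's actual mechanism):

.. code:: python

    import trio

    # sketch of the failure-tolerant probe added above: connection
    # errors mean 'no VNC, fall back to a manual ctrl-alt-f press'.
    async def try_vnc_reset(host: str = 'localhost', port: int = 3003) -> bool:
        try:
            stream = await trio.open_tcp_stream(host, port)
        except OSError:
            return False
        await stream.aclose()
        return True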
@@ -850,15 +982,31 @@ async def open_symbol_search(
     # TODO: load user defined symbol set locally for fast search?
     await ctx.started({})
 
-    async with open_data_client() as proxy:
+    async with (
+        open_client_proxies() as (proxies, clients),
+        open_data_client() as data_proxy,
+    ):
         async with ctx.open_stream() as stream:
 
-            last = time.time()
+            # select a non-history client for symbol search to lighten
+            # the load in the main data node.
+            proxy = data_proxy
+            for name, proxy in proxies.items():
+                if proxy is data_proxy:
+                    continue
+                break
+
+            ib_client = proxy._aio_ns.ib
+            log.info(f'Using {ib_client} for symbol search')
+
+            last = time.time()
             async for pattern in stream:
-                log.debug(f'received {pattern}')
+                log.info(f'received {pattern}')
                 now = time.time()
 
+                # this causes tractor hang...
+                # assert 0
+
                 assert pattern, 'IB can not accept blank search pattern'
 
                 # throttle search requests to no faster than 1Hz
@@ -871,7 +1019,14 @@ async def open_symbol_search(
                 except trio.WouldBlock:
                     pass
 
-                if not pattern or pattern.isspace():
+                if (
+                    not pattern
+                    or pattern.isspace()
+
+                    # XXX: not sure if this is a bad assumption but it
+                    # seems to make search snappier?
+                    or len(pattern) < 1
+                ):
                     log.warning('empty pattern received, skipping..')
 
                     # TODO: *BUG* if nothing is returned here the client
@@ -886,7 +1041,7 @@ async def open_symbol_search(
 
                     continue
 
-                log.debug(f'searching for {pattern}')
+                log.info(f'searching for {pattern}')
 
                 last = time.time()
 
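Search requests are still throttled to roughly 1Hz by diffing wall-clock timestamps. The same guard as a tiny reusable class (illustrative only):

.. code:: python

    import time

    # sketch of the timestamp-diff throttle in the search loop above.
    class Throttle:
        def __init__(self, period: float = 1.0) -> None:
            self.period = period
            self._last = 0.0

        def ready(self) -> bool:
            now = time.time()
            if now - self._last < self.period:
                return False
            self._last = now
            return True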
@@ -895,8 +1050,16 @@ async def open_symbol_search(
                 stock_results = []
 
                 async def stash_results(target: Awaitable[list]):
-                    stock_results.extend(await target)
+                    try:
+                        results = await target
+                    except tractor.trionics.Lagged:
+                        print("IB SYM-SEARCH OVERRUN?!?")
+                        return
+
+                    stock_results.extend(results)
 
+                for i in range(10):
+                    with trio.move_on_after(3) as cs:
                         async with trio.open_nursery() as sn:
                             sn.start_soon(
                                 stash_results,
@ -909,17 +1072,26 @@ async def open_symbol_search(
|
||||||
# trigger async request
|
# trigger async request
|
||||||
await trio.sleep(0)
|
await trio.sleep(0)
|
||||||
|
|
||||||
# match against our ad-hoc set immediately
|
if cs.cancelled_caught:
|
||||||
adhoc_matches = fuzzy.extractBests(
|
log.warning(
|
||||||
pattern,
|
f'Search timeout? {proxy._aio_ns.ib.client}'
|
||||||
list(_adhoc_futes_set),
|
|
||||||
score_cutoff=90,
|
|
||||||
)
|
)
|
||||||
log.info(f'fuzzy matched adhocs: {adhoc_matches}')
|
continue
|
||||||
adhoc_match_results = {}
|
else:
|
||||||
if adhoc_matches:
|
break
|
||||||
# TODO: do we need to pull contract details?
|
|
||||||
adhoc_match_results = {i[0]: {} for i in adhoc_matches}
|
# # match against our ad-hoc set immediately
|
||||||
|
# adhoc_matches = fuzzy.extractBests(
|
||||||
|
# pattern,
|
||||||
|
# list(_adhoc_futes_set),
|
||||||
|
# score_cutoff=90,
|
||||||
|
# )
|
||||||
|
# log.info(f'fuzzy matched adhocs: {adhoc_matches}')
|
||||||
|
# adhoc_match_results = {}
|
||||||
|
# if adhoc_matches:
|
||||||
|
# # TODO: do we need to pull contract details?
|
||||||
|
# adhoc_match_results = {i[0]: {} for i in
|
||||||
|
# adhoc_matches}
|
||||||
|
|
||||||
log.debug(f'fuzzy matching stocks {stock_results}')
|
log.debug(f'fuzzy matching stocks {stock_results}')
|
||||||
stock_matches = fuzzy.extractBests(
|
stock_matches = fuzzy.extractBests(
|
||||||
|
@ -928,7 +1100,8 @@ async def open_symbol_search(
|
||||||
score_cutoff=50,
|
score_cutoff=50,
|
||||||
)
|
)
|
||||||
|
|
||||||
matches = adhoc_match_results | {
|
# matches = adhoc_match_results | {
|
||||||
|
matches = {
|
||||||
item[0]: {} for item in stock_matches
|
item[0]: {} for item in stock_matches
|
||||||
}
|
}
|
||||||
# TODO: we used to deliver contract details
|
# TODO: we used to deliver contract details
|
||||||
|
|
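An illustrative aside (not part of the diff): ``fuzzy.extractBests`` in these
backends is ``fuzzywuzzy.process.extractBests``, which returns
``(choice, score)`` pairs scoring above the cutoff, e.g.:

.. code:: python

    from fuzzywuzzy import process as fuzzy

    pairs = ['XBTUSD', 'XMREUR', 'ETHUSD']
    matches = fuzzy.extractBests('xmr', pairs, score_cutoff=50)
    # e.g. -> [('XMREUR', 90)]; scores are approximate and scorer-dependent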
File diff suppressed because it is too large
@@ -0,0 +1,64 @@
``kraken`` backend
------------------
though they don't have the most liquidity of all the cexes they sure are
accommodating to those of us who appreciate a little ``xmr``.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [kraken]
    accounts.spot = 'spot'
    key_descr = "spot"
    api_key = "69696969696969696696969696969696969696969696969696969696"
    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    [TFJBKK-SMBZS-VJ4UWS]
    ordertxid = "SMBZSA-7CNQU-3HWLNJ"
    postxid = "SMBZSE-M7IF5-CFI7LT"
    pair = "XXMRZEUR"
    time = 1655691993.4133966
    type = "buy"
    ordertype = "limit"
    price = "103.97000000"
    cost = "499.99999977"
    fee = "0.80000000"
    vol = "4.80907954"
    margin = "0.00000000"
    misc = ""


your ``pps.toml`` file will have position entries like,

.. code:: toml

    [kraken.spot."xmreur.kraken"]
    size = 4.80907954
    ppu = 103.97000000
    bsuid = "XXMRZEUR"
    clears = [
        { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
    ]
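An illustrative aside (not part of the committed README): since the ledger is
plain TOML, a minimal sketch for inspecting it, assuming a generic ``toml``
reader and the (placeholder) conf path shown above:

.. code:: python

    import toml  # assumption: any TOML reader works here

    ledger = toml.load(
        '<pikerk_conf_dir>/ledgers/trades_kraken_spot.toml'  # placeholder path
    )
    total_vol = sum(float(rec['vol']) for rec in ledger.values())
    print(f'{len(ledger)} trades, total volume: {total_vol}')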
@@ -0,0 +1,61 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Kraken backend.

Sub-modules within break into the core functionalities:

- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
  wrapping around ``ib_insync``.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import (
    trades_dialogue,
    norm_trade_records,
)

__all__ = [
    'get_client',
    'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]
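A quick illustrative check (not part of the diff) that the package facade
above re-exports everything ``__all__`` advertises:

.. code:: python

    import importlib

    kraken = importlib.import_module('piker.brokers.kraken')  # path assumed
    for name in kraken.__all__:
        assert hasattr(kraken, name), f'missing re-export: {name}'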
@@ -0,0 +1,536 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Kraken web API wrapping.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
import itertools
from typing import (
    Any,
    Optional,
    Union,
)
import time

from bidict import bidict
import pendulum
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import urllib.parse
import hashlib
import hmac
import base64
import trio

from piker import config
from piker.brokers._util import (
    resproc,
    SymbolNotFound,
    BrokerError,
    DataThrottle,
)
from piker.pp import Transaction
from . import log

# <uri>/<version>/
_url = 'https://api.kraken.com/0'


# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('count', int),
    ('bar_wap', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
    'tick_decimals': 'pair_decimals',
}


def get_config() -> dict[str, Any]:

    conf, path = config.load()
    section = conf.get('kraken')

    if section is None:
        log.warning(f'No config section found for kraken in {path}')
        return {}

    return section


def get_kraken_signature(
    urlpath: str,
    data: dict[str, Any],
    secret: str
) -> str:
    postdata = urllib.parse.urlencode(data)
    encoded = (str(data['nonce']) + postdata).encode()
    message = urlpath.encode() + hashlib.sha256(encoded).digest()

    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
    sigdigest = base64.b64encode(mac.digest())
    return sigdigest.decode()
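
# --- illustrative aside, not part of the committed module ---
# the ``API-Sign`` scheme above is kraken's documented HMAC-SHA512 over
# ``uri_path + SHA256(nonce + urlencoded postdata)``. A standalone sketch
# exercising the helper (hypothetical request data, demo-only base64 secret):
def _demo_signature() -> str:
    data = {'nonce': str(int(1000*time.time())), 'asset': 'XXMR'}
    return get_kraken_signature(
        '/0/private/WithdrawStatus',
        data,
        secret='aGVsbG8gd29ybGQ=',  # base64 of "hello world", demo only
    )  # -> the base64 digest sent as the 'API-Sign' header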

class InvalidKey(ValueError):
    '''
    EAPI:Invalid key
    This error is returned when the API key used for the call is
    either expired or disabled, please review the API key in your
    Settings -> API tab of account management or generate a new one
    and update your application.

    '''


class Client:

    # global symbol normalization table
    _ntable: dict[str, str] = {}
    _atable: bidict[str, str] = bidict()

    def __init__(
        self,
        config: dict[str, str],
        name: str = '',
        api_key: str = '',
        secret: str = ''
    ) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
        self.conf: dict[str, str] = config
        self._pairs: list[str] = []
        self._name = name
        self._api_key = api_key
        self._secret = secret

    @property
    def pairs(self) -> dict[str, Any]:
        if self._pairs is None:
            raise RuntimeError(
                "Make sure to run `cache_symbols()` on startup!"
            )
            # retreive and cache all symbols

        return self._pairs

    async def _public(
        self,
        method: str,
        data: dict,
    ) -> dict[str, Any]:
        resp = await self._sesh.post(
            path=f'/public/{method}',
            json=data,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def _private(
        self,
        method: str,
        data: dict,
        uri_path: str
    ) -> dict[str, Any]:
        headers = {
            'Content-Type':
                'application/x-www-form-urlencoded',
            'API-Key':
                self._api_key,
            'API-Sign':
                get_kraken_signature(uri_path, data, self._secret)
        }
        resp = await self._sesh.post(
            path=f'/private/{method}',
            data=data,
            headers=headers,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def endpoint(
        self,
        method: str,
        data: dict[str, Any]

    ) -> dict[str, Any]:
        uri_path = f'/0/private/{method}'
        data['nonce'] = str(int(1000*time.time()))
        return await self._private(method, data, uri_path)

    async def get_balances(
        self,
    ) -> dict[str, float]:
        '''
        Return the set of asset balances for this account
        by symbol.

        '''
        resp = await self.endpoint(
            'Balance',
            {},
        )
        by_bsuid = resp['result']
        return {
            self._atable[sym].lower(): float(bal)
            for sym, bal in by_bsuid.items()
        }

    async def get_assets(self) -> dict[str, dict]:
        resp = await self._public('Assets', {})
        return resp['result']

    async def cache_assets(self) -> None:
        assets = self.assets = await self.get_assets()
        for bsuid, info in assets.items():
            self._atable[bsuid] = info['altname']

    async def get_trades(
        self,
        fetch_limit: int = 10,

    ) -> dict[str, Any]:
        '''
        Get the trades (aka cleared orders) history from the rest endpoint:
        https://docs.kraken.com/rest/#operation/getTradeHistory

        '''
        ofs = 0
        trades_by_id: dict[str, Any] = {}

        for i in itertools.count():
            if i >= fetch_limit:
                break

            # increment 'ofs' pagination offset
            ofs = i*50

            resp = await self.endpoint(
                'TradesHistory',
                {'ofs': ofs},
            )
            by_id = resp['result']['trades']
            trades_by_id.update(by_id)

            # we can get up to 50 results per query
            if (
                len(by_id) < 50
            ):
                err = resp.get('error')
                if err:
                    raise BrokerError(err)

                # we know we received the max amount of
                # trade results so there may be more history.
                # catch the end of the trades
                count = resp['result']['count']
                break

        # santity check on update
        assert count == len(trades_by_id.values())
        return trades_by_id

    async def get_xfers(
        self,
        asset: str,
        src_asset: str = '',

    ) -> dict[str, Transaction]:
        '''
        Get asset balance transfer transactions.

        Currently only withdrawals are supported.

        '''
        xfers: list[dict] = (await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
        ))['result']

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
        #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
        #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
        #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
        #     'amount': '0.00300726', 'fee': '0.00001000', 'time':
        #     1658347714, 'status': 'Success'}]}

        trans: dict[str, Transaction] = {}
        for entry in xfers:
            # look up the normalized name
            asset = self._atable[entry['asset']].lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commisions cost necessarily..)
            cost = float(entry['fee'])

            tran = Transaction(
                fqsn=asset + '.kraken',
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
                bsuid=f'{asset}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +
                    cost
                ),
                # since this will be treated as a "sell" it
                # shouldn't be needed to compute the be price.
                price='NaN',

                # XXX: see note above
                cost=0,
            )
            trans[tran.tid] = tran

        return trans

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float,
        reqid: str = None,
        validate: bool = False  # set True test call without a real submission

    ) -> dict:
        '''
        Place an order and return integer request id provided by client.

        '''
        # Build common data dict for common keys from both endpoints
        data = {
            "pair": symbol,
            "price": str(price),
            "validate": validate
        }
        if reqid is None:
            # Build order data for kraken api
            data |= {
                "ordertype": "limit",
                "type": action,
                "volume": str(size),
            }
            return await self.endpoint('AddOrder', data)

        else:
            # Edit order data for kraken api
            data["txid"] = reqid
            return await self.endpoint('EditOrder', data)

    async def submit_cancel(
        self,
        reqid: str,
    ) -> dict:
        '''
        Send cancel request for order id ``reqid``.

        '''
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})

    async def symbol_info(
        self,
        pair: Optional[str] = None,

    ) -> dict[str, dict[str, str]]:

        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return data
        else:
            return pairs

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

            ntable = {}
            for restapikey, info in self._pairs.items():
                ntable[restapikey] = ntable[info['wsname']] = info['altname']

            self._ntable.update(ntable)

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,
    ) -> dict[str, Any]:
        if self._pairs is not None:
            data = self._pairs
        else:
            data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0]['altname']: item[0] for item in matches}

    async def bars(
        self,
        symbol: str = 'XBTUSD',

        # UTC 2017-07-02 12:53:20
        since: Optional[Union[int, datetime]] = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if since is None:
            since = pendulum.now('UTC').start_of('minute').subtract(
                minutes=count).timestamp()

        elif isinstance(since, int):
            since = pendulum.from_timestamp(since).timestamp()

        else:  # presumably a pendulum datetime
            since = since.timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, int(since)))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            return array
        except KeyError:
            errmsg = json['error'][0]

            if 'not found' in errmsg:
                raise SymbolNotFound(errmsg + f': {symbol}')

            elif 'Too many requests' in errmsg:
                raise DataThrottle(f'{symbol}')

            else:
                raise BrokerError(errmsg)

    @classmethod
    def normalize_symbol(
        cls,
        ticker: str
    ) -> str:
        '''
        Normalize symbol names to to a 3x3 pair from the global
        definition map which we build out from the data retreived from
        the 'AssetPairs' endpoint, see methods above.

        '''
        ticker = cls._ntable[ticker]
        return ticker.lower()


@acm
async def get_client() -> Client:

    conf = get_config()
    if conf:
        client = Client(
            conf,
            name=conf['key_descr'],
            api_key=conf['api_key'],
            secret=conf['secret']
        )
    else:
        client = Client({})

    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as nurse:
        nurse.start_soon(client.cache_assets)
        await client.cache_symbols()

    yield client
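An illustrative aside (not part of the diff): a minimal sketch of driving this
client, assuming the module path implied by the new package layout:

.. code:: python

    import trio
    from piker.brokers.kraken.api import get_client  # path assumed from this diff

    async def main() -> None:
        async with get_client() as client:
            # assets/symbols are pre-cached by the startup nursery
            # inside ``get_client()`` above
            bars = await client.bars('XBTUSD')  # up to 720 1m bars per query
            print(bars[-1])  # most recent ohlc row (numpy structured array)

    trio.run(main)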
File diff suppressed because it is too large
@@ -0,0 +1,500 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Real-time and historical data feed endpoints.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import (
    Any,
    Optional,
    Callable,
)
import time

from async_generator import aclosing
from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
import tractor
import trio

from piker._cacheables import open_cached_client
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
from piker.log import get_console_log
from piker.data import ShmArray
from piker.data.types import Struct
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (
    Client,
)


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    cost_decimals: int
    costmin: float
    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
    tick_size: float  # min price step size
    status: str

    short_position_limit: float
    long_position_limit: float


class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
    https://docs.kraken.com/websockets/#message-ohlc

    '''
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1 (name-interval)
    pair: str  # fx pair
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
    high: float  # High price within interval
    low: float  # Low price within interval
    close: float  # Close price of interval
    vwap: float  # Volume weighted average price within interval
    volume: float  # Accumulated volume **within interval**
    count: int  # Number of trades within interval
    # (sampled) generated tick data
    ticks: list[Any] = []


async def stream_messages(
    ws: NoBsWs,
):
    '''
    Message stream parser and heartbeat handler.

    Deliver ws subscription messages as well as handle heartbeat logic
    though a single async generator.

    '''
    too_slow_count = last_hb = 0

    while True:

        with trio.move_on_after(5) as cs:
            msg = await ws.recv_msg()

        # trigger reconnection if heartbeat is laggy
        if cs.cancelled_caught:

            too_slow_count += 1

            if too_slow_count > 20:
                log.warning(
                    "Heartbeat is too slow, resetting ws connection")

                await ws._connect()
                too_slow_count = 0
                continue

        match msg:
            case {'event': 'heartbeat'}:
                now = time.time()
                delay = now - last_hb
                last_hb = now

                # XXX: why tf is this not printing without --tl flag?
                log.debug(f"Heartbeat after {delay}")
                # print(f"Heartbeat after {delay}")

                continue

            case _:
                # passthrough sub msgs
                yield msg


async def process_data_feed_msgs(
    ws: NoBsWs,
):
    '''
    Parse and pack data feed messages.

    '''
    async for msg in stream_messages(ws):
        match msg:
            case {
                'errorMessage': errmsg
            }:
                raise BrokerError(errmsg)

            case {
                'event': 'subscriptionStatus',
            } as sub:
                log.info(
                    'WS subscription is active:\n'
                    f'{sub}'
                )
                continue

            case [
                chan_id,
                *payload_array,
                chan_name,
                pair
            ]:
                if 'ohlc' in chan_name:
                    ohlc = OHLC(
                        chan_id,
                        chan_name,
                        pair,
                        *payload_array[0]
                    )
                    ohlc.typecast()
                    yield 'ohlc', ohlc

                elif 'spread' in chan_name:

                    bid, ask, ts, bsize, asize = map(
                        float, payload_array[0])

                    # TODO: really makes you think IB has a horrible API...
                    quote = {
                        'symbol': pair.replace('/', ''),
                        'ticks': [
                            {'type': 'bid', 'price': bid, 'size': bsize},
                            {'type': 'bsize', 'price': bid, 'size': bsize},

                            {'type': 'ask', 'price': ask, 'size': asize},
                            {'type': 'asize', 'price': ask, 'size': asize},
                        ],
                    }
                    yield 'l1', quote

                # elif 'book' in msg[-2]:
                #     chan_id, *payload_array, chan_name, pair = msg
                #     print(msg)

            case _:
                print(f'UNHANDLED MSG: {msg}')
                # yield msg
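
# --- illustrative aside, not part of the committed module ---
# a raw kraken ``ohlc-1`` ws payload destructures exactly like the
# ``case [chan_id, *payload_array, chan_name, pair]`` branch above
# (field values here are made up for the sketch):
_demo_msg = [
    42,  # chan_id
    ['1655691993.4', '1655692020.0', '103.97', '104.10',
     '103.90', '104.05', '104.01', '4.809', 7],  # time..count fields
    'ohlc-1',  # chan_name
    'XMR/EUR',  # pair
]
_chan_id, *_payload_array, _chan_name, _pair = _demo_msg
assert _payload_array[0][-1] == 7  # trade count, the last positional field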

def normalize(
    ohlc: OHLC,

) -> dict:
    quote = ohlc.to_dict()
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercases symbols.
    topic = quote['pair'].replace('/', '').lower()

    # print(quote)
    return topic, quote


@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more then the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            timeframe: float,
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            nonlocal queries
            if (
                queries > 0
                or timeframe != 60
            ):
                raise DataUnavailable(
                    'Only a single query for 1m bars supported')

            count = 0
            while count <= 3:
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    count += 1
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 1, 'rate': 1}


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}
    sym_infos = {}

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()
            sym_info = await client.symbol_info(sym)
            try:
                si = Pair(**sym_info)  # validation
            except TypeError:
                fields_diff = set(sym_info) - set(Pair.__struct_fields__)
                raise TypeError(
                    f'Missing msg fields {fields_diff}'
                )
            syminfo = si.to_dict()
            syminfo['price_tick_size'] = 1 / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals
            syminfo['asset_type'] = 'crypto'
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: NoBsWs):

            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'ohlc',
                    'interval': 1,
                },
            }

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'spread',
                    # 'depth': 10}
                },
            }

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            if ws.connected():
                await ws.send_msg({
                    'pair': list(ws_pairs.values()),
                    'event': 'unsubscribe',
                    'subscription': ['ohlc', 'spread'],
                })

                # XXX: do we need to ack the unsub?
                # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with (
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
            ) as ws,
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs, quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                await send_chan.send({topic: quote})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send(
                    {item[0]['altname']: item[0]
                     for item in matches}
                )
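An illustrative aside (not part of the diff): exercising the history endpoint,
assuming a running piker data layer so ``open_cached_client('kraken')`` can
resolve, and the module path implied by the new layout:

.. code:: python

    import trio
    from piker.brokers.kraken.feed import open_history_client  # path assumed

    async def main() -> None:
        async with open_history_client('XBTUSD') as (get_ohlc, limits):
            # only a single 1m-bars query is supported per the guard above
            array, start_dt, end_dt = await get_ohlc(60)
            print(f'{len(array)} bars: {start_dt} -> {end_dt}, limits: {limits}')

    trio.run(main)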
@@ -22,54 +22,10 @@ from enum import Enum
 from typing import Optional
 
 from bidict import bidict
-from pydantic import BaseModel, validator
 
 from ..data._source import Symbol
-from ._messages import BrokerdPosition, Status
-
-
-class Position(BaseModel):
-    '''
-    Basic pp (personal position) model with attached fills history.
-
-    This type should be IPC wire ready?
-
-    '''
-    symbol: Symbol
-
-    # last size and avg entry price
-    size: float
-    avg_price: float  # TODO: contextual pricing
-
-    # ordered record of known constituent trade messages
-    fills: list[Status] = []
-
-    def update_from_msg(
-        self,
-        msg: BrokerdPosition,
-
-    ) -> None:
-
-        # XXX: better place to do this?
-        symbol = self.symbol
-
-        lot_size_digits = symbol.lot_size_digits
-        avg_price, size = (
-            round(msg['avg_price'], ndigits=symbol.tick_size_digits),
-            round(msg['size'], ndigits=lot_size_digits),
-        )
-
-        self.avg_price = avg_price
-        self.size = size
-
-    @property
-    def dsize(self) -> float:
-        '''
-        The "dollar" size of the pp, normally in trading (fiat) unit
-        terms.
-
-        '''
-        return self.avg_price * self.size
-
+from ..data.types import Struct
+from ..pp import Position
 
 _size_units = bidict({
@@ -84,34 +40,9 @@ SizeUnit = Enum(
 )
 
 
-class Allocator(BaseModel):
-
-    class Config:
-        validate_assignment = True
-        copy_on_model_validation = False
-        arbitrary_types_allowed = True
-
-        # required to get the account validator lookup working?
-        extra = 'allow'
-        underscore_attrs_are_private = False
+class Allocator(Struct):
 
     symbol: Symbol
-    account: Optional[str] = 'paper'
-    # TODO: for enums this clearly doesn't fucking work, you can't set
-    # a default at startup by passing in a `dict` but yet you can set
-    # that value through assignment..for wtv cucked reason.. honestly, pure
-    # unintuitive garbage.
-    size_unit: str = 'currency'
-    _size_units: dict[str, Optional[str]] = _size_units
-
-    @validator('size_unit', pre=True)
-    def maybe_lookup_key(cls, v):
-        # apply the corresponding enum key for the text "description" value
-        if v not in _size_units:
-            return _size_units.inverse[v]
-
-        assert v in _size_units
-        return v
 
     # TODO: if we ever want ot support non-uniform entry-slot-proportion
     # "sizes"
@@ -120,6 +51,28 @@ class Allocator(BaseModel):
     units_limit: float
     currency_limit: float
     slots: int
+    account: Optional[str] = 'paper'
+
+    _size_units: bidict[str, Optional[str]] = _size_units
+
+    # TODO: for enums this clearly doesn't fucking work, you can't set
+    # a default at startup by passing in a `dict` but yet you can set
+    # that value through assignment..for wtv cucked reason.. honestly, pure
+    # unintuitive garbage.
+    _size_unit: str = 'currency'
+
+    @property
+    def size_unit(self) -> str:
+        return self._size_unit
+
+    @size_unit.setter
+    def size_unit(self, v: str) -> Optional[str]:
+        if v not in _size_units:
+            v = _size_units.inverse[v]
+
+        assert v in _size_units
+        self._size_unit = v
+        return v
 
     def step_sizes(
         self,
@@ -140,10 +93,13 @@ class Allocator(BaseModel):
         else:
             return self.units_limit
 
+    def limit_info(self) -> tuple[str, float]:
+        return self.size_unit, self.limit()
+
     def next_order_info(
         self,
 
-        # we only need a startup size for exit calcs, we can the
+        # we only need a startup size for exit calcs, we can then
         # determine how large slots should be if the initial pp size was
         # larger then the current live one, and the live one is smaller
         # then the initial config settings.
@@ -173,7 +129,7 @@ class Allocator(BaseModel):
             l_sub_pp = self.units_limit - abs_live_size
 
         elif size_unit == 'currency':
-            live_cost_basis = abs_live_size * live_pp.avg_price
+            live_cost_basis = abs_live_size * live_pp.ppu
             slot_size = currency_per_slot / price
             l_sub_pp = (self.currency_limit - live_cost_basis) / price
 
@@ -184,12 +140,14 @@ class Allocator(BaseModel):
 
         # an entry (adding-to or starting a pp)
         if (
-            action == 'buy' and live_size > 0 or
-            action == 'sell' and live_size < 0 or
             live_size == 0
+            or (action == 'buy' and live_size > 0)
+            or action == 'sell' and live_size < 0
         ):
-            order_size = min(slot_size, l_sub_pp)
+            order_size = min(
+                slot_size,
+                max(l_sub_pp, 0),
+            )
 
         # an exit (removing-from or going to net-zero pp)
         else:
@@ -205,7 +163,7 @@ class Allocator(BaseModel):
             if size_unit == 'currency':
                 # compute the "projected" limit's worth of units at the
                 # current pp (weighted) price:
-                slot_size = currency_per_slot / live_pp.avg_price
+                slot_size = currency_per_slot / live_pp.ppu
 
             else:
                 slot_size = u_per_slot
@@ -244,7 +202,12 @@ class Allocator(BaseModel):
         if order_size < slot_size:
             # compute a fractional slots size to display
             slots_used = self.slots_used(
-                Position(symbol=sym, size=order_size, avg_price=price)
+                Position(
+                    symbol=sym,
+                    size=order_size,
+                    ppu=price,
+                    bsuid=sym,
+                )
             )
 
         return {
@@ -271,8 +234,8 @@ class Allocator(BaseModel):
         abs_pp_size = abs(pp.size)
 
         if self.size_unit == 'currency':
-            # live_currency_size = size or (abs_pp_size * pp.avg_price)
-            live_currency_size = abs_pp_size * pp.avg_price
+            # live_currency_size = size or (abs_pp_size * pp.ppu)
+            live_currency_size = abs_pp_size * pp.ppu
             prop = live_currency_size / self.currency_limit
 
         else:
@@ -284,14 +247,6 @@ class Allocator(BaseModel):
         return round(prop * self.slots)
 
 
-_derivs = (
-    'future',
-    'continuous_future',
-    'option',
-    'futures_option',
-)
-
-
 def mk_allocator(
 
     symbol: Symbol,
@@ -300,7 +255,7 @@ def mk_allocator(
     # default allocation settings
     defaults: dict[str, float] = {
         'account': None,  # select paper by default
-        'size_unit': 'currency',
+        # 'size_unit': 'currency',
        'units_limit': 400,
        'currency_limit': 5e3,
        'slots': 4,
@@ -318,42 +273,9 @@ def mk_allocator(
        'currency_limit': 6e3,
        'slots': 6,
    }
 
    defaults.update(user_def)
 
-    alloc = Allocator(
+    return Allocator(
        symbol=symbol,
        **defaults,
    )
 
-    asset_type = symbol.type_key
-
-    # specific configs by asset class / type
-
-    if asset_type in _derivs:
-        # since it's harder to know how currency "applies" in this case
-        # given leverage properties
-        alloc.size_unit = '# units'
-
-        # set units limit to slots size thus making make the next
-        # entry step 1.0
-        alloc.units_limit = alloc.slots
-
-    # if the current position is already greater then the limit
-    # settings, increase the limit to the current position
-    if alloc.size_unit == 'currency':
-        startup_size = startup_pp.size * startup_pp.avg_price
-
-        if startup_size > alloc.currency_limit:
-            alloc.currency_limit = round(startup_size, ndigits=2)
-
-    else:
-        startup_size = abs(startup_pp.size)
-
-        if startup_size > alloc.units_limit:
-            alloc.units_limit = startup_size
-
-    if asset_type in _derivs:
-        alloc.slots = alloc.units_limit
-
-    return alloc
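Behaviorally the new property pair mirrors the removed ``pydantic`` validator.
A sketch (the exact ``_size_units`` entries live in an elided hunk, so the
``'# units'`` description value is assumed from the removed
``alloc.size_unit = '# units'`` usage above):

.. code:: python

    alloc = Allocator(
        symbol=symbol,  # assumed: a piker ``Symbol`` instance in scope
        units_limit=400,
        currency_limit=5e3,
        slots=4,
    )
    alloc.size_unit = 'currency'  # a ``_size_units`` key passes through as-is
    alloc.size_unit = '# units'   # a description value is reverse-mapped to
                                  # its key via ``_size_units.inverse``
    assert alloc.size_unit in _size_units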
@@ -19,25 +19,24 @@ Orders and execution client API.
 
 """
 from contextlib import asynccontextmanager as acm
-from typing import Dict
 from pprint import pformat
-from dataclasses import dataclass, field
 
 import trio
 import tractor
 from tractor.trionics import broadcast_receiver
 
 from ..log import get_logger
+from ..data.types import Struct
 from ._ems import _emsd_main
 from .._daemon import maybe_open_emsd
 from ._messages import Order, Cancel
+from ..brokers import get_brokermod
 
 
 log = get_logger(__name__)
 
 
-@dataclass
-class OrderBook:
+class OrderBook(Struct):
     '''EMS-client-side order book ctl and tracking.
 
     A style similar to "model-view" is used here where this api is
@@ -52,20 +51,18 @@ class OrderBook:
     # mem channels used to relay order requests to the EMS daemon
     _to_ems: trio.abc.SendChannel
     _from_order_book: trio.abc.ReceiveChannel
-    _sent_orders: Dict[str, Order] = field(default_factory=dict)
-    _ready_to_receive: trio.Event = trio.Event()
+    _sent_orders: dict[str, Order] = {}
 
     def send(
         self,
-        msg: Order,
+        msg: Order | dict,
 
     ) -> dict:
         self._sent_orders[msg.oid] = msg
-        self._to_ems.send_nowait(msg.dict())
+        self._to_ems.send_nowait(msg)
         return msg
 
-    def update(
+    def send_update(
         self,
 
         uuid: str,
@@ -73,9 +70,8 @@ class OrderBook:
 
     ) -> dict:
         cmd = self._sent_orders[uuid]
-        msg = cmd.dict()
-        msg.update(data)
-        self._sent_orders[uuid] = Order(**msg)
+        msg = cmd.copy(update=data)
+        self._sent_orders[uuid] = msg
         self._to_ems.send_nowait(msg)
         return cmd
 
@@ -83,12 +79,18 @@ class OrderBook:
         """Cancel an order (or alert) in the EMS.
 
         """
-        cmd = self._sent_orders[uuid]
+        cmd = self._sent_orders.get(uuid)
+        if not cmd:
+            log.error(
+                f'Unknown order {uuid}!?\n'
+                f'Maybe there is a stale entry or line?\n'
+                f'You should report this as a bug!'
+            )
         msg = Cancel(
             oid=uuid,
             symbol=cmd.symbol,
         )
-        self._to_ems.send_nowait(msg.dict())
+        self._to_ems.send_nowait(msg)
 
 
 _orders: OrderBook = None
@@ -149,10 +151,17 @@ async def relay_order_cmds_from_sync_code(
     book = get_orders()
     async with book._from_order_book.subscribe() as orders_stream:
         async for cmd in orders_stream:
-            if cmd['symbol'] == symbol_key:
-                log.info(f'Send order cmd:\n{pformat(cmd)}')
+            sym = cmd.symbol
+            msg = pformat(cmd)
+            if sym == symbol_key:
+                log.info(f'Send order cmd:\n{msg}')
                 # send msg over IPC / wire
                 await to_ems_stream.send(cmd)
+            else:
+                log.warning(
+                    f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
+                    f'\n{msg}'
+                )
 
 
 @acm
@@ -204,20 +213,35 @@ async def open_ems(
     from ..data._source import unpack_fqsn
     broker, symbol, suffix = unpack_fqsn(fqsn)
 
+    mode: str = 'live'
+
     async with maybe_open_emsd(broker) as portal:
 
+        mod = get_brokermod(broker)
+        if not getattr(mod, 'trades_dialogue', None):
+            mode = 'paper'
+
         async with (
             # connect to emsd
             portal.open_context(
+
                 _emsd_main,
                 fqsn=fqsn,
+                exec_mode=mode,
+
-            ) as (ctx, (positions, accounts)),
+            ) as (
+                ctx,
+                (
+                    positions,
+                    accounts,
+                    dialogs,
+                )
+            ),
 
             # open 2-way trade command stream
             ctx.open_stream() as trades_stream,
         ):
+            # start sync code order msg delivery task
             async with trio.open_nursery() as n:
                 n.start_soon(
                     relay_order_cmds_from_sync_code,
@@ -225,4 +249,10 @@ async def open_ems(
                     trades_stream
                 )
 
-                yield book, trades_stream, positions, accounts
+                yield (
+                    book,
+                    trades_stream,
+                    positions,
+                    accounts,
+                    dialogs,
+                )
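Call sites unpack the widened return like so (a sketch; the fqsn value and
module path are illustrative, and a live ``emsd`` is assumed):

.. code:: python

    from piker.clearing._client import open_ems  # module shown in this diff

    async def consume() -> None:
        async with open_ems('xmreur.kraken') as (
            book,
            trades_stream,
            positions,
            accounts,
            dialogs,  # new 5th element added by this change
        ):
            async for msg in trades_stream:
                print(msg)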
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -15,108 +15,160 @@
 # along with this program. If not, see <https://www.gnu.org/licenses/>.

 """
-Clearing system messagingn types and protocols.
+Clearing sub-system message and protocols.

 """
-from typing import Optional, Union
-
-# TODO: try out just encoding/send direction for now?
-# import msgspec
-from pydantic import BaseModel
+# from collections import (
+#     ChainMap,
+#     deque,
+# )
+from typing import (
+    Optional,
+    Literal,
+)

 from ..data._source import Symbol
+from ..data.types import Struct
+
+
+# TODO: a composite for tracking msg flow on 2-legged
+# dialogs.
+# class Dialog(ChainMap):
+#     '''
+#     Msg collection abstraction to easily track the state changes of
+#     a msg flow in one high level, query-able and immutable construct.
+
+#     The main use case is to query data from a (long-running)
+#     msg-transaction-sequence
+
+
+#     '''
+#     def update(
+#         self,
+#         msg,
+#     ) -> None:
+#         self.maps.insert(0, msg.to_dict())
+
+#     def flatten(self) -> dict:
+#         return dict(self)
+
+
+# TODO: ``msgspec`` stuff worth paying attention to:
+# - schema evolution:
+#   https://jcristharif.com/msgspec/usage.html#schema-evolution
+# - for eg. ``BrokerdStatus``, instead just have separate messages?
+# - use literals for a common msg determined by diff keys?
+#   - https://jcristharif.com/msgspec/usage.html#literal

+# --------------
 # Client -> emsd
+# --------------

+class Order(Struct):

-class Cancel(BaseModel):
-    '''Cancel msg for removing a dark (ems triggered) or
-    broker-submitted (live) trigger/order.
-
-    '''
-    action: str = 'cancel'
-    oid: str  # uuid4
-    symbol: str
-
-
-class Order(BaseModel):
-
-    action: str  # {'buy', 'sell', 'alert'}
-    # internal ``emdsd`` unique "order id"
-    oid: str  # uuid4
-    symbol: Union[str, Symbol]
-    account: str  # should we set a default as '' ?
-
-    price: float
-    size: float
-    brokers: list[str]
-
-    # Assigned once initial ack is received
-    # ack_time_ns: Optional[int] = None
+    # TODO: ideally we can combine these 2 fields into
+    # 1 and just use the size polarity to determine a buy/sell.
+    # i would like to see this become more like
+    # https://jcristharif.com/msgspec/usage.html#literal
+    # action: Literal[
+    #     'live',
+    #     'dark',
+    #     'alert',
+    # ]
+
+    action: Literal[
+        'buy',
+        'sell',
+        'alert',
+    ]
     # determines whether the create execution
     # will be submitted to the ems or directly to
     # the backend broker
-    exec_mode: str  # {'dark', 'live', 'paper'}
+    exec_mode: Literal[
+        'dark',
+        'live',
+        # 'paper', no right?
+    ]

-    class Config:
-        # just for pre-loading a ``Symbol`` when used
-        # in the order mode staging process
-        arbitrary_types_allowed = True
-        # don't copy this model instance when used in
-        # a recursive model
-        copy_on_model_validation = False
+    # internal ``emdsd`` unique "order id"
+    oid: str  # uuid4
+    symbol: str | Symbol
+    account: str  # should we set a default as '' ?
+
+    price: float
+    size: float  # -ve is "sell", +ve is "buy"
+
+    brokers: Optional[list[str]] = []
+
+
+class Cancel(Struct):
+    '''
+    Cancel msg for removing a dark (ems triggered) or
+    broker-submitted (live) trigger/order.
+
+    '''
+    oid: str  # uuid4
+    symbol: str
+    action: str = 'cancel'
+
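
The move off `pydantic.BaseModel` onto `Struct` (piker's wrapper around `msgspec.Struct`, judging by the `..data.types` import) makes these msgs plain, fast, wire-native structs. A minimal round-trip sketch using `msgspec` directly, with illustrative field values:

    import msgspec

    class Cancel(msgspec.Struct):
        oid: str  # uuid4
        symbol: str
        action: str = 'cancel'

    msg = Cancel(oid='a1b2c3d4', symbol='xbtusdt.kraken')
    wire = msgspec.msgpack.encode(msg)                # compact bytes for IPC
    back = msgspec.msgpack.decode(wire, type=Cancel)  # typed decode + validation
    assert back == msg
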
+# --------------
 # Client <- emsd
+# --------------
 # update msgs from ems which relay state change info
 # from the active clearing engine.

-class Status(BaseModel):
+class Status(Struct):

-    name: str = 'status'
-    oid: str  # uuid4
-    time_ns: int
-
-    # {
-    # 'dark_submitted',
-    # 'dark_cancelled',
-    # 'dark_triggered',
-
-    # 'broker_submitted',
-    # 'broker_cancelled',
-    # 'broker_executed',
-    # 'broker_filled',
-    # 'broker_errored',
-
-    # 'alert_submitted',
-    # 'alert_triggered',
-
-    # }
-    resp: str  # "response", see above
-
-    # symbol: str
-
-    # trigger info
-    trigger_price: Optional[float] = None
-    # price: float
-
-    # broker: Optional[str] = None
+    time_ns: int
+    oid: str  # uuid4 ems-order dialog id
+
+    resp: Literal[
+        'pending',  # acked by broker but not yet open
+        'open',
+        'dark_open',  # dark/algo triggered order is open in ems clearing loop
+        'triggered',  # above triggered order sent to brokerd, or an alert closed
+        'closed',  # fully cleared all size/units
+        'fill',  # partial execution
+        'canceled',
+        'error',
+    ]
+
+    name: str = 'status'

     # this maps normally to the ``BrokerdOrder.reqid`` below, an id
     # normally allocated internally by the backend broker routing system
-    broker_reqid: Optional[Union[int, str]] = None
+    reqid: Optional[int | str] = None

-    # for relaying backend msg data "through" the ems layer
+    # the (last) source order/request msg if provided
+    # (eg. the Order/Cancel which causes this msg) and
+    # acts as a back-reference to the corresponding
+    # request message which was the source of this msg.
+    req: Order | None = None
+
+    # XXX: better design/name here?
+    # flag that can be set to indicate a message for an order
+    # event that wasn't originated by piker's emsd (eg. some external
+    # trading system which does it's own order control but that you
+    # might want to "track" using piker UIs/systems).
+    src: Optional[str] = None
+
+    # set when a cancel request msg was set for this order flow dialog
+    # but the brokerd dialog isn't yet in a cancelled state.
+    cancel_called: bool = False
+
+    # for relaying a boxed brokerd-dialog-side msg data "through" the
+    # ems layer to clients.
     brokerd_msg: dict = {}

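
One consequence of the `Literal`-typed `resp` field worth noting: `msgspec` enforces literal membership at decode time rather than at construction. A sketch with plain `msgspec` and a trimmed field set:

    from typing import Literal
    import msgspec

    class Status(msgspec.Struct):
        oid: str
        resp: Literal['pending', 'open', 'closed', 'error']

    ok = msgspec.json.decode(b'{"oid": "x", "resp": "open"}', type=Status)

    try:
        msgspec.json.decode(b'{"oid": "x", "resp": "bogus"}', type=Status)
    except msgspec.ValidationError as err:
        print(err)  # invalid literal value caught at the wire boundary
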
+# ---------------
 # emsd -> brokerd
+# ---------------
 # requests *sent* from ems to respective backend broker daemon

-class BrokerdCancel(BaseModel):
+class BrokerdCancel(Struct):

-    action: str = 'cancel'
     oid: str  # piker emsd order id
     time_ns: int

@@ -127,34 +179,39 @@ class BrokerdCancel(BaseModel):
     # for setting a unique order id then this value will be relayed back
     # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
     # field
-    reqid: Optional[Union[int, str]] = None
+    reqid: Optional[int | str] = None
+    action: str = 'cancel'


-class BrokerdOrder(BaseModel):
+class BrokerdOrder(Struct):

-    action: str  # {buy, sell}
     oid: str
     account: str
     time_ns: int

+    symbol: str  # fqsn
+    price: float
+    size: float
+
+    # TODO: if we instead rely on a +ve/-ve size to determine
+    # the action we more or less don't need this field right?
+    action: str = ''  # {buy, sell}
+
     # "broker request id": broker specific/internal order id if this is
     # None, creates a new order otherwise if the id is valid the backend
     # api must modify the existing matching order. If the broker allows
     # for setting a unique order id then this value will be relayed back
     # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
     # field
-    reqid: Optional[Union[int, str]] = None
-
-    symbol: str  # symbol.<providername> ?
-    price: float
-    size: float
+    reqid: Optional[int | str] = None


+# ---------------
 # emsd <- brokerd
+# ---------------
 # requests *received* to ems from broker backend

-class BrokerdOrderAck(BaseModel):
+class BrokerdOrderAck(Struct):
     '''
     Immediate reponse to a brokerd order request providing the broker
     specific unique order id so that the EMS can associate this
@@ -162,42 +219,35 @@ class BrokerdOrderAck(BaseModel):
     ``.oid`` (which is a uuid4).

     '''
-    name: str = 'ack'
-
     # defined and provided by backend
-    reqid: Union[int, str]
+    reqid: int | str

     # emsd id originally sent in matching request msg
     oid: str
     account: str = ''
+    name: str = 'ack'


-class BrokerdStatus(BaseModel):
+class BrokerdStatus(Struct):

-    name: str = 'status'
-    reqid: Union[int, str]
+    reqid: int | str
     time_ns: int
+    status: Literal[
+        'open',
+        'canceled',
+        'fill',
+        'pending',
+        'error',
+    ]

-    # XXX: should be best effort set for every update
-    account: str = ''
+    account: str
+    name: str = 'status'

-    # {
-    # 'submitted',
-    # 'cancelled',
-    # 'filled',
-    # }
-    status: str
-
     filled: float = 0.0
     reason: str = ''
     remaining: float = 0.0

-    # XXX: better design/name here?
-    # flag that can be set to indicate a message for an order
-    # event that wasn't originated by piker's emsd (eg. some external
-    # trading system which does it's own order control but that you
-    # might want to "track" using piker UIs/systems).
-    external: bool = False
+    # external: bool = False

     # XXX: not required schema as of yet
     broker_details: dict = {
@@ -205,59 +255,57 @@ class BrokerdStatus(BaseModel):
     }


-class BrokerdFill(BaseModel):
+class BrokerdFill(Struct):
     '''
     A single message indicating a "fill-details" event from the broker
     if avaiable.

     '''
-    name: str = 'fill'
-    reqid: Union[int, str]
-    time_ns: int
-
-    # order exeuction related
-    action: str
-    size: float
-    price: float
-
-    broker_details: dict = {}  # meta-data (eg. commisions etc.)
-
     # brokerd timestamp required for order mode arrow placement on x-axis

     # TODO: maybe int if we force ns?
     # we need to normalize this somehow since backends will use their
     # own format and likely across many disparate epoch clocks...
     broker_time: float
+    reqid: int | str
+    time_ns: int
+
+    # order exeuction related
+    size: float
+    price: float
+
+    name: str = 'fill'
+    action: Optional[str] = None
+    broker_details: dict = {}  # meta-data (eg. commisions etc.)


-class BrokerdError(BaseModel):
+class BrokerdError(Struct):
     '''
     Optional error type that can be relayed to emsd for error handling.

     This is still a TODO thing since we're not sure how to employ it yet.

     '''
-    name: str = 'error'
     oid: str
+    symbol: str
+    reason: str

     # if no brokerd order request was actually submitted (eg. we errored
     # at the ``pikerd`` layer) then there will be ``reqid`` allocated.
-    reqid: Optional[Union[int, str]] = None
+    reqid: Optional[int | str] = None

-    symbol: str
-    reason: str
+    name: str = 'error'
     broker_details: dict = {}


-class BrokerdPosition(BaseModel):
+class BrokerdPosition(Struct):
     '''Position update event from brokerd.

     '''
-    name: str = 'position'
-
     broker: str
     account: str
     symbol: str
-    currency: str
     size: float
     avg_price: float
+    currency: str = ''
+    name: str = 'position'
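
A pattern repeated across the structs above: fields with defaults (`name`, `action`, `broker_details`, `currency`) migrate to the bottom of each class. Like dataclasses, `msgspec.Struct` rejects a required field declared after a defaulted one, which is presumably what forced the reordering. A sketch:

    import msgspec

    try:
        class Bad(msgspec.Struct):
            name: str = 'fill'  # defaulted field first...
            reqid: int          # ...required field after it
    except TypeError as err:
        print(err)  # rejected at class-definition time

    class Good(msgspec.Struct):
        reqid: int          # required fields lead
        name: str = 'fill'  # defaults trail
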
@@ -18,54 +18,71 @@
 Fake trading for forward testing.

 """
+from collections import defaultdict
 from contextlib import asynccontextmanager
 from datetime import datetime
 from operator import itemgetter
+import itertools
 import time
-from typing import Tuple, Optional, Callable
+from typing import (
+    Any,
+    Optional,
+    Callable,
+)
 import uuid

 from bidict import bidict
+import pendulum
 import trio
 import tractor
-from dataclasses import dataclass

 from .. import data
+from ..data._source import Symbol
+from ..data.types import Struct
+from ..pp import (
+    Position,
+    Transaction,
+)
 from ..data._normalize import iterticks
 from ..data._source import unpack_fqsn
 from ..log import get_logger
 from ._messages import (
-    BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,
-    BrokerdFill, BrokerdPosition, BrokerdError
+    BrokerdCancel,
+    BrokerdOrder,
+    BrokerdOrderAck,
+    BrokerdStatus,
+    BrokerdFill,
+    BrokerdPosition,
+    BrokerdError,
 )


 log = get_logger(__name__)


-@dataclass
-class PaperBoi:
-    """
-    Emulates a broker order client providing the same API and
-    delivering an order-event response stream but with methods for
+class PaperBoi(Struct):
+    '''
+    Emulates a broker order client providing approximately the same API
+    and delivering an order-event response stream but with methods for
     triggering desired events based on forward testing engine
-    requirements.
+    requirements (eg open, closed, fill msgs).

-    """
+    '''
     broker: str

     ems_trades_stream: tractor.MsgStream

     # map of paper "live" orders which be used
     # to simulate fills based on paper engine settings
-    _buys: bidict
-    _sells: bidict
+    _buys: defaultdict[str, bidict]
+    _sells: defaultdict[str, bidict]
     _reqids: bidict
-    _positions: dict[str, BrokerdPosition]
+    _positions: dict[str, Position]
+    _trade_ledger: dict[str, Any]

     # init edge case L1 spread
-    last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size
-    last_bid: Tuple[float, float] = (0, 0)
+    last_ask: tuple[float, float] = (float('inf'), 0)  # price, size
+    last_bid: tuple[float, float] = (0, 0)

     async def submit_limit(
         self,
@@ -75,27 +92,24 @@ class PaperBoi:
         action: str,
         size: float,
         reqid: Optional[str],

     ) -> int:
-        """Place an order and return integer request id provided by client.
-        """
-        is_modify: bool = False
-        if reqid is None:
-            reqid = str(uuid.uuid4())
-
-        else:
-            # order is already existing, this is a modify
-            (oid, symbol, action, old_price) = self._reqids[reqid]
-            assert old_price != price
-            is_modify = True
-
-        # register order internally
-        self._reqids[reqid] = (oid, symbol, action, price)
-
+        '''
+        Place an order and return integer request id provided by client.
+
+        '''
         if action == 'alert':
             # bypass all fill simulation
             return reqid

+        entry = self._reqids.get(reqid)
+        if entry:
+            # order is already existing, this is a modify
+            (oid, symbol, action, old_price) = entry
+        else:
+            # register order internally
+            self._reqids[reqid] = (oid, symbol, action, price)
+
         # TODO: net latency model
         # we checkpoint here quickly particulalry
         # for dark orders since we want the dark_executed
@@ -107,15 +121,18 @@ class PaperBoi:
             size = -size

         msg = BrokerdStatus(
-            status='submitted',
+            status='open',
+            # account=f'paper_{self.broker}',
+            account='paper',
             reqid=reqid,
-            broker=self.broker,
             time_ns=time.time_ns(),
             filled=0.0,
             reason='paper_trigger',
             remaining=size,
+
+            broker_details={'name': 'paperboi'},
         )
-        await self.ems_trades_stream.send(msg.dict())
+        await self.ems_trades_stream.send(msg)

         # if we're already a clearing price simulate an immediate fill
         if (
@@ -123,28 +140,28 @@ class PaperBoi:
         ) or (
             action == 'sell' and (clear_price := self.last_bid[0]) >= price
         ):
-            await self.fake_fill(symbol, clear_price, size, action, reqid, oid)
+            await self.fake_fill(
+                symbol,
+                clear_price,
+                size,
+                action,
+                reqid,
+                oid,
+            )

+        # register this submissions as a paper live order
         else:
-            # register this submissions as a paper live order
-            # submit order to book simulation fill loop
+            # set the simulated order in the respective table for lookup
+            # and trigger by the simulated clearing task normally
+            # running ``simulate_fills()``.
             if action == 'buy':
                 orders = self._buys

             elif action == 'sell':
                 orders = self._sells

-            # set the simulated order in the respective table for lookup
-            # and trigger by the simulated clearing task normally
-            # running ``simulate_fills()``.
-            if is_modify:
-                # remove any existing order for the old price
-                orders[symbol].pop((oid, old_price))
-
-            # buys/sells: (symbol -> (price -> order))
-            orders.setdefault(symbol, {})[(oid, price)] = (size, reqid, action)
+            # {symbol -> bidict[oid, (<price data>)]}
+            orders[symbol][oid] = (price, size, reqid, action)

         return reqid

@@ -157,26 +174,26 @@ class PaperBoi:
         oid, symbol, action, price = self._reqids[reqid]

         if action == 'buy':
-            self._buys[symbol].pop((oid, price))
+            self._buys[symbol].pop(oid, None)
         elif action == 'sell':
-            self._sells[symbol].pop((oid, price))
+            self._sells[symbol].pop(oid, None)

         # TODO: net latency model
         await trio.sleep(0.05)

         msg = BrokerdStatus(
-            status='cancelled',
-            oid=oid,
+            status='canceled',
+            account='paper',
             reqid=reqid,
-            broker=self.broker,
             time_ns=time.time_ns(),
+            broker_details={'name': 'paperboi'},
         )
-        await self.ems_trades_stream.send(msg.dict())
+        await self.ems_trades_stream.send(msg)

     async def fake_fill(
         self,

-        symbol: str,
+        fqsn: str,
         price: float,
         size: float,
         action: str,  # one of {'buy', 'sell'}
@@ -190,21 +207,21 @@ class PaperBoi:
         remaining: float = 0,

     ) -> None:
-        """Pretend to fill a broker order @ price and size.
-
-        """
+        '''
+        Pretend to fill a broker order @ price and size.
+
+        '''
         # TODO: net latency model
         await trio.sleep(0.05)
+        fill_time_ns = time.time_ns()
+        fill_time_s = time.time()

-        msg = BrokerdFill(
-
+        fill_msg = BrokerdFill(
             reqid=reqid,
-            time_ns=time.time_ns(),
+            time_ns=fill_time_ns,

             action=action,
             size=size,
             price=price,

             broker_time=datetime.now().timestamp(),
             broker_details={
                 'paper_info': {
@@ -214,79 +231,67 @@ class PaperBoi:
                     'name': self.broker + '_paper',
                 },
         )
-        await self.ems_trades_stream.send(msg.dict())
+        log.info(f'Fake filling order:\n{fill_msg}')
+        await self.ems_trades_stream.send(fill_msg)
+
+        self._trade_ledger.update(fill_msg.to_dict())

         if order_complete:
-
             msg = BrokerdStatus(
-
                 reqid=reqid,
                 time_ns=time.time_ns(),
-                status='filled',
+                # account=f'paper_{self.broker}',
+                account='paper',
+                status='closed',
                 filled=size,
                 remaining=0 if order_complete else remaining,
-
-                action=action,
-                size=size,
-                price=price,
-
-                broker_details={
-                    'paper_info': {
-                        'oid': oid,
-                    },
-                    'name': self.broker,
-                },
             )
-            await self.ems_trades_stream.send(msg.dict())
+            await self.ems_trades_stream.send(msg)

         # lookup any existing position
-        token = f'{symbol}.{self.broker}'
-        pp_msg = self._positions.setdefault(
-            token,
-            BrokerdPosition(
+        key = fqsn.rstrip(f'.{self.broker}')
+        pp = self._positions.setdefault(
+            fqsn,
+            Position(
+                Symbol(
+                    key=key,
+                    broker_info={self.broker: {}},
+                ),
+                size=size,
+                ppu=price,
+                bsuid=key,
+            )
+        )
+        t = Transaction(
+            fqsn=fqsn,
+            tid=oid,
+            size=size,
+            price=price,
+            cost=0,  # TODO: cost model
+            dt=pendulum.from_timestamp(fill_time_s),
+            bsuid=key,
+        )
+        pp.add_clear(t)
+
+        pp_msg = BrokerdPosition(
             broker=self.broker,
             account='paper',
-            symbol=symbol,
+            symbol=fqsn,
             # TODO: we need to look up the asset currency from
             # broker info. i guess for crypto this can be
             # inferred from the pair?
             currency='',
-            size=0.0,
-            avg_price=0,
-            )
+            size=pp.size,
+            avg_price=pp.ppu,
         )

-        # "avg position price" calcs
-        # TODO: eventually it'd be nice to have a small set of routines
-        # to do this stuff from a sequence of cleared orders to enable
-        # so called "contextual positions".
-        new_size = size + pp_msg.size
-
-        # old size minus the new size gives us size differential with
-        # +ve -> increase in pp size
-        # -ve -> decrease in pp size
-        size_diff = abs(new_size) - abs(pp_msg.size)
-
-        if new_size == 0:
-            pp_msg.avg_price = 0
-
-        elif size_diff > 0:
-            # only update the "average position price" when the position
-            # size increases not when it decreases (i.e. the position is
-            # being made smaller)
-            pp_msg.avg_price = (
-                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
-            ) / abs(new_size)
-
-        pp_msg.size = new_size
-
-        await self.ems_trades_stream.send(pp_msg.dict())
+        await self.ems_trades_stream.send(pp_msg)


 async def simulate_fills(
-    quote_stream: 'tractor.ReceiveStream',  # noqa
+    quote_stream: tractor.MsgStream,  # noqa
     client: PaperBoi,

 ) -> None:

     # TODO: more machinery to better simulate real-world market things:
@@ -306,61 +311,116 @@ async def simulate_fills(

     # this stream may eventually contain multiple symbols
     async for quotes in quote_stream:
         for sym, quote in quotes.items():
             for tick in iterticks(
                 quote,
                 # dark order price filter(s)
                 types=('ask', 'bid', 'trade', 'last')
             ):
-                # print(tick)
-                tick_price = tick.get('price')
-                ttype = tick['type']
+                tick_price = tick['price']

-                if ttype in ('ask',):
+                buys: bidict[str, tuple] = client._buys[sym]
+                iter_buys = reversed(sorted(
+                    buys.values(),
+                    key=itemgetter(0),
+                ))
+
+                def buy_on_ask(our_price):
+                    return tick_price <= our_price
+
+                sells: bidict[str, tuple] = client._sells[sym]
+                iter_sells = sorted(
+                    sells.values(),
+                    key=itemgetter(0)
+                )
+
+                def sell_on_bid(our_price):
+                    return tick_price >= our_price
+
+                match tick:
+
+                    # on an ask queue tick, only clear buy entries
+                    case {
+                        'price': tick_price,
+                        'type': 'ask',
+                    }:
                         client.last_ask = (
                             tick_price,
                             tick.get('size', client.last_ask[1]),
                         )

-                    orders = client._buys.get(sym, {})
-
-                    book_sequence = reversed(
-                        sorted(orders.keys(), key=itemgetter(1)))
-
-                    def pred(our_price):
-                        return tick_price < our_price
-
-                elif ttype in ('bid',):
+                        iter_entries = zip(
+                            iter_buys,
+                            itertools.repeat(buy_on_ask)
+                        )
+
+                    # on a bid queue tick, only clear sell entries
+                    case {
+                        'price': tick_price,
+                        'type': 'bid',
+                    }:
                         client.last_bid = (
                             tick_price,
                             tick.get('size', client.last_bid[1]),
                         )

-                    orders = client._sells.get(sym, {})
-                    book_sequence = sorted(orders.keys(), key=itemgetter(1))
-
-                    def pred(our_price):
-                        return tick_price > our_price
+                        iter_entries = zip(
+                            iter_sells,
+                            itertools.repeat(sell_on_bid)
+                        )

-                elif ttype in ('trade', 'last'):
-                    # TODO: simulate actual book queues and our orders
-                    # place in it, might require full L2 data?
-                    continue
+                    # TODO: fix this block, though it definitely
+                    # costs a lot more CPU-wise
+                    # - doesn't seem like clears are happening still on
+                    #   "resting" limit orders?
+                    case {
+                        'price': tick_price,
+                        'type': ('trade' | 'last'),
+                    }:
+                        # in the clearing price / last price case we
+                        # want to iterate both sides of our book for
+                        # clears since we don't know which direction the
+                        # price is going to move (especially with HFT)
+                        # and thus we simply interleave both sides (buys
+                        # and sells) until one side clears and then
+                        # break until the next tick?
+                        def interleave():
+                            for pair in zip(
+                                iter_buys,
+                                iter_sells,
+                            ):
+                                for order_info, pred in zip(
+                                    pair,
+                                    itertools.cycle([buy_on_ask, sell_on_bid]),
+                                ):
+                                    yield order_info, pred
+
+                        iter_entries = interleave()
+
+                    # NOTE: all other (non-clearable) tick event types
+                    # - we don't want to sping the simulated clear loop
+                    #   below unecessarily and further don't want to pop
+                    #   simulated live orders prematurely.
+                    case _:
                         continue

-                # iterate book prices descending
-                for oid, our_price in book_sequence:
-                    if pred(our_price):
-
-                        # retreive order info
-                        (size, reqid, action) = orders.pop((oid, our_price))
+                # iterate all potentially clearable book prices
+                # in FIFO order per side.
+                for order_info, pred in iter_entries:
+                    (our_price, size, reqid, action) = order_info
+
+                    # print(order_info)
+                    clearable = pred(our_price)
+                    if clearable:
+                        # pop and retreive order info
+                        oid = {
+                            'buy': buys,
+                            'sell': sells
+                        }[action].inverse.pop(order_info)

                         # clearing price would have filled entirely
                         await client.fake_fill(
-                            symbol=sym,
+                            fqsn=sym,
                             # todo slippage to determine fill price
                             price=tick_price,
                             size=size,
@@ -368,9 +428,6 @@ async def simulate_fills(
                             reqid=reqid,
                             oid=oid,
                         )
-                    else:
-                        # prices are iterated in sorted order so we're done
-                        break

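
The rewritten clearing loop above leans on Python 3.10 structural pattern matching to route tick types. A stripped-down sketch of just that dispatch, with made-up tick dicts:

    ticks = [
        {'type': 'ask', 'price': 101.0, 'size': 5},
        {'type': 'bid', 'price': 99.0, 'size': 2},
        {'type': 'trade', 'price': 100.0},
        {'type': 'fundamental'},  # non-clearable -> skipped
    ]

    for tick in ticks:
        match tick:
            case {'price': tick_price, 'type': 'ask'}:
                print('ask: clear resting buys with limit >=', tick_price)
            case {'price': tick_price, 'type': 'bid'}:
                print('bid: clear resting sells with limit <=', tick_price)
            case {'price': tick_price, 'type': ('trade' | 'last')}:
                print('clearing price: interleave both book sides at', tick_price)
            case _:
                continue  # ignore all other tick event types
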
 async def handle_order_requests(

@@ -380,68 +437,83 @@ async def handle_order_requests(

 ) -> None:

-    # order_request: dict
+    request_msg: dict
     async for request_msg in ems_order_stream:
-
-        action = request_msg['action']
-
-        if action in {'buy', 'sell'}:
-
-            account = request_msg['account']
-            if account != 'paper':
-                log.error(
-                    'This is a paper account, only a `paper` selection is valid'
-                )
-                await ems_order_stream.send(BrokerdError(
-                    oid=request_msg['oid'],
-                    symbol=request_msg['symbol'],
-                    reason=f'Paper only. No account found: `{account}` ?',
-                ).dict())
-                continue
-
-            # validate
-            order = BrokerdOrder(**request_msg)
-
-            # call our client api to submit the order
-            reqid = await client.submit_limit(
-                oid=order.oid,
-                symbol=order.symbol,
-                price=order.price,
-                action=order.action,
-                size=order.size,
-
-                # XXX: by default 0 tells ``ib_insync`` methods that
-                # there is no existing order so ask the client to create
-                # a new one (which it seems to do by allocating an int
-                # counter - collision prone..)
-                reqid=order.reqid,
-            )
-
-            # deliver ack that order has been submitted to broker routing
-            await ems_order_stream.send(
-                BrokerdOrderAck(
-
-                    # ems order request id
-                    oid=order.oid,
-
-                    # broker specific request id
-                    reqid=reqid,
-
-                ).dict()
-            )
-
-        elif action == 'cancel':
-            msg = BrokerdCancel(**request_msg)
-            await client.submit_cancel(
-                reqid=msg.reqid
-            )
-
-        else:
-            log.error(f'Unknown order command: {request_msg}')
+        match request_msg:
+            case {'action': ('buy' | 'sell')}:
+                order = BrokerdOrder(**request_msg)
+                account = order.account
+
+                # error on bad inputs
+                reason = None
+                if account != 'paper':
+                    reason = f'No account found:`{account}` (paper only)?'
+
+                elif order.size == 0:
+                    reason = 'Invalid size: 0'
+
+                if reason:
+                    log.error(reason)
+                    await ems_order_stream.send(BrokerdError(
+                        oid=order.oid,
+                        symbol=order.symbol,
+                        reason=reason,
+                    ))
+                    continue
+
+                reqid = order.reqid or str(uuid.uuid4())
+
+                # deliver ack that order has been submitted to broker routing
+                await ems_order_stream.send(
+                    BrokerdOrderAck(
+                        oid=order.oid,
+                        reqid=reqid,
+                    )
+                )
+
+                # call our client api to submit the order
+                reqid = await client.submit_limit(
+                    oid=order.oid,
+                    symbol=f'{order.symbol}.{client.broker}',
+                    price=order.price,
+                    action=order.action,
+                    size=order.size,
+                    # XXX: by default 0 tells ``ib_insync`` methods that
+                    # there is no existing order so ask the client to create
+                    # a new one (which it seems to do by allocating an int
+                    # counter - collision prone..)
+                    reqid=reqid,
+                )
+                log.info(f'Submitted paper LIMIT {reqid}:\n{order}')
+
+            case {'action': 'cancel'}:
+                msg = BrokerdCancel(**request_msg)
+                await client.submit_cancel(
+                    reqid=msg.reqid
+                )
+
+            case _:
+                log.error(f'Unknown order command: {request_msg}')


+_reqids: bidict[str, tuple] = {}
+_buys: defaultdict[
+    str,  # symbol
+    bidict[
+        str,  # oid
+        tuple[float, float, str, str],  # order info
+    ]
+] = defaultdict(bidict)
+_sells: defaultdict[
+    str,  # symbol
+    bidict[
+        str,  # oid
+        tuple[float, float, str, str],  # order info
+    ]
+] = defaultdict(bidict)
+_positions: dict[str, Position] = {}

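
The module-level tables above give the paper engine its in-memory book: per-symbol `bidict`s keyed `oid -> (price, size, reqid, action)`. Because `bidict` maintains the inverse mapping, the clearing loop can pop an entry by its value tuple, as seen in `simulate_fills()`. A small usage sketch with made-up values:

    from collections import defaultdict
    from bidict import bidict

    _buys: defaultdict[str, bidict] = defaultdict(bidict)

    # stage a paper order: symbol -> oid -> order info
    _buys['xbtusdt.kraken']['oid-1'] = (20_000.0, 0.1, 'req-1', 'buy')

    # reverse lookup: given the (hashable) info tuple, pop the oid
    order_info = (20_000.0, 0.1, 'req-1', 'buy')
    oid = _buys['xbtusdt.kraken'].inverse.pop(order_info)
    assert oid == 'oid-1'
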
 @tractor.context
 async def trades_dialogue(

@@ -451,42 +523,62 @@ async def trades_dialogue(
     loglevel: str = None,

 ) -> None:

     tractor.log.get_console_log(loglevel)

     async with (
         data.open_feed(
             [fqsn],
             loglevel=loglevel,
         ) as feed,

     ):
+        pp_msgs: list[BrokerdPosition] = []
+        pos: Position
+        token: str  # f'{symbol}.{self.broker}'
+        for token, pos in _positions.items():
+            pp_msgs.append(BrokerdPosition(
+                broker=broker,
+                account='paper',
+                symbol=pos.symbol.front_fqsn(),
+                size=pos.size,
+                avg_price=pos.ppu,
+            ))
+
         # TODO: load paper positions per broker from .toml config file
         # and pass as symbol to position data mapping: ``dict[str, dict]``
-        # await ctx.started(all_positions)
-        await ctx.started(({}, {'paper',}))
+        await ctx.started((
+            pp_msgs,
+            ['paper'],
+        ))

         async with (
             ctx.open_stream() as ems_stream,
             trio.open_nursery() as n,
         ):
             client = PaperBoi(
                 broker,
                 ems_stream,
-                _buys={},
-                _sells={},
+                _buys=_buys,
+                _sells=_sells,

-                _reqids={},
+                _reqids=_reqids,

                 # TODO: load paper positions from ``positions.toml``
-                _positions={},
+                _positions=_positions,
+
+                # TODO: load postions from ledger file
+                _trade_ledger={},
             )

-            n.start_soon(handle_order_requests, client, ems_stream)
+            n.start_soon(
+                handle_order_requests,
+                client,
+                ems_stream,
+            )

             # paper engine simulator clearing task
-            await simulate_fills(feed.stream, client)
+            await simulate_fills(feed.streams[broker], client)


 @asynccontextmanager
@@ -511,6 +603,7 @@ async def open_paperboi(
     # (we likely don't need more then one proc for basic
     # simulated order clearing)
     if portal is None:
+        log.info('Starting new paper-engine actor')
         portal = await tn.start_actor(
             service_name,
             enable_modules=[__name__]
@@ -523,5 +616,4 @@ async def open_paperboi(
         loglevel=loglevel,

     ) as (ctx, first):
         yield ctx, first

@@ -27,25 +27,36 @@ import tractor

 from ..log import get_console_log, get_logger, colorize_json
 from ..brokers import get_brokermod
-from .._daemon import _tractor_kwargs
+from .._daemon import (
+    _tractor_kwargs,
+    _default_registry_host,
+    _default_registry_port,
+)
 from .. import config


 log = get_logger('cli')
-DEFAULT_BROKER = 'questrade'


 @click.command()
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
-@click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
+@click.option('--host', '-h', default=None, help='Host addr to bind')
+@click.option('--port', '-p', default=None, help='Port number to bind')
 @click.option(
     '--tsdb',
     is_flag=True,
     help='Enable local ``marketstore`` instance'
 )
-def pikerd(loglevel, host, tl, pdb, tsdb):
+def pikerd(
+    loglevel: str,
+    host: str,
+    port: int,
+    tl: bool,
+    pdb: bool,
+    tsdb: bool,
+):
     '''
     Spawn the piker broker-daemon.

@@ -62,12 +73,21 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
         "\n"
     ))

+    reg_addr: None | tuple[str, int] = None
+    if host or port:
+        reg_addr = (
+            host or _default_registry_host,
+            int(port) or _default_registry_port,
+        )
+
     async def main():

         async with (
             open_pikerd(
                 loglevel=loglevel,
                 debug_mode=pdb,
+                registry_addr=reg_addr,

             ),  # normally delivers a ``Services`` handle
             trio.open_nursery() as n,
         ):
@@ -83,9 +103,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb):

             )
             log.info(
-                f'`marketstore` up!\n'
-                f'`marketstored` pid: {pid}\n'
-                f'docker container id: {cid}\n'
+                f'`marketstored` up!\n'
+                f'pid: {pid}\n'
+                f'container id: {cid[:12]}\n'
                 f'config: {pformat(config)}'
             )

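
Both `pikerd` above and the `cli` group below grow the same host/port to registry-address fallback. A distilled sketch of that logic; the default constants come from `piker._daemon` in the diff, so the values here are placeholders. Note the diff's `int(port) or _default_registry_port` presumes `--port` arrives as a string when set; the sketch guards the `None` case explicitly:

    _default_registry_host = '127.0.0.1'  # placeholder value
    _default_registry_port = 6116         # placeholder value

    def to_reg_addr(host: str | None, port: str | None) -> tuple[str, int] | None:
        if host or port:
            return (
                host or _default_registry_host,
                int(port) if port else _default_registry_port,
            )
        return None  # fall back to the runtime's builtin default

    assert to_reg_addr(None, None) is None
    assert to_reg_addr('0.0.0.0', None) == ('0.0.0.0', 6116)
    assert to_reg_addr(None, '7000') == ('127.0.0.1', 7000)
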
@@ -97,25 +117,46 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
 @click.group(context_settings=config._context_defaults)
 @click.option(
     '--brokers', '-b',
-    default=[DEFAULT_BROKER],
+    default=None,
     multiple=True,
     help='Broker backend to use'
 )
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--configdir', '-c', help='Configuration directory')
+@click.option('--host', '-h', default=None, help='Host addr to bind')
+@click.option('--port', '-p', default=None, help='Port number to bind')
 @click.pass_context
-def cli(ctx, brokers, loglevel, tl, configdir):
+def cli(
+    ctx: click.Context,
+    brokers: list[str],
+    loglevel: str,
+    tl: bool,
+    configdir: str,
+    host: str,
+    port: int,
+
+) -> None:
     if configdir is not None:
         assert os.path.isdir(configdir), f"`{configdir}` is not a valid path"
         config._override_config_dir(configdir)

     ctx.ensure_object(dict)

-    if len(brokers) == 1:
-        brokermods = [get_brokermod(brokers[0])]
-    else:
+    if not brokers:
+        # (try to) load all (supposedly) supported data/broker backends
+        from piker.brokers import __brokers__
+        brokers = __brokers__
+
     brokermods = [get_brokermod(broker) for broker in brokers]
+    assert brokermods
+
+    reg_addr: None | tuple[str, int] = None
+    if host or port:
+        reg_addr = (
+            host or _default_registry_host,
+            int(port) or _default_registry_port,
+        )

     ctx.obj.update({
         'brokers': brokers,
@@ -125,6 +166,7 @@ def cli(ctx, brokers, loglevel, tl, configdir):
         'log': get_console_log(loglevel),
         'confdir': config._config_dir,
         'wl_path': config._watchlists_data_path,
+        'registry_addr': reg_addr,
     })

     # allow enabling same loglevel in ``tractor`` machinery
@@ -138,25 +180,26 @@ def cli(ctx, brokers, loglevel, tl, configdir):
 @click.pass_obj
 def services(config, tl, names):

-    async def list_services():
+    from .._daemon import open_piker_runtime

-        async with tractor.get_arbiter(
+    async def list_services():
+        async with (
+            open_piker_runtime(
+                name='service_query',
+                loglevel=config['loglevel'] if tl else None,
+            ),
+            tractor.get_arbiter(
                 *_tractor_kwargs['arbiter_addr']
-        ) as portal:
+            ) as portal
+        ):
             registry = await portal.run_from_ns('self', 'get_registry')
             json_d = {}
             for key, socket in registry.items():
-                # name, uuid = uid
                 host, port = socket
                 json_d[key] = f'{host}:{port}'
             click.echo(f"{colorize_json(json_d)}")

-    tractor.run(
-        list_services,
-        name='service_query',
-        loglevel=config['loglevel'] if tl else None,
-        arbiter_addr=_tractor_kwargs['arbiter_addr'],
-    )
+    trio.run(list_services)


 def _load_clis() -> None:

@@ -21,6 +21,7 @@ Broker configuration mgmt.
 import platform
 import sys
 import os
+from os import path
 from os.path import dirname
 import shutil
 from typing import Optional
@@ -111,6 +112,7 @@ if _parent_user:

 _conf_names: set[str] = {
     'brokers',
+    'pps',
     'trades',
     'watchlists',
 }
@@ -147,19 +149,21 @@ def get_conf_path(
     conf_name: str = 'brokers',

 ) -> str:
-    """Return the default config path normally under
-    ``~/.config/piker`` on linux.
+    '''
+    Return the top-level default config path normally under
+    ``~/.config/piker`` on linux for a given ``conf_name``, the config
+    name.

     Contains files such as:
     - brokers.toml
+    - pp.toml
     - watchlists.toml
-    - trades.toml

     # maybe coming soon ;)
     - signals.toml
     - strats.toml

-    """
+    '''
     assert conf_name in _conf_names
     fn = _conf_fn_w_ext(conf_name)
     return os.path.join(
@@ -173,7 +177,7 @@ def repodir():
     Return the abspath to the repo directory.

     '''
-    dirpath = os.path.abspath(
+    dirpath = path.abspath(
         # we're 3 levels down in **this** module file
         dirname(dirname(os.path.realpath(__file__)))
     )
@@ -182,7 +186,9 @@ def repodir():

 def load(
     conf_name: str = 'brokers',
-    path: str = None
+    path: str = None,
+
+    **tomlkws,

 ) -> (dict, str):
     '''
@@ -190,6 +196,10 @@ def load(

     '''
     path = path or get_conf_path(conf_name)
+
+    if not os.path.isdir(_config_dir):
+        os.mkdir(_config_dir)
+
     if not os.path.isfile(path):
         fn = _conf_fn_w_ext(conf_name)

@@ -202,8 +212,11 @@ def load(
         # if one exists.
         if os.path.isfile(template):
             shutil.copyfile(template, path)
+        else:
+            with open(path, 'r'):
+                pass  # touch it

-    config = toml.load(path)
+    config = toml.load(path, **tomlkws)
     log.debug(f"Read config file {path}")
     return config, path

@@ -212,6 +225,7 @@ def write(
     config: dict,  # toml config as dict
     name: str = 'brokers',
     path: str = None,
+    **toml_kwargs,

 ) -> None:
     ''''
@@ -235,11 +249,14 @@ def write(
         f"{path}"
     )
     with open(path, 'w') as cf:
-        return toml.dump(config, cf)
+        return toml.dump(
+            config,
+            cf,
+            **toml_kwargs,
+        )


 def load_accounts(

     providers: Optional[list[str]] = None

 ) -> bidict[str, Optional[str]]:

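
The `**tomlkws` / `**toml_kwargs` pass-throughs above let callers tune (de)serialization without piker wrapping every option; for instance the `toml` package ships comment-preserving codecs that can now be forwarded. A sketch, assuming the stock `toml` 0.10 API and the piker package layout:

    import toml
    from piker import config

    # read, keeping comments attached to the parsed doc
    conf, path = config.load(
        'brokers',
        decoder=toml.TomlPreserveCommentDecoder(),
    )

    # write back out without dropping those comments
    config.write(
        conf,
        'brokers',
        encoder=toml.TomlPreserveCommentEncoder(),
    )
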
piker/data/_ahab.py

```diff
@@ -37,8 +37,13 @@ from docker.models.containers import Container as DockerContainer
 from docker.errors import (
     DockerException,
     APIError,
+    # ContainerError,
+)
+import requests
+from requests.exceptions import (
+    ConnectionError,
+    ReadTimeout,
 )
-from requests.exceptions import ConnectionError, ReadTimeout

 from ..log import get_logger, get_console_log
 from .. import config
@@ -50,8 +55,8 @@ class DockerNotStarted(Exception):
     'Prolly you dint start da daemon bruh'


-class ContainerError(RuntimeError):
-    'Error reported via app-container logging level'
+class ApplicationLogError(Exception):
+    'App in container reported an error in logs'


 @acm
@@ -96,9 +101,9 @@ async def open_docker(
             # not perms?
             raise

-    finally:
-        if client:
-            client.close()
+    # finally:
+    #     if client:
+    #         client.close()


 class Container:
@@ -156,7 +161,7 @@ class Container:

         # print(f'level: {level}')
         if level in ('error', 'fatal'):
-            raise ContainerError(msg)
+            raise ApplicationLogError(msg)

         if patt in msg:
             return True
@@ -185,12 +190,29 @@ class Container:
             if 'is not running' in err.explanation:
                 return False

+    def hard_kill(self, start: float) -> None:
+        delay = time.time() - start
+        # get out the big guns, bc apparently marketstore
+        # doesn't actually know how to terminate gracefully
+        # :eyeroll:...
+        log.error(
+            f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
+        )
+        self.try_signal('SIGKILL')
+        self.cntr.wait(
+            timeout=3,
+            condition='not-running',
+        )
+
     async def cancel(
         self,
         stop_msg: str,
+        hard_kill: bool = False,

     ) -> None:

         cid = self.cntr.id

         # first try a graceful cancel
         log.cancel(
             f'SIGINT cancelling container: {cid}\n'
@@ -199,16 +221,26 @@ class Container:
         self.try_signal('SIGINT')

         start = time.time()
-        for _ in range(30):
+        for _ in range(6):

             with trio.move_on_after(0.5) as cs:
-                cs.shield = True
-                await self.process_logs_until(stop_msg)
+                log.cancel('polling for CNTR logs...')

+                try:
+                    await self.process_logs_until(stop_msg)
+                except ApplicationLogError:
+                    hard_kill = True
+                else:
                     # if we aren't cancelled on above checkpoint then we
-                    # assume we read the expected stop msg and terminated.
+                    # assume we read the expected stop msg and
+                    # terminated.
                     break

+            if cs.cancelled_caught:
+                # on timeout just try a hard kill after
+                # a quick container sync-wait.
+                hard_kill = True
+
             try:
                 log.info(f'Polling for container shutdown:\n{cid}')

@@ -218,6 +250,7 @@ class Container:
                     condition='not-running',
                 )

+                # graceful exit if we didn't time out
                 break

             except (
@@ -229,24 +262,22 @@ class Container:
             except (
                 docker.errors.APIError,
                 ConnectionError,
+                requests.exceptions.ConnectionError,
+                trio.Cancelled,
             ):
                 log.exception('Docker connection failure')
-                break
-        else:
-            delay = time.time() - start
-            log.error(
-                f'Failed to kill container {cid} after {delay}s\n'
-                'sending SIGKILL..'
-            )
-            # get out the big guns, bc apparently marketstore
-            # doesn't actually know how to terminate gracefully
-            # :eyeroll:...
-            self.try_signal('SIGKILL')
-            self.cntr.wait(
-                timeout=3,
-                condition='not-running',
-            )
+                self.hard_kill(start)
+                raise

+            except trio.Cancelled:
+                log.exception('trio cancelled...')
+                self.hard_kill(start)
+        else:
+            hard_kill = True
+
+        if hard_kill:
+            self.hard_kill(start)
+        else:
             log.cancel(f'Container stopped: {cid}')


@@ -289,14 +320,12 @@ async def open_ahabd(
     ))

     try:

         # TODO: we might eventually want a proxy-style msg-prot here
         # to allow remote control of containers without needing
         # callers to have root perms?
         await trio.sleep_forever()

     finally:
-        with trio.CancelScope(shield=True):
-            await cntr.cancel(stop_msg)
+        await cntr.cancel(stop_msg)
```
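The reworked `cancel()` collapses all failure paths onto one `hard_kill()` fallback: poll logs under a short timeout a handful of times, and if anything errors or times out, SIGKILL exactly once at the end. A rough standalone sketch of that control flow (the `poll_logs`/`sigint`/`sigkill` callables are hypothetical stand-ins for the container calls):

```python
import time
import trio

async def cancel_container(
    poll_logs,   # async fn: read logs until the stop msg (may raise)
    sigint,      # fn: send SIGINT to the container
    sigkill,     # fn: send SIGKILL and sync-wait on shutdown
) -> None:
    sigint()
    start = time.time()
    hard_kill = False

    for _ in range(6):
        with trio.move_on_after(0.5) as cs:
            try:
                await poll_logs()
            except RuntimeError:
                # app-level error seen in logs: stop being polite
                hard_kill = True
            else:
                # read the expected stop msg: graceful exit
                break

        if cs.cancelled_caught:
            # timed out waiting on logs this round
            hard_kill = True
    else:
        # never saw the stop msg after all retries
        hard_kill = True

    if hard_kill:
        sigkill()
    else:
        print(f'container stopped gracefully after {time.time() - start}s')
```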
piker/data/_normalize.py

```diff
@@ -56,7 +56,7 @@ def iterticks(
             sig = (
                 time,
                 tick['price'],
-                tick['size']
+                tick.get('size')
             )

             if ttype == 'dark_trade':
```
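Switching to `tick.get('size')` matters because not every tick type carries a size field; a plain index lookup would raise `KeyError` mid-stream. Tiny illustration:

```python
quote_tick = {'type': 'ask', 'price': 101.5}  # no 'size' key

sig = (
    1667265600.0,            # time
    quote_tick['price'],
    quote_tick.get('size'),  # -> None instead of raising KeyError
)
print(sig)  # (1667265600.0, 101.5, None)
```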
piker/data/_sampling.py

```diff
@@ -22,7 +22,9 @@ financial data flows.
 from __future__ import annotations
 from collections import Counter
 import time
-from typing import TYPE_CHECKING, Optional, Union
+from typing import (
+    TYPE_CHECKING,
+)

 import tractor
 import trio
@@ -37,6 +39,9 @@ if TYPE_CHECKING:
 log = get_logger(__name__)


+_default_delay_s: float = 1.0
+
+
 class sampler:
     '''
     Global sampling engine registry.
@@ -104,14 +109,18 @@ async def increment_ohlc_buffer(
         # TODO: do we want to support dynamically
         # adding a "lower" lowest increment period?
         await trio.sleep(ad)
-        total_s += lowest
+        total_s += delay_s

         # increment all subscribed shm arrays
         # TODO:
         # - this in ``numba``
         # - just lookup shms for this step instead of iterating?
-        for delay_s, shms in sampler.ohlcv_shms.items():
-            if total_s % delay_s != 0:
+        for this_delay_s, shms in sampler.ohlcv_shms.items():
+
+            # short-circuit on any not-ready because slower sample
+            # rate consuming shm buffers.
+            if total_s % this_delay_s != 0:
+                # print(f'skipping `{this_delay_s}s` sample update')
                 continue

             # TODO: ``numba`` this!
@@ -130,7 +139,7 @@ async def increment_ohlc_buffer(
             # this copies non-std fields (eg. vwap) from the last datum
             last[
                 ['time', 'volume', 'open', 'high', 'low', 'close']
-            ][0] = (t + delay_s, 0, close, close, close, close)
+            ][0] = (t + this_delay_s, 0, close, close, close, close)

             # write to the buffer
             shm.push(last)
@@ -140,7 +149,7 @@ async def increment_ohlc_buffer(

 async def broadcast(
     delay_s: int,
-    shm: Optional[ShmArray] = None,
+    shm: ShmArray | None = None,

 ) -> None:
     '''
@@ -152,7 +161,6 @@ async def broadcast(

     '''
     subs = sampler.subscribers.get(delay_s, ())

     first = last = -1

     if shm is None:
@@ -221,7 +229,8 @@ async def iter_ohlc_periods(
 async def sample_and_broadcast(

     bus: _FeedsBus,  # noqa
-    shm: ShmArray,
+    rt_shm: ShmArray,
+    hist_shm: ShmArray,
     quote_stream: trio.abc.ReceiveChannel,
     brokername: str,
     sum_tick_vlm: bool = True,
@@ -234,6 +243,8 @@ async def sample_and_broadcast(

     # iterate stream delivered by broker
     async for quotes in quote_stream:
+        # print(quotes)

         # TODO: ``numba`` this!
         for broker_symbol, quote in quotes.items():
             # TODO: in theory you can send the IPC msg *before* writing
@@ -257,8 +268,12 @@ async def sample_and_broadcast(

                     last = tick['price']

+                    # more compact inline-way to do this assignment
+                    # to both buffers?
+                    for shm in [rt_shm, hist_shm]:
                         # update last entry
                         # benchmarked in the 4-5 us range
+                        # for shm in [rt_shm, hist_shm]:
                         o, high, low, v = shm.array[-1][
                             ['open', 'high', 'low', 'volume']
                         ]
@@ -293,29 +308,29 @@ async def sample_and_broadcast(
                 volume,
             )

+            # TODO: PUT THIS IN A ``_FeedsBus.broadcast()`` method!
             # XXX: we need to be very cautious here that no
             # context-channel is left lingering which doesn't have
             # a far end receiver actor-task. In such a case you can
             # end up triggering backpressure which which will
             # eventually block this producer end of the feed and
             # thus other consumers still attached.
+            sub_key: str = broker_symbol.lower()
             subs: list[
                 tuple[
-                    Union[tractor.MsgStream, trio.MemorySendChannel],
-                    tractor.Context,
-                    Optional[float],  # tick throttle in Hz
+                    tractor.MsgStream | trio.MemorySendChannel,
+                    float | None,  # tick throttle in Hz
                 ]
-            ] = bus._subscribers[broker_symbol.lower()]
+            ] = bus.get_subs(sub_key)

             # NOTE: by default the broker backend doesn't append
             # it's own "name" into the fqsn schema (but maybe it
             # should?) so we have to manually generate the correct
             # key here.
-            bsym = f'{broker_symbol}.{brokername}'
+            fqsn = f'{broker_symbol}.{brokername}'
             lags: int = 0

-            for (stream, ctx, tick_throttle) in subs:
+            for (stream, tick_throttle) in subs.copy():

                 try:
                     with trio.move_on_after(0.2) as cs:
                         if tick_throttle:
@@ -323,47 +338,39 @@ async def sample_and_broadcast(
                             # pushes to the ``uniform_rate_send()`` below.
                             try:
                                 stream.send_nowait(
-                                    (bsym, quote)
+                                    (fqsn, quote)
                                 )
                             except trio.WouldBlock:
+                                overruns[sub_key] += 1
+                                ctx = stream._ctx
                                 chan = ctx.chan
-                                if ctx:
                                 log.warning(
-                                    f'Feed overrun {bus.brokername} ->'
-                                    f'{chan.uid} !!!'
+                                    f'Feed OVERRUN {sub_key}'
+                                    '@{bus.brokername} -> \n'
+                                    f'feed @ {chan.uid}\n'
+                                    f'throttle = {tick_throttle} Hz'
                                 )
-                                else:
-                                    key = id(stream)
-                                    overruns[key] += 1
-                                    log.warning(
-                                        f'Feed overrun {broker_symbol}'
-                                        '@{bus.brokername} -> '
-                                        f'feed @ {tick_throttle} Hz'
-                                    )
-                                    if overruns[key] > 6:
+
+                                if overruns[sub_key] > 6:
                                     # TODO: should we check for the
                                     # context being cancelled? this
                                     # could happen but the
                                     # channel-ipc-pipe is still up.
-                                    if not chan.connected():
+                                    if (
+                                        not chan.connected()
+                                        or ctx._cancel_called
+                                    ):
                                         log.warning(
                                             'Dropping broken consumer:\n'
-                                            f'{broker_symbol}:'
+                                            f'{sub_key}:'
                                             f'{ctx.cid}@{chan.uid}'
                                         )
                                         await stream.aclose()
                                         raise trio.BrokenResourceError
-                                    else:
-                                        log.warning(
-                                            'Feed getting overrun bro!\n'
-                                            f'{broker_symbol}:'
-                                            f'{ctx.cid}@{chan.uid}'
-                                        )
-                                        continue

                         else:
                             await stream.send(
-                                {bsym: quote}
+                                {fqsn: quote}
                             )

                 if cs.cancelled_caught:
@@ -391,13 +398,9 @@ async def sample_and_broadcast(
                     # so far seems like no since this should all
                     # be single-threaded. Doing it anyway though
                     # since there seems to be some kinda race..
-                    try:
-                        subs.remove((stream, tick_throttle))
-                    except ValueError:
-                        log.error(
-                            f'Stream was already removed from subs!?\n'
-                            f'{broker_symbol}:'
-                            f'{ctx.cid}@{chan.uid}'
+                    bus.remove_subs(
+                        sub_key,
+                        {(stream, tick_throttle)},
                     )
```
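The rename to `this_delay_s` untangles the inner loop variable from the `delay_s` it was shadowing, so the `total_s` accounting and the per-period gate no longer clobber each other. The short-circuit itself is plain modulo arithmetic over elapsed seconds; a quick sketch assuming 1s and 60s subscribers:

```python
ohlcv_shms = {1: ['shm_1s'], 60: ['shm_1m']}  # period (s) -> shm buffers

total_s = 0
for _ in range(120):  # simulate two minutes of 1s wakeups
    total_s += 1
    for this_delay_s, shms in ohlcv_shms.items():
        # short-circuit any period that isn't due on this wakeup
        if total_s % this_delay_s != 0:
            continue
        if this_delay_s == 60:
            print(f'1m buffers step at t={total_s}s: {shms}')
            # -> fires at t=60s and t=120s only
```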
piker/data/_sharedmem.py

```diff
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -27,13 +27,14 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
 if _USE_POSIX:
     from _posixshmem import shm_unlink

-import tractor
+# import msgspec
 import numpy as np
-from pydantic import BaseModel
 from numpy.lib import recfunctions as rfn
+import tractor

 from ..log import get_logger
 from ._source import base_iohlc_dtype
+from .types import Struct


 log = get_logger(__name__)
@@ -49,7 +50,11 @@ _rt_buffer_start = int((_days_worth - 1) * _secs_in_day)


 def cuckoff_mantracker():
+    '''
+    Disable all ``multiprocessing`` "resource tracking" machinery since
+    it's an absolute multi-threaded mess of non-SC madness.
+
+    '''
     from multiprocessing import resource_tracker as mantracker

     # Tell the "resource tracker" thing to fuck off.
@@ -107,36 +112,39 @@ class SharedInt:
         log.warning(f'Shm for {name} already unlinked?')


-class _Token(BaseModel):
+class _Token(Struct, frozen=True):
     '''
     Internal represenation of a shared memory "token"
     which can be used to key a system wide post shm entry.

     '''
-    class Config:
-        frozen = True
-
     shm_name: str  # this servers as a "key" value
     shm_first_index_name: str
     shm_last_index_name: str
     dtype_descr: tuple
+    size: int  # in struct-array index / row terms

     @property
     def dtype(self) -> np.dtype:
         return np.dtype(list(map(tuple, self.dtype_descr))).descr

     def as_msg(self):
-        return self.dict()
+        return self.to_dict()

     @classmethod
     def from_msg(cls, msg: dict) -> _Token:
         if isinstance(msg, _Token):
             return msg

+        # TODO: native struct decoding
+        # return _token_dec.decode(msg)
+
         msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
         return _Token(**msg)


+# _token_dec = msgspec.msgpack.Decoder(_Token)
+
 # TODO: this api?
 # _known_tokens = tractor.ActorVar('_shm_tokens', {})
 # _known_tokens = tractor.ContextStack('_known_tokens', )
@@ -155,6 +163,7 @@ def get_shm_token(key: str) -> _Token:

 def _make_token(
     key: str,
+    size: int,
     dtype: Optional[np.dtype] = None,
 ) -> _Token:
     '''
@@ -167,7 +176,8 @@ def _make_token(
         shm_name=key,
         shm_first_index_name=key + "_first",
         shm_last_index_name=key + "_last",
-        dtype_descr=np.dtype(dtype).descr
+        dtype_descr=tuple(np.dtype(dtype).descr),
+        size=size,
     )


@@ -219,6 +229,7 @@ class ShmArray:
             shm_first_index_name=self._first._shm.name,
             shm_last_index_name=self._last._shm.name,
             dtype_descr=tuple(self._array.dtype.descr),
+            size=self._len,
         )

     @property
@@ -433,7 +444,7 @@ class ShmArray:
 def open_shm_array(

     key: Optional[str] = None,
-    size: int = _default_size,
+    size: int = _default_size,  # see above
     dtype: Optional[np.dtype] = None,
     readonly: bool = False,

@@ -464,7 +475,8 @@ def open_shm_array(

     token = _make_token(
         key=key,
-        dtype=dtype
+        size=size,
+        dtype=dtype,
     )

     # create single entry arrays for storing an first and last indices
@@ -516,15 +528,15 @@ def open_shm_array(
     # "unlink" created shm on process teardown by
     # pushing teardown calls onto actor context stack

-    tractor._actor._lifetime_stack.callback(shmarr.close)
-    tractor._actor._lifetime_stack.callback(shmarr.destroy)
+    stack = tractor.current_actor().lifetime_stack
+    stack.callback(shmarr.close)
+    stack.callback(shmarr.destroy)

     return shmarr


 def attach_shm_array(
     token: tuple[str, str, tuple[str, str]],
-    size: int = _default_size,
     readonly: bool = True,

 ) -> ShmArray:
@@ -563,7 +575,7 @@ def attach_shm_array(
         raise _err

     shmarr = np.ndarray(
-        (size,),
+        (token.size,),
         dtype=token.dtype,
         buffer=shm.buf
     )
@@ -602,8 +614,8 @@ def attach_shm_array(
     if key not in _known_tokens:
         _known_tokens[key] = token

-    # "close" attached shm on process teardown
-    tractor._actor._lifetime_stack.callback(sha.close)
+    # "close" attached shm on actor teardown
+    tractor.current_actor().lifetime_stack.callback(sha.close)

     return sha

@@ -631,6 +643,7 @@ def maybe_open_shm_array(
     use ``attach_shm_array``.

     '''
+    size = kwargs.pop('size', _default_size)
     try:
         # see if we already know this key
         token = _known_tokens[key]
@@ -638,7 +651,11 @@ def maybe_open_shm_array(
     except KeyError:
         log.warning(f"Could not find {key} in shms cache")
         if dtype:
-            token = _make_token(key, dtype)
+            token = _make_token(
+                key,
+                size=size,
+                dtype=dtype,
+            )
         try:
             return attach_shm_array(token=token, **kwargs), False
         except FileNotFoundError:
```
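With `size` now stored on the token itself, an attaching process no longer guesses the buffer length from a module default: the shape travels with the token msg. A minimal standalone analogue of the `as_msg()`/`from_msg()` round trip using `msgspec` (all names local to this sketch):

```python
import msgspec

class Token(msgspec.Struct, frozen=True):
    shm_name: str
    dtype_descr: tuple
    size: int  # in struct-array index / row terms

    def to_dict(self) -> dict:
        return {f: getattr(self, f) for f in self.__struct_fields__}

# sender side: serialize the token for IPC
tok = Token(shm_name='btcusdt.1s', dtype_descr=(('close', '<f8'),), size=2500)
msg = tok.to_dict()

# receiver side: rebuild, then size the attach from the token itself
tok2 = Token(**msg)
assert tok2.size == 2500  # i.e. np.ndarray((tok2.size,), ...) uses this
```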
piker/data/_source.py

```diff
@@ -23,7 +23,8 @@ import decimal

 from bidict import bidict
 import numpy as np
-from pydantic import BaseModel
+
+from .types import Struct
 # from numba import from_dtype


@@ -126,7 +127,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
     )


-class Symbol(BaseModel):
+class Symbol(Struct):
     '''
     I guess this is some kinda container thing for dealing with
     all the different meta-data formats from brokers?
@@ -152,9 +153,7 @@ class Symbol(BaseModel):
         info: dict[str, Any],
         suffix: str = '',

-        # XXX: like wtf..
-        # ) -> 'Symbol':
-    ) -> None:
+    ) -> Symbol:

         tick_size = info.get('price_tick_size', 0.01)
         lot_tick_size = info.get('lot_tick_size', 0.0)
@@ -175,9 +174,7 @@ class Symbol(BaseModel):
         fqsn: str,
         info: dict[str, Any],

-        # XXX: like wtf..
-        # ) -> 'Symbol':
-    ) -> None:
+    ) -> Symbol:

         broker, key, suffix = unpack_fqsn(fqsn)
         return cls.from_broker_info(
             broker,
@@ -221,6 +218,10 @@ class Symbol(BaseModel):
         else:
             return (key, broker)

+    @property
+    def fqsn(self) -> str:
+        return '.'.join(self.tokens()).lower()
+
     def front_fqsn(self) -> str:
         '''
         fqsn = "fully qualified symbol name"
@@ -240,7 +241,7 @@ class Symbol(BaseModel):

         '''
         tokens = self.tokens()
-        fqsn = '.'.join(tokens)
+        fqsn = '.'.join(map(str.lower, tokens))
         return fqsn

     def iterfqsns(self) -> list[str]:
```
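The new `fqsn` property and the `map(str.lower, ...)` change make key generation case-insensitive end to end, so e.g. an upper-cased broker symbol can't produce a second, distinct feed key. Quick illustration:

```python
tokens = ('XBTUSD', 'kraken')

# old: mixed-case key leaks through
assert '.'.join(tokens) == 'XBTUSD.kraken'

# new: normalized fully-qualified-symbol-name
assert '.'.join(map(str.lower, tokens)) == 'xbtusd.kraken'
```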
piker/data/_web_bs.py

```diff
@@ -18,13 +18,24 @@
 ToOlS fOr CoPInG wITh "tHE wEB" protocols.

 """
-from contextlib import asynccontextmanager, AsyncExitStack
+from contextlib import (
+    asynccontextmanager,
+    AsyncExitStack,
+)
+from itertools import count
 from types import ModuleType
-from typing import Any, Callable, AsyncGenerator
+from typing import (
+    Any,
+    Optional,
+    Callable,
+    AsyncGenerator,
+    Iterable,
+)
 import json

 import trio
 import trio_websocket
+from wsproto.utilities import LocalProtocolError
 from trio_websocket._impl import (
     ConnectionClosed,
     DisconnectionTimeout,
@@ -35,39 +46,49 @@ from trio_websocket._impl import (

 from ..log import get_logger

+from .types import Struct

 log = get_logger(__name__)


 class NoBsWs:
-    """Make ``trio_websocket`` sockets stay up no matter the bs.
+    '''
+    Make ``trio_websocket`` sockets stay up no matter the bs.

-    """
+    You can provide a ``fixture`` async-context-manager which will be
+    enter/exitted around each reconnect operation.
+    '''
     recon_errors = (
         ConnectionClosed,
         DisconnectionTimeout,
         ConnectionRejected,
         HandshakeError,
         ConnectionTimeout,
+        LocalProtocolError,
     )

     def __init__(
         self,
         url: str,
-        token: str,
         stack: AsyncExitStack,
-        fixture: Callable,
-        serializer: ModuleType = json,
+        fixture: Optional[Callable] = None,
+        serializer: ModuleType = json
     ):
         self.url = url
-        self.token = token
         self.fixture = fixture
         self._stack = stack
         self._ws: 'WebSocketConnection' = None  # noqa

+        # TODO: is there some method we can call
+        # on the underlying `._ws` to get this?
+        self._connected: bool = False
+
     async def _connect(
         self,
         tries: int = 1000,
     ) -> None:

+        self._connected = False
         while True:
             try:
                 await self._stack.aclose()
@@ -82,19 +103,18 @@ class NoBsWs:
                 self._ws = await self._stack.enter_async_context(
                     trio_websocket.open_websocket_url(self.url)
                 )

+                if self.fixture is not None:
                     # rerun user code fixture
-                if self.token == '':
                     ret = await self._stack.enter_async_context(
                         self.fixture(self)
                     )
-                else:
-                    ret = await self._stack.enter_async_context(
-                        self.fixture(self, self.token)
-                    )

                     assert ret is None

                 log.info(f'Connection success: {self.url}')

+                self._connected = True
                 return self._ws

             except self.recon_errors as err:
@@ -104,11 +124,15 @@ class NoBsWs:
                     f'{type(err)}...retry attempt {i}'
                 )
                 await trio.sleep(0.5)
+                self._connected = False
                 continue
             else:
                 log.exception('ws connection fail...')
                 raise last_err

+    def connected(self) -> bool:
+        return self._connected
+
     async def send_msg(
         self,
         data: Any,
@@ -128,21 +152,26 @@ class NoBsWs:
         except self.recon_errors:
             await self._connect()

+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        return await self.recv_msg()
+

 @asynccontextmanager
 async def open_autorecon_ws(
     url: str,

-    # TODO: proper type annot smh
-    fixture: Callable,
-    # used for authenticated websockets
-    token: str = '',
+    # TODO: proper type cannot smh
+    fixture: Optional[Callable] = None,
 ) -> AsyncGenerator[tuple[...], NoBsWs]:
     """Apparently we can QoS for all sorts of reasons..so catch em.

     """
     async with AsyncExitStack() as stack:
-        ws = NoBsWs(url, token, stack, fixture=fixture)
+        ws = NoBsWs(url, stack, fixture=fixture)
         await ws._connect()

         try:
@@ -150,3 +179,114 @@ async def open_autorecon_ws(

         finally:
             await stack.aclose()
+
+
+'''
+JSONRPC response-request style machinery for transparent multiplexing of msgs
+over a NoBsWs.
+
+'''
+
+
+class JSONRPCResult(Struct):
+    id: int
+    jsonrpc: str = '2.0'
+    result: Optional[dict] = None
+    error: Optional[dict] = None
+
+
+@asynccontextmanager
+async def open_jsonrpc_session(
+    url: str,
+    start_id: int = 0,
+    response_type: type = JSONRPCResult,
+    request_type: Optional[type] = None,
+    request_hook: Optional[Callable] = None,
+    error_hook: Optional[Callable] = None,
+) -> Callable[[str, dict], dict]:
+
+    async with (
+        trio.open_nursery() as n,
+        open_autorecon_ws(url) as ws
+    ):
+        rpc_id: Iterable = count(start_id)
+        rpc_results: dict[int, dict] = {}
+
+        async def json_rpc(method: str, params: dict) -> dict:
+            '''
+            perform a json rpc call and wait for the result, raise exception in
+            case of error field present on response
+            '''
+            msg = {
+                'jsonrpc': '2.0',
+                'id': next(rpc_id),
+                'method': method,
+                'params': params
+            }
+            _id = msg['id']
+
+            rpc_results[_id] = {
+                'result': None,
+                'event': trio.Event()
+            }
+
+            await ws.send_msg(msg)
+
+            await rpc_results[_id]['event'].wait()
+
+            ret = rpc_results[_id]['result']
+
+            del rpc_results[_id]
+
+            if ret.error is not None:
+                raise Exception(json.dumps(ret.error, indent=4))
+
+            return ret
+
+        async def recv_task():
+            '''
+            receives every ws message and stores it in its corresponding
+            result field, then sets the event to wakeup original sender
+            tasks. also recieves responses to requests originated from
+            the server side.
+
+            '''
+            async for msg in ws:
+                match msg:
+                    case {
+                        'result': _,
+                        'id': mid,
+                    } if res_entry := rpc_results.get(mid):
+
+                        res_entry['result'] = response_type(**msg)
+                        res_entry['event'].set()
+
+                    case {
+                        'result': _,
+                        'id': mid,
+                    } if not rpc_results.get(mid):
+                        log.warning(
+                            f'Unexpected ws msg: {json.dumps(msg, indent=4)}'
+                        )
+
+                    case {
+                        'method': _,
+                        'params': _,
+                    }:
+                        log.debug(f'Recieved\n{msg}')
+                        if request_hook:
+                            await request_hook(request_type(**msg))
+
+                    case {
+                        'error': error
+                    }:
+                        log.warning(f'Recieved\n{error}')
+                        if error_hook:
+                            await error_hook(response_type(**msg))
+
+                    case _:
+                        log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
+
+        n.start_soon(recv_task)
+        yield json_rpc
+        n.cancel_scope.cancel()
```
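The new `open_jsonrpc_session()` multiplexes concurrent request/response pairs over one websocket: each call takes a fresh id from `count()`, parks on a `trio.Event`, and `recv_task` routes every inbound frame to the matching waiter via structural `match`. A hedged usage sketch (the endpoint URL and method name below are made up):

```python
import trio
from piker.data._web_bs import open_jsonrpc_session  # module path per this diff

async def main():
    async with open_jsonrpc_session(
        'wss://example.com/api/v2',   # hypothetical endpoint
    ) as json_rpc:
        # concurrent calls are fine: per-call ids and events keep
        # responses from crossing wires
        res = await json_rpc(
            'public/get_time',        # hypothetical method name
            {},
        )
        print(res.result)             # a JSONRPCResult instance

trio.run(main)
```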
piker/data/feed.py — 1675 changed lines (diff suppressed because it is too large)
piker/data/marketstore.py

```diff
@@ -37,8 +37,8 @@ import time
 from math import isnan

 from bidict import bidict
-import msgpack
-import pyqtgraph as pg
+from msgspec.msgpack import encode, decode
+# import pyqtgraph as pg
 import numpy as np
 import tractor
 from trio_websocket import open_websocket_url
@@ -56,6 +56,7 @@ if TYPE_CHECKING:

 from .feed import maybe_open_feed
 from ..log import get_logger, get_console_log
+from .._profile import Profiler


 log = get_logger(__name__)
@@ -131,7 +132,10 @@ def start_marketstore(

     mktsdir = os.path.join(config._config_dir, 'marketstore')

-    # create when dne
+    # create dirs when dne
+    if not os.path.isdir(config._config_dir):
+        os.mkdir(config._config_dir)
+
     if not os.path.isdir(mktsdir):
         os.mkdir(mktsdir)

@@ -387,50 +391,54 @@ class Storage:
     async def load(
         self,
         fqsn: str,
+        timeframe: int,

     ) -> tuple[
-        dict[int, np.ndarray],  # timeframe (in secs) to series
+        np.ndarray,  # timeframe sampled array-series
         Optional[datetime],  # first dt
         Optional[datetime],  # last dt
     ]:

         first_tsdb_dt, last_tsdb_dt = None, None
-        tsdb_arrays = await self.read_ohlcv(
+        hist = await self.read_ohlcv(
             fqsn,
             # on first load we don't need to pull the max
             # history per request size worth.
             limit=3000,
+            timeframe=timeframe,
         )
-        log.info(f'Loaded tsdb history {tsdb_arrays}')
+        log.info(f'Loaded tsdb history {hist}')

-        if tsdb_arrays:
-            fastest = list(tsdb_arrays.values())[0]
-            times = fastest['Epoch']
+        if len(hist):
+            times = hist['Epoch']
             first, last = times[0], times[-1]
             first_tsdb_dt, last_tsdb_dt = map(
                 pendulum.from_timestamp, [first, last]
             )

-        return tsdb_arrays, first_tsdb_dt, last_tsdb_dt
+        return (
+            hist,  # array-data
+            first_tsdb_dt,  # start of query-frame
+            last_tsdb_dt,  # most recent
+        )

     async def read_ohlcv(
         self,
         fqsn: str,
-        timeframe: Optional[Union[int, str]] = None,
+        timeframe: int | str,
         end: Optional[int] = None,
         limit: int = int(800e3),

-    ) -> tuple[
-        MarketstoreClient,
-        Union[dict, np.ndarray]
-    ]:
+    ) -> np.ndarray:
         client = self.client
         syms = await client.list_symbols()

         if fqsn not in syms:
             return {}

-        tfstr = tf_in_1s[1]
+        # use the provided timeframe or 1s by default
+        tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])

         params = Params(
             symbols=fqsn,
@@ -444,58 +452,68 @@ class Storage:
             limit=limit,
         )

-        if timeframe is None:
-            log.info(f'starting {fqsn} tsdb granularity scan..')
-            # loop through and try to find highest granularity
-            for tfstr in tf_in_1s.values():
         try:
-            log.info(f'querying for {tfstr}@{fqsn}')
-            params.set('timeframe', tfstr)
             result = await client.query(params)
-            break

         except purerpc.grpclib.exceptions.UnknownError:
-            # XXX: this is already logged by the container and
-            # thus shows up through `marketstored` logs relay.
-            # log.warning(f'{tfstr}@{fqsn} not found')
-            continue
-        else:
+            # indicate there is no history for this timeframe
             return {}

-        else:
-            result = await client.query(params)

         # TODO: it turns out column access on recarrays is actually slower:
         # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
         # it might make sense to make these structured arrays?
-        # Fill out a `numpy` array-results map
-        arrays = {}
-        for fqsn, data_set in result.by_symbols().items():
-            arrays.setdefault(fqsn, {})[
-                tf_in_1s.inverse[data_set.timeframe]
-            ] = data_set.array
+        data_set = result.by_symbols()[fqsn]
+        array = data_set.array

-        return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]
+        # XXX: ensure sample rate is as expected
+        time = data_set.array['Epoch']
+        if len(time) > 1:
+            time_step = time[-1] - time[-2]
+            ts = tf_in_1s.inverse[data_set.timeframe]
+
+            if time_step != ts:
+                log.warning(
+                    f'MKTS BUG: wrong timeframe loaded: {time_step}'
+                    'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG'
+                    f'WIPING HISTORY FOR {ts}s'
+                )
+                await self.delete_ts(fqsn, timeframe)
+
+                # try reading again..
+                return await self.read_ohlcv(
+                    fqsn,
+                    timeframe,
+                    end,
+                    limit,
+                )
+
+        return array

     async def delete_ts(
         self,
         key: str,
         timeframe: Optional[Union[int, str]] = None,
+        fmt: str = 'OHLCV',

     ) -> bool:

         client = self.client
         syms = await client.list_symbols()
         print(syms)
-        # if key not in syms:
-        #     raise KeyError(f'`{fqsn}` table key not found?')
+        if key not in syms:
+            raise KeyError(f'`{key}` table key not found in\n{syms}?')

-        return await client.destroy(tbk=key)
+        tbk = mk_tbk((
+            key,
+            tf_in_1s.get(timeframe, tf_in_1s[60]),
+            fmt,
+        ))
+        return await client.destroy(tbk=tbk)

     async def write_ohlcv(
         self,
         fqsn: str,
         ohlcv: np.ndarray,
+        timeframe: int,
         append_and_duplicate: bool = True,
         limit: int = int(800e3),

@@ -519,17 +537,18 @@ class Storage:

         m, r = divmod(len(mkts_array), limit)

+        tfkey = tf_in_1s[timeframe]
         for i in range(m, 1):
             to_push = mkts_array[i-1:i*limit]

             # write to db
             resp = await self.client.write(
                 to_push,
-                tbk=f'{fqsn}/1Sec/OHLCV',
+                tbk=f'{fqsn}/{tfkey}/OHLCV',

                 # NOTE: will will append duplicates
                 # for the same timestamp-index.
-                # TODO: pre deduplicate?
+                # TODO: pre-deduplicate?
                 isvariablelength=append_and_duplicate,
             )

@@ -548,7 +567,7 @@ class Storage:
             # write to db
             resp = await self.client.write(
                 to_push,
-                tbk=f'{fqsn}/1Sec/OHLCV',
+                tbk=f'{fqsn}/{tfkey}/OHLCV',

                 # NOTE: will will append duplicates
                 # for the same timestamp-index.
@@ -577,6 +596,7 @@ class Storage:
     # def delete_range(self, start_dt, end_dt) -> None:
     # ...


 @acm
 async def open_storage_client(
     fqsn: str,
@@ -626,7 +646,7 @@ async def tsdb_history_update(
     # * the original data feed arch blurb:
     # - https://github.com/pikers/piker/issues/98
     #
-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         disabled=False,  # not pg_profile_enabled(),
         delayed=False,
     )
@@ -638,34 +658,35 @@ async def tsdb_history_update(
         [fqsn],
         start_stream=False,

-    ) as (feed, stream),
+    ) as feed,
     ):
         profiler(f'opened feed for {fqsn}')

-        to_append = feed.shm.array
-        to_prepend = None
+        # to_append = feed.hist_shm.array
+        # to_prepend = None

         if fqsn:
-            symbol = feed.symbols.get(fqsn)
+            flume = feed.flumes[fqsn]
+            symbol = flume.symbol
             if symbol:
-                fqsn = symbol.front_fqsn()
+                fqsn = symbol.fqsn

         # diff db history with shm and only write the missing portions
-        ohlcv = feed.shm.array
+        # ohlcv = flume.hist_shm.array

         # TODO: use pg profiler
-        tsdb_arrays = await storage.read_ohlcv(fqsn)
-        # hist diffing
-        if tsdb_arrays:
-            for secs in (1, 60):
-                ts = tsdb_arrays.get(secs)
-                if ts is not None and len(ts):
-                    # these aren't currently used but can be referenced from
-                    # within the embedded ipython shell below.
-                    to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
-                    to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
-
-        profiler('Finished db arrays diffs')
+        # for secs in (1, 60):
+        #     tsdb_array = await storage.read_ohlcv(
+        #         fqsn,
+        #         timeframe=timeframe,
+        #     )
+        #     # hist diffing:
+        #     # these aren't currently used but can be referenced from
+        #     # within the embedded ipython shell below.
+        #     to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
+        #     to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
+
+        # profiler('Finished db arrays diffs')

         syms = await storage.client.list_symbols()
         log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
@@ -774,12 +795,13 @@ async def stream_quotes(
     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
         # send subs topics to server
         resp = await ws.send_message(
-            msgpack.dumps({'streams': list(tbks.values())})
+            encode({'streams': list(tbks.values())})
        )
         log.info(resp)

         async def recv() -> dict[str, Any]:
-            return msgpack.loads((await ws.get_message()), encoding='utf-8')
+            return decode((await ws.get_message()), encoding='utf-8')

         streams = (await recv())['streams']
         log.info(f"Subscribed to {streams}")
```
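Writes are now keyed by the actual timeframe rather than a hard-coded `1Sec`, using the `tf_in_1s` bidict to map a period in seconds to marketstore's timeframe string. A sketch of the key construction; the exact `tf_in_1s` contents here are an assumption based on the 1s/60s usage elsewhere in this diff:

```python
from bidict import bidict

# assumed mapping; the real table lives in piker/data/marketstore.py
tf_in_1s = bidict({
    1: '1Sec',
    60: '1Min',
})

fqsn = 'btcusdt.binance'
timeframe = 60

tfkey = tf_in_1s[timeframe]
tbk = f'{fqsn}/{tfkey}/OHLCV'   # "time-bucket key" passed to client.write()
assert tbk == 'btcusdt.binance/1Min/OHLCV'

# the inverse direction validates what a query actually returned:
assert tf_in_1s.inverse['1Min'] == 60
```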
piker/data/types.py (new file)

```diff
@@ -0,0 +1,88 @@
+# piker: trading gear for hackers
+# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Built-in (extension) types.
+
+"""
+import sys
+from typing import Optional
+from pprint import pformat
+
+import msgspec
+
+
+class Struct(
+    msgspec.Struct,
+
+    # https://jcristharif.com/msgspec/structs.html#tagged-unions
+    # tag='pikerstruct',
+    # tag=True,
+):
+    '''
+    A "human friendlier" (aka repl buddy) struct subtype.
+
+    '''
+    def to_dict(self) -> dict:
+        return {
+            f: getattr(self, f)
+            for f in self.__struct_fields__
+        }
+
+    # Lul, doesn't seem to work that well..
+    # def __repr__(self):
+    #     # only turn on pprint when we detect a python REPL
+    #     # at runtime B)
+    #     if (
+    #         hasattr(sys, 'ps1')
+    #         # TODO: check if we're in pdb
+    #     ):
+    #         return self.pformat()
+
+    #     return super().__repr__()
+
+    def pformat(self) -> str:
+        return f'Struct({pformat(self.to_dict())})'
+
+    def copy(
+        self,
+        update: Optional[dict] = None,
+
+    ) -> msgspec.Struct:
+        '''
+        Validate-typecast all self defined fields, return a copy of us
+        with all such fields.
+
+        This is kinda like the default behaviour in `pydantic.BaseModel`.
+
+        '''
+        if update:
+            for k, v in update.items():
+                setattr(self, k, v)
+
+        # roundtrip serialize to validate
+        return msgspec.msgpack.Decoder(
+            type=type(self)
+        ).decode(
+            msgspec.msgpack.Encoder().encode(self)
+        )
+
+    def typecast(
+        self,
+        # fields: Optional[list[str]] = None,
+    ) -> None:
+        for fname, ftype in self.__annotations__.items():
+            setattr(self, fname, ftype(getattr(self, fname)))
```
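`Struct.copy()` restores the validate-on-copy behaviour pydantic gave for free: mutate, then round-trip through msgpack so msgspec re-checks every field type. A usage sketch (the `Order` struct is invented for illustration):

```python
import msgspec

class Order(msgspec.Struct):
    size: float
    price: float

    def copy(self, update: dict | None = None) -> 'Order':
        if update:
            for k, v in update.items():
                setattr(self, k, v)
        # roundtrip serialize so msgspec (re)validates all fields
        return msgspec.msgpack.Decoder(type=type(self)).decode(
            msgspec.msgpack.Encoder().encode(self)
        )

o = Order(size=1.0, price=20_000.0)
o2 = o.copy(update={'price': 21_000.0})
assert o2.price == 21_000.0 and o2 is not o
# a type-invalid update (e.g. price='oops') raises on decode
```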
piker/fsp/_api.py

```diff
@@ -78,7 +78,8 @@ class Fsp:
     # + the consuming fsp *to* the consumers output
     # shm flow.
     _flow_registry: dict[
-        tuple[_Token, str], _Token,
+        tuple[_Token, str],
+        tuple[_Token, Optional[ShmArray]],
     ] = {}

     def __init__(
@@ -120,7 +121,6 @@ class Fsp:
     ):
         return self.func(*args, **kwargs)

-    # TODO: lru_cache this? prettty sure it'll work?
     def get_shm(
         self,
         src_shm: ShmArray,
@@ -131,12 +131,27 @@ class Fsp:
         for this "instance" of a signal processor for
         the given ``key``.

+        The destination shm "token" and array are cached if possible to
+        minimize multiple stdlib/system calls.
+
         '''
-        dst_token = self._flow_registry[
+        dst_token, maybe_array = self._flow_registry[
             (src_shm._token, self.name)
         ]
-        shm = attach_shm_array(dst_token)
-        return shm
+        if maybe_array is None:
+            self._flow_registry[
+                (src_shm._token, self.name)
+            ] = (
+                dst_token,
+                # "cache" the ``ShmArray`` such that
+                # we call the underlying "attach" code as few
+                # times as possible as per:
+                # - https://github.com/pikers/piker/issues/359
+                # - https://github.com/pikers/piker/issues/332
+                maybe_array := attach_shm_array(dst_token)
+            )
+
+        return maybe_array


 def fsp(
```
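The registry now caches the attached `ShmArray` alongside its token, so the attach (and its underlying shm/file syscalls) runs once per flow instead of on every `get_shm()` call; the walrus expression both stores the fresh handle in the registry and returns it. The pattern in isolation (names local to this sketch):

```python
_cache: dict[str, tuple[str, object | None]] = {
    'flow-a': ('token-a', None),  # token known, array not yet attached
}

def expensive_attach(token: str) -> object:
    print(f'attaching {token}')   # stands in for the shm syscalls
    return object()

def get_shm(key: str) -> object:
    token, maybe_array = _cache[key]
    if maybe_array is None:
        # store and return in one expression via the walrus
        _cache[key] = (token, maybe_array := expensive_attach(token))
    return maybe_array

get_shm('flow-a')  # attaches
get_shm('flow-a')  # cache hit: no second attach
```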
@ -26,7 +26,6 @@ from typing import (
|
||||||
)
|
)
|
||||||
|
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pyqtgraph as pg
|
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
from trio_typing import TaskStatus
|
||||||
import tractor
|
import tractor
|
||||||
|
@ -35,14 +34,18 @@ from tractor.msg import NamespacePath
|
||||||
from ..log import get_logger, get_console_log
|
from ..log import get_logger, get_console_log
|
||||||
from .. import data
|
from .. import data
|
||||||
from ..data import attach_shm_array
|
from ..data import attach_shm_array
|
||||||
from ..data.feed import Feed
|
from ..data.feed import (
|
+    Flume,
+)
 from ..data._sharedmem import ShmArray
+from ..data._sampling import _default_delay_s
 from ..data._source import Symbol
 from ._api import (
     Fsp,
     _load_builtins,
     _Token,
 )
+from .._profile import Profiler

 log = get_logger(__name__)

@@ -77,7 +80,7 @@ async def filter_quotes_by_sym(
 async def fsp_compute(

     symbol: Symbol,
-    feed: Feed,
+    flume: Flume,
     quote_stream: trio.abc.ReceiveChannel,

     src: ShmArray,

@@ -90,7 +93,7 @@ async def fsp_compute(

 ) -> None:

-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         delayed=False,
         disabled=True
     )

@@ -105,7 +108,7 @@ async def fsp_compute(
         filter_quotes_by_sym(fqsn, quote_stream),

         # XXX: currently the ``ohlcv`` arg
-        feed.shm,
+        flume.rt_shm,
     )

     # Conduct a single iteration of fsp with historical bars input

@@ -114,7 +117,7 @@ async def fsp_compute(
         dict[str, np.ndarray],  # multi-output case
         np.ndarray,  # single output case
     ]
-    history_output = await out_stream.__anext__()
+    history_output = await anext(out_stream)

     func_name = func.__name__
     profiler(f'{func_name} generated history')

@@ -261,7 +264,7 @@ async def cascade(
     destination shm array buffer.

     '''
-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         delayed=False,
         disabled=False
     )

@@ -284,9 +287,10 @@ async def cascade(
     # TODO: ugh i hate this wind/unwind to list over the wire
     # but not sure how else to do it.
     for (token, fsp_name, dst_token) in shm_registry:
-        Fsp._flow_registry[
-            (_Token.from_msg(token), fsp_name)
-        ] = _Token.from_msg(dst_token)
+        Fsp._flow_registry[(
+            _Token.from_msg(token),
+            fsp_name,
+        )] = _Token.from_msg(dst_token), None

     fsp: Fsp = reg.get(
         NamespacePath(ns_path)

@@ -307,12 +311,12 @@ async def cascade(
         # needs to get throttled the ticks we generate.
         # tick_throttle=60,

-    ) as (feed, quote_stream):
-        symbol = feed.symbols[fqsn]
+    ) as feed:

+        flume = feed.flumes[fqsn]
+        symbol = flume.symbol
+        assert src.token == flume.rt_shm.token
         profiler(f'{func}: feed up')

-        assert src.token == feed.shm.token
         # last_len = new_len = len(src.array)

         func_name = func.__name__

@@ -324,8 +328,8 @@ async def cascade(

                 fsp_compute,
                 symbol=symbol,
-                feed=feed,
-                quote_stream=quote_stream,
+                flume=flume,
+                quote_stream=flume.stream,

                 # shm
                 src=src,

@@ -374,7 +378,8 @@ async def cascade(
                 'key': dst_shm_token,
                 'first': dst._first.value,
                 'last': dst._last.value,
-            }})
+            }
+        })
         return tracker, index

     def is_synced(

@@ -418,18 +423,23 @@ async def cascade(
         # detect sample period step for subscription to increment
         # signal
         times = src.array['time']
-        delay_s = times[-1] - times[times != times[-1]][-1]
+        if len(times) > 1:
+            delay_s = times[-1] - times[times != times[-1]][-1]
+        else:
+            # our default "HFT" sample rate.
+            delay_s = _default_delay_s

         # Increment the underlying shared memory buffer on every
         # "increment" msg received from the underlying data feed.
-        async with feed.index_stream(
+        async with flume.index_stream(
             int(delay_s)
         ) as istream:

             profiler(f'{func_name}: sample stream up')
             profiler.finish()

-            async for _ in istream:
+            async for i in istream:
+                # print(f'FSP incrementing {i}')

                 # respawn the compute task if the source
                 # array has been updated such that we compute
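The new sampling-period fallback above is easy to check in isolation; a minimal sketch (the helper name and the 1.0s default are stand-ins for illustration, the engine itself reads `_default_delay_s` from `piker.data._sampling`):

import numpy as np

def detect_delay_s(
    times: np.ndarray,
    default: float = 1.0,  # stand-in for ``_default_delay_s``
) -> float:
    # period = gap between the newest stamp and the most recent
    # *distinct* stamp before it; fall back to the default when the
    # buffer holds only one unique timestamp so far.
    if len(times) > 1:
        return times[-1] - times[times != times[-1]][-1]
    return default

print(detect_delay_s(np.array([1., 2., 3., 3.])))  # -> 1.0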
@@ -0,0 +1,975 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+'''
+Personal/Private position parsing, calculating, summarizing in a way
+that doesn't try to cuk most humans who prefer to not lose their moneys..
+(looking at you `ib` and dirt-bird friends)
+
+'''
+from contextlib import contextmanager as cm
+from pprint import pformat
+import os
+from os import path
+from math import copysign
+import re
+import time
+from typing import (
+    Any,
+    Optional,
+    Union,
+)
+
+import pendulum
+from pendulum import datetime, now
+import tomli
+import toml
+
+from . import config
+from .brokers import get_brokermod
+from .clearing._messages import BrokerdPosition, Status
+from .data._source import Symbol
+from .log import get_logger
+from .data.types import Struct
+
+log = get_logger(__name__)
+
+
+@cm
+def open_trade_ledger(
+    broker: str,
+    account: str,
+
+) -> str:
+    '''
+    Idempotently create and read in a trade log file from the
+    ``<configuration_dir>/ledgers/`` directory.
+
+    Files are named per broker account of the form
+    ``<brokername>_<accountname>.toml``. The ``accountname`` here is the
+    name as defined in the user's ``brokers.toml`` config.
+
+    '''
+    ldir = path.join(config._config_dir, 'ledgers')
+    if not path.isdir(ldir):
+        os.makedirs(ldir)
+
+    fname = f'trades_{broker}_{account}.toml'
+    tradesfile = path.join(ldir, fname)
+
+    if not path.isfile(tradesfile):
+        log.info(
+            f'Creating new local trades ledger: {tradesfile}'
+        )
+        with open(tradesfile, 'w') as cf:
+            pass  # touch
+    with open(tradesfile, 'rb') as cf:
+        start = time.time()
+        ledger = tomli.load(cf)
+        print(f'Ledger load took {time.time() - start}s')
+        cpy = ledger.copy()
+
+    try:
+        yield cpy
+    finally:
+        if cpy != ledger:
+            # TODO: show diff output?
+            # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
+            print(f'Updating ledger for {tradesfile}:\n')
+            ledger.update(cpy)
+
+            # we write on close the mutated ledger data
+            with open(tradesfile, 'w') as cf:
+                toml.dump(ledger, cf)
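Since the yielded object is just a mutable copy of the parsed TOML, callers can treat the ledger as a plain dict and any mutation is flushed back to disk on exit; a usage sketch (broker/account names invented for illustration):

with open_trade_ledger('kraken', 'paper') as ledger:
    # ledger is a plain dict loaded from
    # <config_dir>/ledgers/trades_kraken_paper.toml
    for tid, record in ledger.items():
        print(tid, record.get('price'))
    # anything written into ``ledger`` here is persisted on exit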
+
+
+class Transaction(Struct, frozen=True):
+    # TODO: should this be ``.to`` (see below)?
+    fqsn: str
+
+    tid: Union[str, int]  # unique transaction id
+    size: float
+    price: float
+    cost: float  # commissions or other additional costs
+    dt: datetime
+    expiry: Optional[datetime] = None
+
+    # optional key normally derived from the broker
+    # backend which ensures the instrument-symbol this record
+    # is for is truly unique.
+    bsuid: Optional[Union[str, int]] = None
+
+    # optional fqsn for the source "asset"/money symbol?
+    # from: Optional[str] = None
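For reference, a single clear event lands in this struct roughly as follows (all field values invented for illustration):

import pendulum

t = Transaction(
    fqsn='xbtusd.kraken',  # hypothetical fully qualified symbol name
    tid='abc123',
    size=10.0,
    price=100.0,
    cost=1.0,  # commission
    dt=pendulum.now(),
    bsuid='XXBTZUSD',  # hypothetical backend symbol id
)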
+
+
+class Position(Struct):
+    '''
+    Basic pp (personal/piker position) model with attached clearing
+    transaction history.
+
+    '''
+    symbol: Symbol
+
+    # can be +ve or -ve for long/short
+    size: float
+
+    # "breakeven price" above or below which pnl moves above and below
+    # zero for the entirety of the current "trade state".
+    ppu: float
+
+    # unique backend symbol id
+    bsuid: str
+
+    split_ratio: Optional[int] = None
+
+    # ordered record of known constituent trade messages
+    clears: dict[
+        Union[str, int, Status],  # trade id
+        dict[str, Any],  # transaction history summaries
+    ] = {}
+    first_clear_dt: Optional[datetime] = None
+
+    expiry: Optional[datetime] = None
+
+    def to_dict(self) -> dict:
+        return {
+            f: getattr(self, f)
+            for f in self.__struct_fields__
+        }
+
+    def to_pretoml(self) -> tuple[str, dict]:
+        '''
+        Prep this position's data contents for export to toml including
+        re-structuring of the ``.clears`` table to an array of
+        inline-subtables for better ``pps.toml`` compactness.
+
+        '''
+        d = self.to_dict()
+        clears = d.pop('clears')
+        expiry = d.pop('expiry')
+
+        if self.split_ratio is None:
+            d.pop('split_ratio')
+
+        # should be obvious from clears/event table
+        d.pop('first_clear_dt')
+
+        # TODO: we need to figure out how to have one top level
+        # listing venue here even when the backend isn't providing
+        # it via the trades ledger..
+        # drop symbol obj in serialized form
+        s = d.pop('symbol')
+        fqsn = s.front_fqsn()
+
+        if self.expiry is None:
+            d.pop('expiry', None)
+        elif expiry:
+            d['expiry'] = str(expiry)
+
+        toml_clears_list = []
+
+        # reverse sort so latest clears are at top of section?
+        for tid, data in sorted(
+            list(clears.items()),
+
+            # sort by datetime
+            key=lambda item: item[1]['dt'],
+        ):
+            inline_table = toml.TomlDecoder().get_empty_inline_table()
+
+            # serialize datetime to parsable `str`
+            inline_table['dt'] = str(data['dt'])
+
+            # insert optional clear fields in column order
+            for k in ['ppu', 'accum_size']:
+                val = data.get(k)
+                if val:
+                    inline_table[k] = val
+
+            # insert required fields
+            for k in ['price', 'size', 'cost']:
+                inline_table[k] = data[k]
+
+            inline_table['tid'] = tid
+            toml_clears_list.append(inline_table)
+
+        d['clears'] = toml_clears_list
+
+        return fqsn, d
+
+    def ensure_state(self) -> None:
+        '''
+        Audit the `.size` and `.ppu` local instance vars against the
+        clears table calculations; if they differ, log warnings to
+        console and update to the calc-ed values.
+
+        '''
+        clears = list(self.clears.values())
+        self.first_clear_dt = min(list(entry['dt'] for entry in clears))
+        last_clear = clears[-1]
+
+        csize = self.calc_size()
+        accum = last_clear['accum_size']
+        if not self.expired():
+            if (
+                csize != accum
+                and csize != round(accum * (self.split_ratio or 1))
+            ):
+                raise ValueError(f'Size mismatch: {csize}')
+        else:
+            assert csize == 0, 'Contract is expired but non-zero size?'
+
+        if self.size != csize:
+            log.warning(
+                'Position state mismatch:\n'
+                f'{self.size} => {csize}'
+            )
+            self.size = csize
+
+        cppu = self.calc_ppu()
+        ppu = last_clear['ppu']
+        if (
+            cppu != ppu
+            and self.split_ratio is not None
+            # handle any split info entered (for now) manually by user
+            and cppu != (ppu / self.split_ratio)
+        ):
+            raise ValueError(f'PPU mismatch: {cppu}')
+
+        if self.ppu != cppu:
+            log.warning(
+                'Position state mismatch:\n'
+                f'{self.ppu} => {cppu}'
+            )
+            self.ppu = cppu
+
+    def update_from_msg(
+        self,
+        msg: BrokerdPosition,
+
+    ) -> None:
+
+        # XXX: better place to do this?
+        symbol = self.symbol
+
+        lot_size_digits = symbol.lot_size_digits
+        ppu, size = (
+            round(
+                msg['avg_price'],
+                ndigits=symbol.tick_size_digits
+            ),
+            round(
+                msg['size'],
+                ndigits=lot_size_digits
+            ),
+        )
+
+        self.ppu = ppu
+        self.size = size
+
+    @property
+    def dsize(self) -> float:
+        '''
+        The "dollar" size of the pp, normally in trading (fiat) unit
+        terms.
+
+        '''
+        return self.ppu * self.size
+
+    # TODO: idea: "real LIFO" dynamic positioning.
+    # - when a trade takes place where the pnl for
+    # the (set of) trade(s) is below the breakeven price
+    # it may be that the trader took a +ve pnl on a short(er)
+    # term trade in the same account.
+    # - in this case we could recalc the be price to
+    # be reverted back to it's prior value before the nearest term
+    # trade was opened.?
+    # def lifo_price() -> float:
+    #     ...
+
+    def calc_ppu(
+        self,
+        # include transaction cost in breakeven price
+        # and presume the worst case of the same cost
+        # to exit this transaction (even though in reality
+        # it will be dynamic based on exit strategy).
+        cost_scalar: float = 2,
+
+    ) -> float:
+        '''
+        Compute the "price-per-unit" price for the given non-zero sized
+        rolling position.
+
+        The recurrence relation which computes this (exponential) mean
+        per new clear which **increases** the accumulative position
+        size is:
+
+        ppu[-1] = (
+            ppu[-2] * accum_size[-2]
+            +
+            price * clear_size
+        ) / accum_size[-1]
+
+        where the `cost_basis` for the current step is simply the
+        price * size of the most recent clearing transaction.
+
+        '''
+        asize_h: list[float] = []  # historical accumulative size
+        ppu_h: list[float] = []  # historical price-per-unit
+
+        clears = list(self.clears.items())
+
+        for i, (tid, entry) in enumerate(clears):
+
+            clear_size = entry['size']
+            clear_price = entry['price']
+
+            last_accum_size = asize_h[-1] if asize_h else 0
+            accum_size = last_accum_size + clear_size
+            accum_sign = copysign(1, accum_size)
+
+            sign_change: bool = False
+
+            if accum_size == 0:
+                ppu_h.append(0)
+                asize_h.append(0)
+                continue
+
+            # test if the pp somehow went "past" a net zero size state
+            # resulting in a change of the "sign" of the size (+ve for
+            # long, -ve for short).
+            sign_change = (
+                copysign(1, last_accum_size) + accum_sign == 0
+                and last_accum_size != 0
+            )
+
+            # since we passed the net-zero-size state the new size
+            # after sum should be the remaining size the new
+            # "direction" (aka, long vs. short) for this clear.
+            if sign_change:
+                clear_size = accum_size
+                abs_diff = abs(accum_size)
+                asize_h.append(0)
+                ppu_h.append(0)
+
+            else:
+                # old size minus the new size gives us size diff with
+                # +ve -> increase in pp size
+                # -ve -> decrease in pp size
+                abs_diff = abs(accum_size) - abs(last_accum_size)
+
+            # XXX: LIFO breakeven price update. only an increase in size
+            # of the position contributes the breakeven price,
+            # a decrease does not (i.e. the position is being made
+            # smaller).
+            # abs_clear_size = abs(clear_size)
+            abs_new_size = abs(accum_size)
+
+            if abs_diff > 0:
+
+                cost_basis = (
+                    # cost basis for this clear
+                    clear_price * abs(clear_size)
+                    +
+                    # transaction cost
+                    accum_sign * cost_scalar * entry['cost']
+                )
+
+                if asize_h:
+                    size_last = abs(asize_h[-1])
+                    cb_last = ppu_h[-1] * size_last
+                    ppu = (cost_basis + cb_last) / abs_new_size
+
+                else:
+                    ppu = cost_basis / abs_new_size
+
+                ppu_h.append(ppu)
+                asize_h.append(accum_size)
+
+            else:
+                # on "exit" clears from a given direction,
+                # only the size changes not the price-per-unit
+                # need to be updated since the ppu remains constant
+                # and gets weighted by the new size.
+                asize_h.append(accum_size)
+                ppu_h.append(ppu_h[-1])
+
+        final_ppu = ppu_h[-1] if ppu_h else 0
+
+        # handle any split info entered (for now) manually by user
+        if self.split_ratio is not None:
+            final_ppu /= self.split_ratio
+
+        return final_ppu
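A worked two-clear example makes the recurrence concrete. This standalone sketch only covers size-increasing clears (no sign flips or exit clears) and uses invented fills:

clears = [
    {'size': 10, 'price': 100.0, 'cost': 1.0},
    {'size': 10, 'price': 110.0, 'cost': 1.0},
]
cost_scalar = 2
accum, ppu = 0.0, 0.0
for c in clears:
    new_accum = accum + c['size']
    # entry cost basis includes the (doubled) transaction cost
    cost_basis = c['price'] * abs(c['size']) + cost_scalar * c['cost']
    ppu = (cost_basis + ppu * abs(accum)) / abs(new_accum)
    accum = new_accum

print(accum, ppu)  # 20.0 105.2: (100*10 + 2) and (110*10 + 2) over 20 units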
+
+    def expired(self) -> bool:
+        '''
+        Predicate which checks if the contract/instrument is past its expiry.
+
+        '''
+        return bool(self.expiry) and self.expiry < now()
+
+    def calc_size(self) -> float:
+        '''
+        Calculate the unit size of this position in the destination
+        asset using the clears/trade event table; zero if expired.
+
+        '''
+        size: float = 0
+
+        # time-expired pps (normally derivatives) are "closed"
+        # and have a zero size.
+        if self.expired():
+            return 0
+
+        for tid, entry in self.clears.items():
+            size += entry['size']
+
+        if self.split_ratio is not None:
+            size = round(size * self.split_ratio)
+
+        return size
+
+    def minimize_clears(
+        self,
+
+    ) -> dict[str, dict]:
+        '''
+        Minimize the position's clears entries by removing
+        all transactions before the last net zero size to avoid
+        unnecessary history irrelevant to the current pp state.
+
+        '''
+        size: float = 0
+        clears_since_zero: list[tuple[str, dict]] = []
+
+        # TODO: we might just want to always do this when iterating
+        # a ledger? keep a state of the last net-zero and only do the
+        # full iterate when no state was stashed?
+
+        # scan for the last "net zero" position by iterating
+        # transactions until the next net-zero size, rinse, repeat.
+        for tid, clear in self.clears.items():
+            size += clear['size']
+            clears_since_zero.append((tid, clear))
+
+            if size == 0:
+                clears_since_zero.clear()
+
+        self.clears = dict(clears_since_zero)
+        return self.clears
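The effect is easiest to see on a fill sequence that nets to zero mid-stream; a standalone sketch of the same scan:

fills = [('t1', 10), ('t2', -10), ('t3', 5)]
size = 0
since_zero: list[str] = []
for tid, sz in fills:
    size += sz
    since_zero.append(tid)
    if size == 0:
        # running size hit net-zero: everything so far is history
        since_zero.clear()

print(since_zero)  # ['t3']: clears before the last net-zero are dropped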
+
+    def add_clear(
+        self,
+        t: Transaction,
+    ) -> dict:
+        '''
+        Update clearing table and populate rolling ppu and accumulative
+        size in both the clears entry and local attrs state.
+
+        '''
+        clear = self.clears[t.tid] = {
+            'cost': t.cost,
+            'price': t.price,
+            'size': t.size,
+            'dt': t.dt,
+        }
+
+        # TODO: compute these incrementally instead
+        # of re-looping through each time resulting in O(n**2)
+        # behaviour..?
+
+        # NOTE: we compute these **after** adding the entry in order to
+        # make the recurrence relation math work inside
+        # ``.calc_size()``.
+        self.size = clear['accum_size'] = self.calc_size()
+        self.ppu = clear['ppu'] = self.calc_ppu()
+
+        return clear
+
+    def sugest_split(self) -> float:
+        ...
+
+
+class PpTable(Struct):
+
+    brokername: str
+    acctid: str
+    pps: dict[str, Position]
+    conf: Optional[dict] = {}
+
+    def update_from_trans(
+        self,
+        trans: dict[str, Transaction],
+        cost_scalar: float = 2,
+
+    ) -> dict[str, Position]:
+
+        pps = self.pps
+        updated: dict[str, Position] = {}
+
+        # lifo update all pps from records
+        for tid, t in trans.items():
+
+            pp = pps.setdefault(
+                t.bsuid,
+
+                # if no existing pp, allocate fresh one.
+                Position(
+                    Symbol.from_fqsn(
+                        t.fqsn,
+                        info={},
+                    ),
+                    size=0.0,
+                    ppu=0.0,
+                    bsuid=t.bsuid,
+                    expiry=t.expiry,
+                )
+            )
+            clears = pp.clears
+            if clears:
+                first_clear_dt = pp.first_clear_dt
+
+                # don't do updates for ledger records we already have
+                # included in the current pps state.
+                if (
+                    t.tid in clears
+                    or first_clear_dt and t.dt < first_clear_dt
+                ):
+                    # NOTE: likely you'll see repeats of the same
+                    # ``Transaction`` passed in here if/when you are restarting
+                    # a ``brokerd.ib`` where the API will re-report trades from
+                    # the current session, so we need to make sure we don't
+                    # "double count" these in pp calculations.
+                    continue
+
+            # update clearing table
+            pp.add_clear(t)
+            updated[t.bsuid] = pp
+
+        # minimize clears tables and update sizing.
+        for bsuid, pp in updated.items():
+            pp.ensure_state()
+
+        return updated
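Together with ``open_pps()`` further down, a typical incremental update pass looks roughly like this (broker/account names invented; ``records`` is a tid-keyed ``dict[str, Transaction]``):

with open_pps('kraken', 'paper', write_on_exit=False) as table:
    updated = table.update_from_trans(records)
    for bsuid, pp in updated.items():
        print(bsuid, pp.size, pp.ppu)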
+
+    def dump_active(
+        self,
+    ) -> tuple[
+        dict[str, Position],
+        dict[str, Position]
+    ]:
+        '''
+        Iterate all tabulated positions, render active positions to
+        a ``dict`` format amenable to serialization (via TOML) and drop
+        from state (``.pps``) as well as return in a ``dict`` all
+        ``Position``s which have recently closed.
+
+        '''
+        # NOTE: newly closed positions are also important to report/return
+        # since a consumer, like an order mode UI ;), might want to react
+        # based on the closure (for example removing the breakeven line
+        # and clearing the entry from any lists/monitors).
+        closed_pp_objs: dict[str, Position] = {}
+        open_pp_objs: dict[str, Position] = {}
+
+        pp_objs = self.pps
+        for bsuid in list(pp_objs):
+            pp = pp_objs[bsuid]
+
+            # XXX: debug hook for size mismatches
+            # qqqbsuid = 320227571
+            # if bsuid == qqqbsuid:
+            #     breakpoint()
+
+            pp.ensure_state()
+
+            if (
+                # "net-zero" is a "closed" position
+                pp.size == 0
+
+                # time-expired pps (normally derivatives) are "closed"
+                or (pp.expiry and pp.expiry < now())
+            ):
+                # for expired cases
+                pp.size = 0
+
+                # NOTE: we DO NOT pop the pp here since it can still be
+                # used to check for duplicate clears that may come in as
+                # new transaction from some backend API and need to be
+                # ignored; the closed positions won't be written to the
+                # ``pps.toml`` since ``pp_active_entries`` above is what's
+                # written.
+                closed_pp_objs[bsuid] = pp
+
+            else:
+                open_pp_objs[bsuid] = pp
+
+        return open_pp_objs, closed_pp_objs
+
+    def to_toml(
+        self,
+    ) -> dict[str, Any]:
+
+        active, closed = self.dump_active()
+
+        # ONLY dict-serialize all active positions; those that are closed
+        # we don't store in the ``pps.toml``.
+        to_toml_dict = {}
+
+        for bsuid, pos in active.items():
+
+            # keep the minimal amount of clears that make up this
+            # position since the last net-zero state.
+            pos.minimize_clears()
+            pos.ensure_state()
+
+            # serialize to pre-toml form
+            fqsn, asdict = pos.to_pretoml()
+            log.info(f'Updating active pp: {fqsn}')
+
+            # XXX: ugh, it's cuz we push the section under
+            # the broker name.. maybe we need to rethink this?
+            brokerless_key = fqsn.removeprefix(f'{self.brokername}.')
+            to_toml_dict[brokerless_key] = asdict
+
+        return to_toml_dict
+
+    def write_config(self) -> None:
+        '''
+        Write the current position table to the user's ``pps.toml``.
+
+        '''
+        # TODO: show diff output?
+        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
+        print(f'Updating ``pps.toml`` for {path}:\n')
+
+        # active, closed_pp_objs = table.dump_active()
+        pp_entries = self.to_toml()
+        self.conf[self.brokername][self.acctid] = pp_entries
+
+        # TODO: why tf haven't they already done this for inline
+        # tables smh..
+        enc = PpsEncoder(preserve=True)
+        # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
+        enc.dump_funcs[
+            toml.decoder.InlineTableDict
+        ] = enc.dump_inline_table
+
+        config.write(
+            self.conf,
+            'pps',
+            encoder=enc,
+        )
+
+
+def load_pps_from_ledger(
+
+    brokername: str,
+    acctname: str,
+
+    # post normalization filter on ledger entries to be processed
+    filter_by: Optional[list[dict]] = None,
+
+) -> tuple[
+    dict[str, Transaction],
+    dict[str, Position],
+]:
+    '''
+    Open a ledger file by broker name and account and read in and
+    process any trade records into our normalized ``Transaction`` form
+    and then update the equivalent ``PpTable`` and deliver the two
+    bsuid-mapped dict-sets of the transactions and pps.
+
+    '''
+    with (
+        open_trade_ledger(brokername, acctname) as ledger,
+        open_pps(brokername, acctname) as table,
+    ):
+        if not ledger:
+            # null case, no ledger file with content
+            return {}, {}
+
+        mod = get_brokermod(brokername)
+        src_records: dict[str, Transaction] = mod.norm_trade_records(ledger)
+
+        if filter_by:
+            records = {}
+            bsuids = set(filter_by)
+            for tid, r in src_records.items():
+                if r.bsuid in bsuids:
+                    records[tid] = r
+        else:
+            records = src_records
+
+        updated = table.update_from_trans(records)
+
+    return records, updated
+
+
+# TODO: instead see if we can hack tomli and tomli-w to do the same:
+# - https://github.com/hukkin/tomli
+# - https://github.com/hukkin/tomli-w
+class PpsEncoder(toml.TomlEncoder):
+    '''
+    Special "styled" encoder that makes a ``pps.toml`` readable and
+    compact by putting `.clears` tables inline and everything else
+    flat-ish.
+
+    '''
+    separator = ','
+
+    def dump_list(self, v):
+        '''
+        Dump an inline list with a newline after every element and
+        with consideration for denoted inline table types.
+
+        '''
+        retval = "[\n"
+        for u in v:
+            if isinstance(u, toml.decoder.InlineTableDict):
+                out = self.dump_inline_table(u)
+            else:
+                out = str(self.dump_value(u))
+
+            retval += " " + out + "," + "\n"
+        retval += "]"
+        return retval
+
+    def dump_inline_table(self, section):
+        """Preserve inline table in its compact syntax instead of expanding
+        into subsection.
+        https://github.com/toml-lang/toml#user-content-inline-table
+        """
+        val_list = []
+        for k, v in section.items():
+            # if isinstance(v, toml.decoder.InlineTableDict):
+            if isinstance(v, dict):
+                val = self.dump_inline_table(v)
+            else:
+                val = str(self.dump_value(v))
+
+            val_list.append(k + " = " + val)
+
+        retval = "{ " + ", ".join(val_list) + " }"
+        return retval
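The net effect of ``dump_inline_table()`` together with the ``to_pretoml()`` restructuring above is one compact line per clear event; an illustrative ``pps.toml`` entry (section path and all field values invented):

[kraken.paper."xbtusd.kraken"]
size = 5.0
ppu = 100.2
bsuid = "XXBTZUSD"
clears = [
 { dt = "2022-08-31T12:00:00+00:00", ppu = 100.2, accum_size = 5.0, price = 100.0, size = 5.0, cost = 1.0, tid = "abc123" },
]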
+
+    def dump_sections(self, o, sup):
+        retstr = ""
+        if sup != "" and sup[-1] != ".":
+            sup += '.'
+        retdict = self._dict()
+        arraystr = ""
+        for section in o:
+            qsection = str(section)
+            value = o[section]
+
+            if not re.match(r'^[A-Za-z0-9_-]+$', section):
+                qsection = toml.encoder._dump_str(section)
+
+            # arrayoftables = False
+            if (
+                self.preserve
+                and isinstance(value, toml.decoder.InlineTableDict)
+            ):
+                retstr += (
+                    qsection
+                    +
+                    " = "
+                    +
+                    self.dump_inline_table(o[section])
+                    +
+                    '\n'  # only on the final terminating left brace
+                )
+
+            # XXX: this code i'm pretty sure is just blatantly bad
+            # and/or wrong..
+            # if isinstance(o[section], list):
+            #     for a in o[section]:
+            #         if isinstance(a, dict):
+            #             arrayoftables = True
+            #     if arrayoftables:
+            #         for a in o[section]:
+            #             arraytabstr = "\n"
+            #             arraystr += "[[" + sup + qsection + "]]\n"
+            #             s, d = self.dump_sections(a, sup + qsection)
+            #             if s:
+            #                 if s[0] == "[":
+            #                     arraytabstr += s
+            #                 else:
+            #                     arraystr += s
+            #             while d:
+            #                 newd = self._dict()
+            #                 for dsec in d:
+            #                     s1, d1 = self.dump_sections(d[dsec], sup +
+            #                                                 qsection + "." +
+            #                                                 dsec)
+            #                     if s1:
+            #                         arraytabstr += ("[" + sup + qsection +
+            #                                         "." + dsec + "]\n")
+            #                         arraytabstr += s1
+            #                     for s1 in d1:
+            #                         newd[dsec + "." + s1] = d1[s1]
+            #                 d = newd
+            #             arraystr += arraytabstr
+
+            elif isinstance(value, dict):
+                retdict[qsection] = o[section]
+
+            elif o[section] is not None:
+                retstr += (
+                    qsection
+                    +
+                    " = "
+                    +
+                    str(self.dump_value(o[section]))
+                )
+
+                # if not isinstance(value, dict):
+                if not isinstance(value, toml.decoder.InlineTableDict):
+                    # inline tables should not contain newlines:
+                    # https://toml.io/en/v1.0.0#inline-table
+                    retstr += '\n'
+
+            else:
+                raise ValueError(value)
+
+        retstr += arraystr
+        return (retstr, retdict)
+
+
+@cm
+def open_pps(
+    brokername: str,
+    acctid: str,
+    write_on_exit: bool = True,
+
+) -> PpTable:
+    '''
+    Read out broker-specific position entries from
+    incremental update file: ``pps.toml``.
+
+    '''
+    conf, path = config.load('pps')
+    brokersection = conf.setdefault(brokername, {})
+    pps = brokersection.setdefault(acctid, {})
+
+    # TODO: ideally we can pass in an existing
+    # pps state to this right? such that we
+    # don't have to do a ledger reload all the
+    # time.. a couple ideas I can think of,
+    # - mirror this in some client side actor which
+    #   does the actual ledger updates (say the paper
+    #   engine proc if we decide to always spawn it?),
+    # - do diffs against updates from the ledger writer
+    #   actor and the in-mem state here?
+
+    pp_objs = {}
+    table = PpTable(
+        brokername,
+        acctid,
+        pp_objs,
+        conf=conf,
+    )
+
+    # unmarshal/load ``pps.toml`` config entries into object form
+    # and update `PpTable` obj entries.
+    for fqsn, entry in pps.items():
+        bsuid = entry['bsuid']
+
+        # convert clears sub-tables (only in this form
+        # for toml re-presentation) back into a master table.
+        clears_list = entry['clears']
+
+        # index clears entries in "object" form by tid in a top
+        # level dict instead of a list (as is presented in our
+        # ``pps.toml``).
+        clears = pp_objs.setdefault(bsuid, {})
+
+        # TODO: should we make a ``Struct`` for clear/event entries?
+        # convert "clear events table" from the toml config (list of
+        # a dicts) and load it into object form for use in position
+        # processing of new clear events.
+        trans: list[Transaction] = []
+
+        for clears_table in clears_list:
+            tid = clears_table.pop('tid')
+            dtstr = clears_table['dt']
+            dt = pendulum.parse(dtstr)
+            clears_table['dt'] = dt
+            trans.append(Transaction(
+                fqsn=bsuid,
+                bsuid=bsuid,
+                tid=tid,
+                size=clears_table['size'],
+                price=clears_table['price'],
+                cost=clears_table['cost'],
+                dt=dt,
+            ))
+            clears[tid] = clears_table
+
+        size = entry['size']
+
+        # TODO: remove, but handle the old field name for now
+        ppu = entry.get('ppu', entry.get('be_price', 0))
+        split_ratio = entry.get('split_ratio')
+
+        expiry = entry.get('expiry')
+        if expiry:
+            expiry = pendulum.parse(expiry)
+
+        pp = pp_objs[bsuid] = Position(
+            Symbol.from_fqsn(fqsn, info={}),
+            size=size,
+            ppu=ppu,
+            split_ratio=split_ratio,
+            expiry=expiry,
+            bsuid=entry['bsuid'],
+        )
+
+        # XXX: super critical, we need to be sure to include
+        # all pps.toml clears to avoid reusing clears that were
+        # already included in the current incremental update
+        # state, since today's records may have already been
+        # processed!
+        for t in trans:
+            pp.add_clear(t)
+
+        # audit entries loaded from toml
+        pp.ensure_state()
+
+    try:
+        yield table
+    finally:
+        if write_on_exit:
+            table.write_config()
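Since ``write_on_exit`` defaults to true, a read-only inspection pass should opt out; for example (broker/account names invented):

with open_pps('kraken', 'paper', write_on_exit=False) as table:
    open_objs, closed = table.dump_active()
    print(f'{len(open_objs)} open, {len(closed)} recently closed pps')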
+
+
+if __name__ == '__main__':
+    import sys
+
+    args = sys.argv
+    assert len(args) > 1, 'Specify account(s) from `brokers.toml`'
+    args = args[1:]
+    for acctid in args:
+        broker, name = acctid.split('.')
+        trans, updated_pps = load_pps_from_ledger(broker, name)
+        print(
+            f'Processing transactions into pps for {broker}:{acctid}\n'
+            f'{pformat(trans)}\n\n'
+            f'{pformat(updated_pps)}'
+        )
@@ -32,16 +32,22 @@ def mk_marker_path(
     style: str,

 ) -> QGraphicsPathItem:
-    """Add a marker to be displayed on the line wrapped in a ``QGraphicsPathItem``
-    ready to be placed using scene coordinates (not view).
+    '''
+    Add a marker to be displayed on the line wrapped in
+    a ``QGraphicsPathItem`` ready to be placed using scene coordinates
+    (not view).

     **Arguments**
     style        String indicating the style of marker to add:
                  ``'<|'``, ``'|>'``, ``'>|'``, ``'|<'``, ``'<|>'``,
                  ``'>|<'``, ``'^'``, ``'v'``, ``'o'``
-    size         Size of the marker in pixels.

-    """
+    This code is taken nearly verbatim from the
+    `InfiniteLine.addMarker()` method but does not attempt to be aware
+    of low(er) level graphics controls and expects for the output
+    polygon to be applied to a ``QGraphicsPathItem``.

+    '''
     path = QtGui.QPainterPath()

     if style == 'o':

@@ -87,7 +93,8 @@ def mk_marker_path(


 class LevelMarker(QGraphicsPathItem):
-    '''An arrow marker path graphic which redraws itself
+    '''
+    An arrow marker path graphic which redraws itself
     to the specified view coordinate level on each paint cycle.

     '''

@@ -104,7 +111,8 @@ class LevelMarker(QGraphicsPathItem):

         # get polygon and scale
         super().__init__()
-        self.scale(size, size)
+        # self.setScale(size, size)
+        self.setScale(size)

         # internally generates path
         self._style = None

@@ -114,6 +122,7 @@ class LevelMarker(QGraphicsPathItem):

         self.get_level = get_level
         self._on_paint = on_paint
+
         self.scene_x = lambda: chart.marker_right_points()[1]
         self.level: float = 0
         self.keep_in_view = keep_in_view

@@ -149,12 +158,9 @@ class LevelMarker(QGraphicsPathItem):
     def w(self) -> float:
         return self.path_br().width()

-    def position_in_view(
-        self,
-        # level: float,
-
-    ) -> None:
-        '''Show a pp off-screen indicator for a level label.
+    def position_in_view(self) -> None:
+        '''
+        Show a pp off-screen indicator for a level label.

         This is like in fps games where you have a gps "nav" indicator
         but your teammate is outside the range of view, except in 2D, on

@@ -162,7 +168,6 @@ class LevelMarker(QGraphicsPathItem):

         '''
         level = self.get_level()
-
         view = self.chart.getViewBox()
         vr = view.state['viewRange']
         ymn, ymx = vr[1]

@@ -186,7 +191,6 @@ class LevelMarker(QGraphicsPathItem):
             )
-
         elif level < ymn:  # pin to bottom of view

             self.setPos(
                 QPointF(
                     x,

@@ -211,7 +215,8 @@ class LevelMarker(QGraphicsPathItem):
         w: QtWidgets.QWidget

     ) -> None:
-        '''Core paint which we override to always update
+        '''
+        Core paint which we override to always update
         our marker position in scene coordinates from a
         view coordinate "level".

@@ -235,11 +240,12 @@ def qgo_draw_markers(
     right_offset: float,

 ) -> float:
-    """Paint markers in ``pg.GraphicsItem`` style by first
+    '''
+    Paint markers in ``pg.GraphicsItem`` style by first
     removing the view transform for the painter, drawing the markers
     in scene coords, then restoring the view coords.

-    """
+    '''
     # paint markers in native coordinate system
     orig_tr = p.transform()
@@ -19,15 +19,16 @@ Main app startup and run.

 '''
 from functools import partial
+from types import ModuleType

 from PyQt5.QtCore import QEvent
 import trio

 from .._daemon import maybe_spawn_brokerd
-from ..brokers import get_brokermod
 from . import _event
 from ._exec import run_qtractor
 from ..data.feed import install_brokerd_search
+from ..data._source import unpack_fqsn
 from . import _search
 from ._chart import GodWidget
 from ..log import get_logger

@@ -36,27 +37,26 @@ log = get_logger(__name__)


 async def load_provider_search(
-    broker: str,
+    brokermod: str,
     loglevel: str,

 ) -> None:

-    log.info(f'loading brokerd for {broker}..')
+    name = brokermod.name
+    log.info(f'loading brokerd for {name}..')

     async with (

         maybe_spawn_brokerd(
-            broker,
+            name,
             loglevel=loglevel
         ) as portal,

         install_brokerd_search(
             portal,
-            get_brokermod(broker),
+            brokermod,
         ),
     ):

         # keep search engine stream up until cancelled
         await trio.sleep_forever()


@@ -66,8 +66,8 @@ async def _async_main(
     # implicit required argument provided by ``qtractor_run()``
     main_widget: GodWidget,

-    sym: str,
-    brokernames: str,
+    syms: list[str],
+    brokers: dict[str, ModuleType],
     loglevel: str,

 ) -> None:

@@ -78,6 +78,8 @@ async def _async_main(

     """
     from . import _display
+    from ._pg_overrides import _do_overrides
+    _do_overrides()

     godwidget = main_widget

@@ -97,6 +99,11 @@ async def _async_main(
     sbar = godwidget.window.status_bar
     starting_done = sbar.open_status('starting ze sexy chartz')

+    needed_brokermods: dict[str, ModuleType] = {}
+    for fqsn in syms:
+        brokername, *_ = unpack_fqsn(fqsn)
+        needed_brokermods[brokername] = brokers[brokername]
+
     async with (
         trio.open_nursery() as root_n,
     ):

@@ -107,17 +114,20 @@ async def _async_main(
         # setup search widget and focus main chart view at startup
         # search widget is a singleton alongside the godwidget
         search = _search.SearchWidget(godwidget=godwidget)
-        search.bar.unfocus()
-        godwidget.hbox.addWidget(search)
+        # search.bar.unfocus()
+        # godwidget.hbox.addWidget(search)
         godwidget.search = search

+        symbols: list[str] = []
+
+        for sym in syms:
             symbol, _, provider = sym.rpartition('.')
+            symbols.append(symbol)

         # this internally starts a ``display_symbol_data()`` task above
-        order_mode_ready = await godwidget.load_symbol(
+        order_mode_ready = await godwidget.load_symbols(
             provider,
-            symbol,
+            symbols,
             loglevel
         )

@@ -135,8 +145,12 @@ async def _async_main(
         ):
             # load other providers into search **after**
             # the chart's select cache
-            for broker in brokernames:
-                root_n.start_soon(load_provider_search, broker, loglevel)
+            for brokername, mod in needed_brokermods.items():
+                root_n.start_soon(
+                    load_provider_search,
+                    mod,
+                    loglevel,
+                )

         await order_mode_ready.wait()

@@ -165,8 +179,8 @@ async def _async_main(


 def _main(
-    sym: str,
-    brokernames: [str],
+    syms: list[str],
+    brokermods: list[ModuleType],
     piker_loglevel: str,
     tractor_kwargs,
 ) -> None:

@@ -177,7 +191,11 @@ def _main(
     '''
     run_qtractor(
         func=_async_main,
-        args=(sym, brokernames, piker_loglevel),
-        main_widget=GodWidget,
+        args=(
+            syms,
+            {mod.name: mod for mod in brokermods},
+            piker_loglevel,
+        ),
+        main_widget_type=GodWidget,
         tractor_kwargs=tractor_kwargs,
     )
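The grouping above spawns one search task per unique broker rather than one per symbol; a simplified stand-in for the fqsn unpacking (not the real `unpack_fqsn()` parser, and the symbol names are invented):

syms = ['xbtusd.kraken', 'ethusd.kraken', 'btcusdt.binance']
needed: dict[str, str] = {}
for fqsn in syms:
    # simplified: treat the trailing fqsn element as the broker name
    brokername = fqsn.rsplit('.', 1)[-1]
    needed[brokername] = brokername

print(list(needed))  # ['kraken', 'binance'] -> one search task each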
@@ -39,12 +39,17 @@ class Axis(pg.AxisItem):
     '''
     A better axis that sizes tick contents considering font size.

+    Also includes tick-value lru caching originally proposed upstream
+    but never accepted:
+    https://github.com/pyqtgraph/pyqtgraph/pull/2160
+
     '''
     def __init__(
         self,
         linkedsplits,
         typical_max_str: str = '100 000.000',
         text_color: str = 'bracket',
+        lru_cache_tick_strings: bool = True,
         **kwargs

     ) -> None:

@@ -91,6 +96,34 @@ class Axis(pg.AxisItem):
         # size the pertinent axis dimension to a "typical value"
         self.size_to_values()

+        # NOTE: requires the ``.tickValues()`` override seen below.
+        if lru_cache_tick_strings:
+            self.tickStrings = lru_cache(
+                maxsize=2**20
+            )(self.tickStrings)
+
+    # NOTE: only overridden to cast tick values entries into tuples
+    # for use with the lru caching.
+    def tickValues(
+        self,
+        minVal: float,
+        maxVal: float,
+        size: int,
+
+    ) -> list[tuple[float, tuple[str]]]:
+        '''
+        Repack tick values into tuples for lru caching.
+
+        '''
+        ticks = []
+        for scalar, values in super().tickValues(minVal, maxVal, size):
+            ticks.append((
+                scalar,
+                tuple(values),  # this
+            ))
+
+        return ticks
+
     @property
     def text_color(self) -> str:
         return self._text_color
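The ``tickValues()`` override exists because `functools.lru_cache` requires hashable arguments, while pyqtgraph normally hands `tickStrings()` a plain list of tick values which cannot be a cache key; the failure mode and the tuple fix in isolation:

from functools import lru_cache

@lru_cache(maxsize=2**20)
def fmt_ticks(values: tuple) -> list[str]:
    return [f'{v:,.2f}' for v in values]

fmt_ticks((1000.0, 2000.0))     # tuple args: cached fine
# fmt_ticks([1000.0, 2000.0])  # TypeError: unhashable type: 'list'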
@@ -19,7 +19,11 @@ High level chart-widget apis.

 '''
 from __future__ import annotations
-from typing import Optional, TYPE_CHECKING
+from typing import (
+    Iterator,
+    Optional,
+    TYPE_CHECKING,
+)

 from PyQt5 import QtCore, QtWidgets
 from PyQt5.QtCore import (

@@ -68,6 +72,9 @@ from ._forms import FieldsForm
 from .._profile import pg_profile_enabled, ms_slower_then
 from ._overlay import PlotItemOverlay
 from ._flows import Flow
+from ._search import SearchWidget
+from . import _pg_overrides as pgo
+from .._profile import Profiler

 if TYPE_CHECKING:
     from ._display import DisplayState

@@ -85,6 +92,9 @@ class GodWidget(QWidget):
     modify them.

     '''
+    search: SearchWidget
+    mode_name: str = 'god'

     def __init__(

         self,

@@ -94,6 +104,8 @@ class GodWidget(QWidget):

         super().__init__(parent)

+        self.search: Optional[SearchWidget] = None
+
         self.hbox = QHBoxLayout(self)
         self.hbox.setContentsMargins(0, 0, 0, 0)
         self.hbox.setSpacing(6)

@@ -115,7 +127,10 @@ class GodWidget(QWidget):
         # self.vbox.addLayout(self.hbox)

         self._chart_cache: dict[str, LinkedSplits] = {}
-        self.linkedsplits: Optional[LinkedSplits] = None
+        self.hist_linked: Optional[LinkedSplits] = None
+        self.rt_linked: Optional[LinkedSplits] = None
+        self._active_cursor: Optional[Cursor] = None

         # assigned in the startup func `_async_main()`
         self._root_n: trio.Nursery = None

@@ -123,6 +138,14 @@ class GodWidget(QWidget):
         self._widgets: dict[str, QWidget] = {}
         self._resizing: bool = False

+        # TODO: do we need this, when would god get resized
+        # and the window does not? Never right?!
+        # self.reg_for_resize(self)
+
+    @property
+    def linkedsplits(self) -> LinkedSplits:
+        return self.rt_linked
+
     # def init_timeframes_ui(self):
     #     self.tf_layout = QHBoxLayout()
     #     self.tf_layout.setSpacing(0)

@@ -148,25 +171,25 @@ class GodWidget(QWidget):
     def set_chart_symbol(
         self,
         symbol_key: str,  # of form <fqsn>.<providername>
-        linkedsplits: LinkedSplits,  # type: ignore
+        all_linked: tuple[LinkedSplits, LinkedSplits],  # type: ignore

     ) -> None:
         # re-sort org cache symbol list in LIFO order
         cache = self._chart_cache
         cache.pop(symbol_key, None)
-        cache[symbol_key] = linkedsplits
+        cache[symbol_key] = all_linked

     def get_chart_symbol(
         self,
         symbol_key: str,

-    ) -> LinkedSplits:  # type: ignore
+    ) -> tuple[LinkedSplits, LinkedSplits]:  # type: ignore
         return self._chart_cache.get(symbol_key)
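`set_chart_symbol()` implements a tiny LIFO (most-recently-used-last) cache on top of dict insertion order; the same trick in isolation:

cache: dict[str, object] = {}

def touch(key: str, val: object) -> None:
    # pop-then-reinsert moves the key to the end, so iteration
    # order doubles as recency order (python 3.7+ dicts).
    cache.pop(key, None)
    cache[key] = val

touch('a', 1); touch('b', 2); touch('a', 3)
print(list(cache))  # ['b', 'a']: 'a' is now most recent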
||||||
async def load_symbol(
|
async def load_symbols(
|
||||||
self,
|
self,
|
||||||
providername: str,
|
providername: str,
|
||||||
symbol_key: str,
|
symbol_keys: list[str],
|
||||||
loglevel: str,
|
loglevel: str,
|
||||||
reset: bool = False,
|
reset: bool = False,
|
||||||
|
|
||||||
|
@ -177,81 +200,121 @@ class GodWidget(QWidget):
|
||||||
         Expects a ``numpy`` structured array containing all the ohlcv fields.

         '''
+        fqsns: list[str] = []

         # our symbol key style is always lower case
-        symbol_key = symbol_key.lower()
+        for key in list(map(str.lower, symbol_keys)):

             # fully qualified symbol name (SNS i guess is what we're making?)
-        fqsn = '.'.join([symbol_key, providername])
+            fqsn = '.'.join([key, providername])
+            fqsns.append(fqsn)

-        linkedsplits = self.get_chart_symbol(fqsn)
+        # NOTE: for now we use the first symbol in the set as the "key"
+        # for the overlay of feeds on the chart.
+        group_key = fqsns[0]

+        all_linked = self.get_chart_symbol(group_key)
         order_mode_started = trio.Event()

         if not self.vbox.isEmpty():

+            # XXX: seems to make switching slower?
+            # qframe = self.hist_linked.chart.qframe
+            # if qframe.sidepane is self.search:
+            #     qframe.hbox.removeWidget(self.search)

+            for linked in [self.rt_linked, self.hist_linked]:
                 # XXX: this is CRITICAL especially with pixel buffer caching
-            self.linkedsplits.hide()
-            self.linkedsplits.unfocus()
+                linked.hide()
+                linked.unfocus()

                 # XXX: pretty sure we don't need this
                 # remove any existing plots?
                 # XXX: ahh we might want to support cache unloading..
-            # self.vbox.removeWidget(self.linkedsplits)
+                # self.vbox.removeWidget(linked)

         # switching to a new viewable chart
-        if linkedsplits is None or reset:
+        if all_linked is None or reset:
             from ._display import display_symbol_data

             # we must load a fresh linked charts set
-            linkedsplits = LinkedSplits(self)
+            self.rt_linked = rt_charts = LinkedSplits(self)
+            self.hist_linked = hist_charts = LinkedSplits(self)

             # spawn new task to start up and update new sub-chart instances
             self._root_n.start_soon(
                 display_symbol_data,
                 self,
                 providername,
-                symbol_key,
+                fqsns,
                 loglevel,
                 order_mode_started,
             )

-            self.set_chart_symbol(fqsn, linkedsplits)
-            self.vbox.addWidget(linkedsplits)
+            # self.vbox.addWidget(hist_charts)
+            self.vbox.addWidget(rt_charts)
+            self.set_chart_symbol(
+                fqsn,
+                (hist_charts, rt_charts),
+            )

-            linkedsplits.show()
-            linkedsplits.focus()
+            for linked in [hist_charts, rt_charts]:
+                linked.show()
+                linked.focus()
+
             await trio.sleep(0)

         else:
             # symbol is already loaded and ems ready
             order_mode_started.set()

-            # XXX: since the pp config is a singleton widget we have to
-            # also switch it over to the new chart's interal-layout
-            # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
-            chart = linkedsplits.chart
+            self.hist_linked, self.rt_linked = all_linked

+            for linked in all_linked:
                 # TODO:
                 # - we'll probably want per-instrument/provider state here?
                 #   change the order config form over to the new chart

                 # chart is already in memory so just focus it
-            linkedsplits.show()
-            linkedsplits.focus()
-            linkedsplits.graphics_cycle()
+                linked.show()
+                linked.focus()
+                linked.graphics_cycle()
                 await trio.sleep(0)

                 # resume feeds *after* rendering chart view asap
+                chart = linked.chart
+                if chart:
                     chart.resume_all_feeds()

                     # TODO: we need a check to see if the chart
                     # last had the xlast in view, if so then shift so it's
                     # still in view, if the user was viewing history then
                     # do nothing yah?
-            chart.default_view()
+                    self.rt_linked.chart.default_view()

-        self.linkedsplits = linkedsplits
-        symbol = linkedsplits.symbol
+        # if a history chart instance is already up then
+        # set the search widget as its sidepane.
+        hist_chart = self.hist_linked.chart
+        if hist_chart:
+            hist_chart.qframe.set_sidepane(self.search)

+            # NOTE: this is really stupid/hard to follow.
+            # we have to reposition the active position nav
+            # **AFTER** applying the search bar as a sidepane
+            # to the newly switched to symbol.
+            await trio.sleep(0)

+            # TODO: probably stick this in some kinda `LooknFeel` API?
+            for tracker in self.rt_linked.mode.trackers.values():
+                pp_nav = tracker.nav
+                if tracker.live_pp.size:
+                    pp_nav.show()
+                    pp_nav.hide_info()
+                else:
+                    pp_nav.hide()

+        # set window titlebar info
+        symbol = self.rt_linked.symbol
         if symbol is not None:
             self.window.setWindowTitle(
                 f'{symbol.front_fqsn()} '
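For reference, the "fqsn" keys built in the hunk above are plain
`<symbol>.<provider>` strings and the first one in the set doubles as the
cache key for the whole overlay group. A minimal standalone sketch of just
that keying scheme (the helper name and example symbols here are
illustrative, not part of the actual `GodWidget` API):

    def to_fqsns(symbol_keys: list[str], providername: str) -> list[str]:
        # symbol keys are always lower cased before getting the
        # provider suffix, eg. 'XBTUSD' + 'kraken' -> 'xbtusd.kraken'
        return [
            '.'.join([key, providername])
            for key in map(str.lower, symbol_keys)
        ]

    fqsns = to_fqsns(['XBTUSD', 'ETHUSD'], 'kraken')
    group_key = fqsns[0]  # 'xbtusd.kraken' keys the overlay set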
@@ -268,11 +331,23 @@ class GodWidget(QWidget):
         '''
         # go back to view-mode focus (aka chart focus)
         self.clearFocus()
-        self.linkedsplits.chart.setFocus()
+        chart = self.rt_linked.chart
+        if chart:
+            chart.setFocus()

-    def resizeEvent(self, event: QtCore.QEvent) -> None:
+    def reg_for_resize(
+        self,
+        widget: QWidget,
+    ) -> None:
+        getattr(widget, 'on_resize')
+        self._widgets[widget.mode_name] = widget
+
+    def on_win_resize(self, event: QtCore.QEvent) -> None:
         '''
-        Top level god widget resize handler.
+        Top level god widget handler from window (the real yaweh) resize
+        events such that any registered widgets which wish to be
+        notified are invoked using our pythonic `.on_resize()` method
+        api.

         Where we do UX magic to make things not suck B)

@@ -288,6 +363,28 @@ class GodWidget(QWidget):

         self._resizing = False

+    # on_resize = on_win_resize
+
+    def get_cursor(self) -> Cursor:
+        return self._active_cursor
+
+    def iter_linked(self) -> Iterator[LinkedSplits]:
+        for linked in [self.hist_linked, self.rt_linked]:
+            yield linked
+
+    def resize_all(self) -> None:
+        '''
+        Dynamic resize sequence: adjusts all sub-widgets/charts to
+        sensible default ratios of what space is detected as available
+        on the display / window.
+
+        '''
+        rt_linked = self.rt_linked
+        rt_linked.set_split_sizes()
+        self.rt_linked.resize_sidepanes()
+        self.hist_linked.resize_sidepanes(from_linked=rt_linked)
+        self.search.on_resize()
+

 class ChartnPane(QFrame):
     '''
@@ -300,9 +397,9 @@ class ChartnPane(QFrame):
     https://doc.qt.io/qt-5/qwidget.html#composite-widgets

     '''
-    sidepane: FieldsForm
+    sidepane: FieldsForm | SearchWidget
     hbox: QHBoxLayout
-    chart: Optional['ChartPlotWidget'] = None
+    chart: Optional[ChartPlotWidget] = None

     def __init__(
         self,
@@ -314,7 +411,7 @@ class ChartnPane(QFrame):

         super().__init__(parent)

-        self.sidepane = sidepane
+        self._sidepane = sidepane
         self.chart = None

         hbox = self.hbox = QHBoxLayout(self)
@@ -322,6 +419,21 @@ class ChartnPane(QFrame):
         hbox.setContentsMargins(0, 0, 0, 0)
         hbox.setSpacing(3)

+    def set_sidepane(
+        self,
+        sidepane: FieldsForm | SearchWidget,
+    ) -> None:
+
+        # add sidepane **after** chart; place it on axis side
+        self.hbox.addWidget(
+            sidepane,
+            alignment=Qt.AlignTop
+        )
+        self._sidepane = sidepane
+
+    def sidepane(self) -> FieldsForm | SearchWidget:
+        return self._sidepane
+
+
 class LinkedSplits(QWidget):
     '''
@@ -356,6 +468,7 @@ class LinkedSplits(QWidget):
         self.splitter = QSplitter(QtCore.Qt.Vertical)
         self.splitter.setMidLineWidth(0)
         self.splitter.setHandleWidth(2)
+        self.splitter.splitterMoved.connect(self.on_splitter_adjust)

         self.layout = QVBoxLayout(self)
         self.layout.setContentsMargins(0, 0, 0, 0)
@@ -368,6 +481,16 @@ class LinkedSplits(QWidget):

         self._symbol: Symbol = None

+    def on_splitter_adjust(
+        self,
+        pos: int,
+        index: int,
+    ) -> None:
+        # print(f'splitter moved pos:{pos}, index:{index}')
+        godw = self.godwidget
+        if self is godw.rt_linked:
+            godw.search.on_resize()
+
     def graphics_cycle(self, **kwargs) -> None:
         from . import _display
         ds = self.display_state
@@ -383,27 +506,31 @@ class LinkedSplits(QWidget):
         prop: Optional[float] = None,

     ) -> None:
-        '''Set the proportion of space allocated for linked subcharts.
+        '''
+        Set the proportion of space allocated for linked subcharts.

         '''
-        ln = len(self.subplots)
+        ln = len(self.subplots) or 1

         # proportion allocated to consumer subcharts
         if not prop:
-            prop = 3/8*5/8
+            prop = 3/8

-        # if ln < 2:
-        #     prop = 3/8*5/8
-        # elif ln >= 2:
-        #     prop = 3/8
+        h = self.height()
+        histview_h = h * (6/16)
+        h = h - histview_h

         major = 1 - prop
-        min_h_ind = int((self.height() * prop) / ln)
+        min_h_ind = int((h * prop) / ln)
+        sizes = [
+            int(histview_h),
+            int(h * major),
+        ]

-        sizes = [int(self.height() * major)]
+        # give all subcharts the same remaining proportional height
         sizes.extend([min_h_ind] * ln)

+        if self.godwidget.rt_linked is self:
             self.splitter.setSizes(sizes)

     def focus(self) -> None:
@ -452,13 +579,6 @@ class LinkedSplits(QWidget):
|
||||||
# add crosshair graphic
|
# add crosshair graphic
|
||||||
self.chart.addItem(self.cursor)
|
self.chart.addItem(self.cursor)
|
||||||
|
|
||||||
# axis placement
|
|
||||||
if (
|
|
||||||
_xaxis_at == 'bottom' and
|
|
||||||
'bottom' in self.chart.plotItem.axes
|
|
||||||
):
|
|
||||||
self.chart.hideAxis('bottom')
|
|
||||||
|
|
||||||
# style?
|
# style?
|
||||||
self.chart.setFrameStyle(
|
self.chart.setFrameStyle(
|
||||||
QFrame.StyledPanel |
|
QFrame.StyledPanel |
|
||||||
|
@ -504,10 +624,15 @@ class LinkedSplits(QWidget):
|
||||||
'bottom': xaxis,
|
'bottom': xaxis,
|
||||||
}
|
}
|
||||||
|
|
||||||
qframe = ChartnPane(
|
if sidepane is not False:
|
||||||
|
parent = qframe = ChartnPane(
|
||||||
sidepane=sidepane,
|
sidepane=sidepane,
|
||||||
parent=self.splitter,
|
parent=self.splitter,
|
||||||
)
|
)
|
||||||
|
else:
|
||||||
|
parent = self.splitter
|
||||||
|
qframe = None
|
||||||
|
|
||||||
cpw = ChartPlotWidget(
|
cpw = ChartPlotWidget(
|
||||||
|
|
||||||
# this name will be used to register the primary
|
# this name will be used to register the primary
|
||||||
|
@ -515,7 +640,7 @@ class LinkedSplits(QWidget):
|
||||||
name=name,
|
name=name,
|
||||||
data_key=array_key or name,
|
data_key=array_key or name,
|
||||||
|
|
||||||
parent=qframe,
|
parent=parent,
|
||||||
linkedsplits=self,
|
linkedsplits=self,
|
||||||
axisItems=axes,
|
axisItems=axes,
|
||||||
**cpw_kwargs,
|
**cpw_kwargs,
|
||||||
|
@ -523,6 +648,15 @@ class LinkedSplits(QWidget):
|
||||||
cpw.hideAxis('left')
|
cpw.hideAxis('left')
|
||||||
cpw.hideAxis('bottom')
|
cpw.hideAxis('bottom')
|
||||||
|
|
||||||
|
if (
|
||||||
|
_xaxis_at == 'bottom' and (
|
||||||
|
self.xaxis_chart
|
||||||
|
or (
|
||||||
|
not self.subplots
|
||||||
|
and self.xaxis_chart is None
|
||||||
|
)
|
||||||
|
)
|
||||||
|
):
|
||||||
if self.xaxis_chart:
|
if self.xaxis_chart:
|
||||||
self.xaxis_chart.hideAxis('bottom')
|
self.xaxis_chart.hideAxis('bottom')
|
||||||
|
|
||||||
|
@ -531,13 +665,10 @@ class LinkedSplits(QWidget):
|
||||||
# https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master
|
# https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master
|
||||||
# _ = self.xaxis_chart.removeAxis('bottom', unlink=False)
|
# _ = self.xaxis_chart.removeAxis('bottom', unlink=False)
|
||||||
# assert 'bottom' not in self.xaxis_chart.plotItem.axes
|
# assert 'bottom' not in self.xaxis_chart.plotItem.axes
|
||||||
|
|
||||||
self.xaxis_chart = cpw
|
self.xaxis_chart = cpw
|
||||||
cpw.showAxis('bottom')
|
cpw.showAxis('bottom')
|
||||||
|
|
||||||
if self.xaxis_chart is None:
|
if qframe is not None:
|
||||||
self.xaxis_chart = cpw
|
|
||||||
|
|
||||||
qframe.chart = cpw
|
qframe.chart = cpw
|
||||||
qframe.hbox.addWidget(cpw)
|
qframe.hbox.addWidget(cpw)
|
||||||
|
|
||||||
|
@ -547,13 +678,15 @@ class LinkedSplits(QWidget):
|
||||||
assert cpw.parent() == qframe
|
assert cpw.parent() == qframe
|
||||||
|
|
||||||
# add sidepane **after** chart; place it on axis side
|
# add sidepane **after** chart; place it on axis side
|
||||||
qframe.hbox.addWidget(
|
qframe.set_sidepane(sidepane)
|
||||||
sidepane,
|
# qframe.hbox.addWidget(
|
||||||
alignment=Qt.AlignTop
|
# sidepane,
|
||||||
)
|
# alignment=Qt.AlignTop
|
||||||
|
# )
|
||||||
|
|
||||||
cpw.sidepane = sidepane
|
cpw.sidepane = sidepane
|
||||||
|
|
||||||
cpw.plotItem.vb.linkedsplits = self
|
cpw.plotItem.vb.linked = self
|
||||||
cpw.setFrameStyle(
|
cpw.setFrameStyle(
|
||||||
QtWidgets.QFrame.StyledPanel
|
QtWidgets.QFrame.StyledPanel
|
||||||
# | QtWidgets.QFrame.Plain
|
# | QtWidgets.QFrame.Plain
|
||||||
|
@ -614,9 +747,8 @@ class LinkedSplits(QWidget):
|
||||||
if not _is_main:
|
if not _is_main:
|
||||||
# track by name
|
# track by name
|
||||||
self.subplots[name] = cpw
|
self.subplots[name] = cpw
|
||||||
|
if qframe is not None:
|
||||||
self.splitter.addWidget(qframe)
|
self.splitter.addWidget(qframe)
|
||||||
# scale split regions
|
|
||||||
self.set_split_sizes()
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
assert style == 'bar', 'main chart must be OHLC'
|
assert style == 'bar', 'main chart must be OHLC'
|
||||||
|
@ -642,19 +774,28 @@ class LinkedSplits(QWidget):
|
||||||
|
|
||||||
def resize_sidepanes(
|
def resize_sidepanes(
|
||||||
self,
|
self,
|
||||||
|
from_linked: Optional[LinkedSplits] = None,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
Size all sidepanes based on the OHLC "main" plot and its
|
Size all sidepanes based on the OHLC "main" plot and its
|
||||||
sidepane width.
|
sidepane width.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
if from_linked:
|
||||||
|
main_chart = from_linked.chart
|
||||||
|
else:
|
||||||
main_chart = self.chart
|
main_chart = self.chart
|
||||||
if main_chart:
|
|
||||||
|
if main_chart and main_chart.sidepane:
|
||||||
sp_w = main_chart.sidepane.width()
|
sp_w = main_chart.sidepane.width()
|
||||||
for name, cpw in self.subplots.items():
|
for name, cpw in self.subplots.items():
|
||||||
cpw.sidepane.setMinimumWidth(sp_w)
|
cpw.sidepane.setMinimumWidth(sp_w)
|
||||||
cpw.sidepane.setMaximumWidth(sp_w)
|
cpw.sidepane.setMaximumWidth(sp_w)
|
||||||
|
|
||||||
|
if from_linked:
|
||||||
|
self.chart.sidepane.setMinimumWidth(sp_w)
|
||||||
|
|
||||||
|
|
||||||
class ChartPlotWidget(pg.PlotWidget):
|
class ChartPlotWidget(pg.PlotWidget):
|
||||||
'''
|
'''
|
||||||
|
@ -681,7 +822,8 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
# a better one?
|
# a better one?
|
||||||
def mk_vb(self, name: str) -> ChartView:
|
def mk_vb(self, name: str) -> ChartView:
|
||||||
cv = ChartView(name)
|
cv = ChartView(name)
|
||||||
cv.linkedsplits = self.linked
|
# link new view to chart's view set
|
||||||
|
cv.linked = self.linked
|
||||||
return cv
|
return cv
|
||||||
|
|
||||||
def __init__(
|
def __init__(
|
||||||
|
@ -700,6 +842,7 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
|
|
||||||
static_yrange: Optional[tuple[float, float]] = None,
|
static_yrange: Optional[tuple[float, float]] = None,
|
||||||
|
|
||||||
|
parent=None,
|
||||||
**kwargs,
|
**kwargs,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
|
@ -712,16 +855,20 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
|
|
||||||
# NOTE: must be set bfore calling ``.mk_vb()``
|
# NOTE: must be set bfore calling ``.mk_vb()``
|
||||||
self.linked = linkedsplits
|
self.linked = linkedsplits
|
||||||
|
self.sidepane: Optional[FieldsForm] = None
|
||||||
|
|
||||||
# source of our custom interactions
|
# source of our custom interactions
|
||||||
self.cv = cv = self.mk_vb(name)
|
self.cv = cv = self.mk_vb(name)
|
||||||
|
|
||||||
|
pi = pgo.PlotItem(viewBox=cv, **kwargs)
|
||||||
super().__init__(
|
super().__init__(
|
||||||
background=hcolor(view_color),
|
background=hcolor(view_color),
|
||||||
viewBox=cv,
|
viewBox=cv,
|
||||||
# parent=None,
|
# parent=None,
|
||||||
# plotItem=None,
|
# plotItem=None,
|
||||||
# antialias=True,
|
# antialias=True,
|
||||||
|
parent=parent,
|
||||||
|
plotItem=pi,
|
||||||
**kwargs
|
**kwargs
|
||||||
)
|
)
|
||||||
# give viewbox as reference to chart
|
# give viewbox as reference to chart
|
||||||
|
@ -760,12 +907,23 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
|
|
||||||
self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
|
self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
|
||||||
|
|
||||||
|
# indempotent startup flag for auto-yrange subsys
|
||||||
|
# to detect the "first time" y-domain graphics begin
|
||||||
|
# to be shown in the (main) graphics view.
|
||||||
|
self._on_screen: bool = False
|
||||||
|
|
||||||
def resume_all_feeds(self):
|
def resume_all_feeds(self):
|
||||||
|
try:
|
||||||
for feed in self._feeds.values():
|
for feed in self._feeds.values():
|
||||||
|
for flume in feed.flumes.values():
|
||||||
self.linked.godwidget._root_n.start_soon(feed.resume)
|
self.linked.godwidget._root_n.start_soon(feed.resume)
|
||||||
|
except RuntimeError:
|
||||||
|
# TODO: cancel the qtractor runtime here?
|
||||||
|
raise
|
||||||
|
|
||||||
def pause_all_feeds(self):
|
def pause_all_feeds(self):
|
||||||
for feed in self._feeds.values():
|
for feed in self._feeds.values():
|
||||||
|
for flume in feed.flumes.values():
|
||||||
self.linked.godwidget._root_n.start_soon(feed.pause)
|
self.linked.godwidget._root_n.start_soon(feed.pause)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
|
@ -859,7 +1017,9 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
|
|
||||||
def default_view(
|
def default_view(
|
||||||
self,
|
self,
|
||||||
bars_from_y: int = 3000,
|
bars_from_y: int = int(616 * 3/8),
|
||||||
|
y_offset: int = 0,
|
||||||
|
do_ds: bool = True,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
|
@ -897,8 +1057,12 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
# terms now that we've scaled either by user control
|
# terms now that we've scaled either by user control
|
||||||
# or to the default set of bars as per the immediate block
|
# or to the default set of bars as per the immediate block
|
||||||
# above.
|
# above.
|
||||||
|
if not y_offset:
|
||||||
marker_pos, l1_len = self.pre_l1_xs()
|
marker_pos, l1_len = self.pre_l1_xs()
|
||||||
end = xlast + l1_len + 1
|
end = xlast + l1_len + 1
|
||||||
|
else:
|
||||||
|
end = xlast + y_offset + 1
|
||||||
|
|
||||||
begin = end - (r - l)
|
begin = end - (r - l)
|
||||||
|
|
||||||
# for debugging
|
# for debugging
|
||||||
|
@ -920,8 +1084,11 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
max=end,
|
max=end,
|
||||||
padding=0,
|
padding=0,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if do_ds:
|
||||||
self.view.maybe_downsample_graphics()
|
self.view.maybe_downsample_graphics()
|
||||||
view._set_yrange()
|
view._set_yrange()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.linked.graphics_cycle()
|
self.linked.graphics_cycle()
|
||||||
except IndexError:
|
except IndexError:
|
||||||
|
@ -994,7 +1161,7 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
axis_side: str = 'right',
|
axis_side: str = 'right',
|
||||||
axis_kwargs: dict = {},
|
axis_kwargs: dict = {},
|
||||||
|
|
||||||
) -> pg.PlotItem:
|
) -> pgo.PlotItem:
|
||||||
|
|
||||||
# Custom viewbox impl
|
# Custom viewbox impl
|
||||||
cv = self.mk_vb(name)
|
cv = self.mk_vb(name)
|
||||||
|
@ -1003,13 +1170,14 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
allowed_sides = {'left', 'right'}
|
allowed_sides = {'left', 'right'}
|
||||||
if axis_side not in allowed_sides:
|
if axis_side not in allowed_sides:
|
||||||
raise ValueError(f'``axis_side``` must be in {allowed_sides}')
|
raise ValueError(f'``axis_side``` must be in {allowed_sides}')
|
||||||
|
|
||||||
yaxis = PriceAxis(
|
yaxis = PriceAxis(
|
||||||
orientation=axis_side,
|
orientation=axis_side,
|
||||||
linkedsplits=self.linked,
|
linkedsplits=self.linked,
|
||||||
**axis_kwargs,
|
**axis_kwargs,
|
||||||
)
|
)
|
||||||
|
|
||||||
pi = pg.PlotItem(
|
pi = pgo.PlotItem(
|
||||||
parent=self.plotItem,
|
parent=self.plotItem,
|
||||||
name=name,
|
name=name,
|
||||||
enableMenu=False,
|
enableMenu=False,
|
||||||
|
@ -1022,19 +1190,27 @@ class ChartPlotWidget(pg.PlotWidget):
|
||||||
)
|
)
|
||||||
pi.hideButtons()
|
pi.hideButtons()
|
||||||
|
|
||||||
# cv.enable_auto_yrange(self.view)
|
|
||||||
cv.enable_auto_yrange()
|
|
||||||
|
|
||||||
# compose this new plot's graphics with the current chart's
|
# compose this new plot's graphics with the current chart's
|
||||||
# existing one but with separate axes as neede and specified.
|
# existing one but with separate axes as neede and specified.
|
||||||
self.pi_overlay.add_plotitem(
|
self.pi_overlay.add_plotitem(
|
||||||
pi,
|
pi,
|
||||||
index=index,
|
index=index,
|
||||||
|
|
||||||
# only link x-axes,
|
# only link x-axes and
|
||||||
|
# don't relay any ``ViewBox`` derived event
|
||||||
|
# handlers since we only care about keeping charts
|
||||||
|
# x-synced on interaction (at least for now).
|
||||||
link_axes=(0,),
|
link_axes=(0,),
|
||||||
)
|
)
|
||||||
|
|
||||||
|
# connect auto-yrange callbacks *from* this new
|
||||||
|
# view **to** this parent and likewise *from* the
|
||||||
|
# main/parent chart back *to* the created overlay.
|
||||||
|
cv.enable_auto_yrange(src_vb=self.view)
|
||||||
|
# makes it so that interaction on the new overlay will reflect
|
||||||
|
# back on the main chart (which overlay was added to).
|
||||||
|
self.view.enable_auto_yrange(src_vb=cv)
|
||||||
|
|
||||||
# add axis title
|
# add axis title
|
||||||
# TODO: do we want this API to still work?
|
# TODO: do we want this API to still work?
|
||||||
# raxis = pi.getAxis('right')
|
# raxis = pi.getAxis('right')
|
||||||
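The two `enable_auto_yrange(src_vb=...)` calls above wire the ranging
callbacks in *both* directions between the parent chart view and the new
overlay view. A rough sketch of that coupling using stock ``pyqtgraph``
signals (a simplification for illustration only, not piker's actual
`enable_auto_yrange()` implementation):

    import pyqtgraph as pg

    def cross_link_yranging(
        parent_vb: pg.ViewBox,
        overlay_vb: pg.ViewBox,
    ) -> None:
        # x-zoom/pan on the parent re-ranges the overlay's y-axis..
        parent_vb.sigXRangeChanged.connect(
            lambda *_: overlay_vb.enableAutoRange(axis='y')
        )
        # ..and vice versa so interaction feels symmetric.
        overlay_vb.sigXRangeChanged.connect(
            lambda *_: parent_vb.enableAutoRange(axis='y')
        )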
@@ -1096,7 +1272,7 @@ class ChartPlotWidget(pg.PlotWidget):

         # TODO: this probably needs its own method?
         if overlay:
-            if isinstance(overlay, pg.PlotItem):
+            if isinstance(overlay, pgo.PlotItem):
                 if overlay not in self.pi_overlay.overlays:
                     raise RuntimeError(
                         f'{overlay} must be from `.plotitem_overlay()`'
@@ -1255,8 +1431,7 @@ class ChartPlotWidget(pg.PlotWidget):
         If ``bars_range`` is provided use that range.

         '''
-        # print(f'Chart[{self.name}].maxmin()')
-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
             msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,
@@ -1287,11 +1462,18 @@ class ChartPlotWidget(pg.PlotWidget):

             key = round(lbar), round(rbar)
             res = flow.maxmin(*key)
-            if res == (None, None):
-                log.error(
+
+            if (
+                res is None
+            ):
+                log.warning(
                     f"{flow_key} no mxmn for bars_range => {key} !?"
                 )
                 res = 0, 0
+                if not self._on_screen:
+                    self.default_view(do_ds=False)
+                    self._on_screen = True

             profiler(f'yrange mxmn: {key} -> {res}')
+            # print(f'{flow_key} yrange mxmn: {key} -> {res}')
             return res
@@ -223,14 +223,20 @@ def ds_m4(
     assert frames >= (xrange / uppx)

     # call into ``numba``
-    nb, i_win, y_out = _m4(
+    (
+        nb,
+        x_out,
+        y_out,
+        ymn,
+        ymx,
+    ) = _m4(
         x,
         y,

         frames,

         # TODO: see func below..
-        # i_win,
+        # x_out,
         # y_out,

         # first index in x data to start at
@@ -243,10 +249,11 @@ def ds_m4(
     # filter out any overshoot in the input allocation arrays by
     # removing zero-ed tail entries which should start at a certain
     # index.
-    i_win = i_win[i_win != 0]
-    y_out = y_out[:i_win.size]
+    x_out = x_out[x_out != 0]
+    y_out = y_out[:x_out.size]

-    return nb, i_win, y_out
+    # print(f'M4 output ymn, ymx: {ymn},{ymx}')
+    return nb, x_out, y_out, ymn, ymx


 @jit(
@@ -260,8 +267,8 @@ def _m4(

     frames: int,

-    # TODO: using this approach by having the ``.zeros()`` alloc lines
-    # below, in put python was causing segs faults and alloc crashes..
+    # TODO: using this approach, having the ``.zeros()`` alloc lines
+    # below in pure python, there were segs faults and alloc crashes..
     # we might need to see how it behaves with shm arrays and consider
     # allocating them once at startup?

@@ -274,14 +281,22 @@ def _m4(
     x_start: int,
     step: float,

-) -> int:
-    # nbins = len(i_win)
-    # count = len(xs)
+) -> tuple[
+    int,
+    np.ndarray,
+    np.ndarray,
+    float,
+    float,
+]:
+    '''
+    Implementation of the m4 algorithm in ``numba``:
+    http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
+
+    '''
     # these are pre-allocated and mutated by ``numba``
     # code in-place.
     y_out = np.zeros((frames, 4), ys.dtype)
-    i_win = np.zeros(frames, xs.dtype)
+    x_out = np.zeros(frames, xs.dtype)

     bincount = 0
     x_left = x_start
@@ -295,24 +310,34 @@ def _m4(

     # set all bins in the left-most entry to the starting left-most x value
     # (aka a row broadcast).
-    i_win[bincount] = x_left
+    x_out[bincount] = x_left
     # set all y-values to the first value passed in.
     y_out[bincount] = ys[0]

+    # full input y-data mx and mn
+    mx: float = -np.inf
+    mn: float = np.inf
+
+    # compute OHLC style max / min values per window sized x-frame.
     for i in range(len(xs)):

         x = xs[i]
         y = ys[i]

         if x < x_left + step:   # the current window "step" is [bin, bin+1)
-            y_out[bincount, 1] = min(y, y_out[bincount, 1])
-            y_out[bincount, 2] = max(y, y_out[bincount, 2])
+            ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
+            ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
             y_out[bincount, 3] = y
+            mx = max(mx, ymx)
+            mn = min(mn, ymn)

         else:
             # Find the next bin
             while x >= x_left + step:
                 x_left += step

             bincount += 1
-            i_win[bincount] = x_left
+            x_out[bincount] = x_left
             y_out[bincount] = y

-    return bincount, i_win, y_out
+    return bincount, x_out, y_out, mn, mx
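Since the jitted `_m4()` kernel above is hard to read through the numba
constraints, here is a pure-python reference of the same M4 binning
semantics (per-bin columns are first/min/max/last, plus the newly returned
global y-min/max). This is an un-jitted sketch for clarity only; it
mirrors, but is not, the kernel above:

    import numpy as np

    def m4_reference(
        xs: np.ndarray,
        ys: np.ndarray,
        x_start: float,
        step: float,
        frames: int,
    ) -> tuple[int, np.ndarray, np.ndarray, float, float]:
        y_out = np.zeros((frames, 4), ys.dtype)
        x_out = np.zeros(frames, xs.dtype)
        bincount = 0
        x_left = x_start
        x_out[bincount] = x_left
        y_out[bincount] = ys[0]
        mn, mx = np.inf, -np.inf

        for x, y in zip(xs, ys):
            if x < x_left + step:
                # accumulate the min/max/last columns in-bin
                y_out[bincount, 1] = min(y, y_out[bincount, 1])
                y_out[bincount, 2] = max(y, y_out[bincount, 2])
                y_out[bincount, 3] = y
            else:
                # seek to the bin which contains this x
                while x >= x_left + step:
                    x_left += step
                bincount += 1
                x_out[bincount] = x_left
                y_out[bincount] = y

            # track the global y-range across all bins
            mn = min(mn, y_out[bincount, 1])
            mx = max(mx, y_out[bincount, 2])

        return bincount, x_out, y_out, mn, mx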
@@ -18,8 +18,13 @@
 Mouse interaction graphics

 """
+from __future__ import annotations
 from functools import partial
-from typing import Optional, Callable
+from typing import (
+    Optional,
+    Callable,
+    TYPE_CHECKING,
+)

 import inspect
 import numpy as np
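The import shuffle above is the standard "forward reference" recipe:
`from __future__ import annotations` turns all annotations into strings at
runtime so the chart types can be imported under a `TYPE_CHECKING` guard
only, breaking the circular import with `._chart`. A generic sketch of the
pattern (the function name here is illustrative):

    from __future__ import annotations
    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # seen by type checkers only; never imported at runtime.
        from ._chart import ChartPlotWidget

    def attach(plot: ChartPlotWidget) -> None:
        # the annotation above stays an unevaluated string at runtime.
        plot.show()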
@@ -36,6 +41,12 @@ from ._style import (
 from ._axes import YAxisLabel, XAxisLabel
 from ..log import get_logger

+if TYPE_CHECKING:
+    from ._chart import (
+        ChartPlotWidget,
+        LinkedSplits,
+    )
+

 log = get_logger(__name__)

@@ -58,7 +69,7 @@ class LineDot(pg.CurvePoint):
         curve: pg.PlotCurveItem,
         index: int,

-        plot: 'ChartPlotWidget',  # type: ingore # noqa
+        plot: ChartPlotWidget,  # type: ingore # noqa
         pos=None,
         color: str = 'default_light',

@@ -151,7 +162,7 @@ class ContentsLabel(pg.LabelItem):
     def __init__(
         self,

-        # chart: 'ChartPlotWidget',  # noqa
+        # chart: ChartPlotWidget,  # noqa
         view: pg.ViewBox,

         anchor_at: str = ('top', 'right'),
@@ -244,7 +255,7 @@ class ContentsLabels:
     '''
     def __init__(
         self,
-        linkedsplits: 'LinkedSplits',  # type: ignore # noqa
+        linkedsplits: LinkedSplits,  # type: ignore # noqa

     ) -> None:

@@ -289,7 +300,7 @@ class ContentsLabels:
     def add_label(

         self,
-        chart: 'ChartPlotWidget',  # type: ignore # noqa
+        chart: ChartPlotWidget,  # type: ignore # noqa
         name: str,
         anchor_at: tuple[str, str] = ('top', 'left'),
         update_func: Callable = ContentsLabel.update_from_value,
@@ -316,7 +327,7 @@ class Cursor(pg.GraphicsObject):
     def __init__(

         self,
-        linkedsplits: 'LinkedSplits',  # noqa
+        linkedsplits: LinkedSplits,  # noqa
         digits: int = 0

     ) -> None:
@@ -325,6 +336,8 @@ class Cursor(pg.GraphicsObject):

         self.linked = linkedsplits
         self.graphics: dict[str, pg.GraphicsObject] = {}
+        self.xaxis_label: Optional[XAxisLabel] = None
+        self.always_show_xlabel: bool = True
         self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
         self.active_plot = None
         self.digits: int = digits
@@ -385,7 +398,7 @@ class Cursor(pg.GraphicsObject):

     def add_plot(
         self,
-        plot: 'ChartPlotWidget',  # noqa
+        plot: ChartPlotWidget,  # noqa
         digits: int = 0,

     ) -> None:
@@ -469,7 +482,7 @@ class Cursor(pg.GraphicsObject):

     def add_curve_cursor(
         self,
-        plot: 'ChartPlotWidget',  # noqa
+        plot: ChartPlotWidget,  # noqa
         curve: 'PlotCurveItem',  # noqa

     ) -> LineDot:
@@ -491,17 +504,29 @@ class Cursor(pg.GraphicsObject):
         log.debug(f"{(action, plot.name)}")
         if action == 'Enter':
             self.active_plot = plot
+            plot.linked.godwidget._active_cursor = self

             # show horiz line and y-label
             self.graphics[plot]['hl'].show()
             self.graphics[plot]['yl'].show()

-        else:  # Leave
+            if (
+                not self.always_show_xlabel
+                and not self.xaxis_label.isVisible()
+            ):
+                self.xaxis_label.show()

-            # hide horiz line and y-label
+        # Leave: hide horiz line and y-label
+        else:
             self.graphics[plot]['hl'].hide()
             self.graphics[plot]['yl'].hide()

+            if (
+                not self.always_show_xlabel
+                and self.xaxis_label.isVisible()
+            ):
+                self.xaxis_label.hide()
+
     def mouseMoved(
         self,
         coords: tuple[QPointF],  # noqa
@@ -590,6 +615,10 @@ class Cursor(pg.GraphicsObject):
             left_axis_width += left.width()

         # map back to abs (label-local) coordinates
+        if (
+            self.always_show_xlabel
+            or self.xaxis_label.isVisible()
+        ):
             self.xaxis_label.update_label(
                 abs_pos=(
                     plot.mapFromView(QPointF(vl_x, iy)) -
@@ -44,6 +44,7 @@ from ._style import hcolor
 #     ds_m4,
 # )
 from ..log import get_logger
+from .._profile import Profiler


 log = get_logger(__name__)
@@ -331,7 +332,7 @@ class Curve(pg.GraphicsObject):

     ) -> None:

-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
             msg=f'Curve.paint(): `{self._name}`',
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,
@@ -466,7 +467,7 @@ class StepCurve(Curve):
     def sub_paint(
         self,
         p: QPainter,
-        profiler: pg.debug.Profiler,
+        profiler: Profiler,

     ) -> None:
         # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
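Both hunks above only swap `pg.debug.Profiler` for piker's own `Profiler`
from `.._profile`, which is call-compatible at these sites. A minimal
duck-compatible stand-in showing the interface the call sites rely on
(an illustrative sketch, not piker's implementation):

    import time

    class MiniProfiler:
        def __init__(
            self,
            msg: str,
            disabled: bool = False,
            ms_threshold: float = 0,
            delayed: bool = False,
        ):
            self.msg = msg
            self.disabled = disabled
            self.ms_threshold = ms_threshold
            self._last = time.perf_counter()

        def __call__(self, note: str) -> None:
            # mark a checkpoint; only report if over the threshold.
            if self.disabled:
                return
            now = time.perf_counter()
            ms = (now - self._last) * 1e3
            self._last = now
            if ms >= self.ms_threshold:
                print(f'{self.msg}: {note} took {ms:.3f}ms')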
@@ -21,19 +21,21 @@ this module ties together quote and computational (fsp) streams with
 graphics update methods via our custom ``pyqtgraph`` charting api.

 '''
-from dataclasses import dataclass
 from functools import partial
 import time
 from typing import Optional, Any, Callable

-import numpy as np
 import tractor
 import trio
-import pendulum
 import pyqtgraph as pg

 # from .. import brokers
-from ..data.feed import open_feed
+from ..data.feed import (
+    open_feed,
+    Feed,
+    Flume,
+)
+from ..data.types import Struct
 from ._axes import YAxisLabel
 from ._chart import (
     ChartPlotWidget,
@@ -41,29 +43,36 @@ from ._chart import (
     GodWidget,
 )
 from ._l1 import L1Labels
+from ._style import hcolor
 from ._fsp import (
     update_fsp_chart,
     start_fsp_displays,
     has_vlm,
     open_vlm_displays,
 )
-from ..data._sharedmem import ShmArray
+from ..data._sharedmem import (
+    ShmArray,
+)
 from ..data._source import tf_in_1s
 from ._forms import (
     FieldsForm,
     mk_order_pane_layout,
 )
-from .order_mode import open_order_mode
+from .order_mode import (
+    open_order_mode,
+    OrderMode,
+)
 from .._profile import (
     pg_profile_enabled,
     ms_slower_then,
 )
 from ..log import get_logger
+from .._profile import Profiler

 log = get_logger(__name__)

 # TODO: load this from a config.toml!
-_quote_throttle_rate: int = 22  # Hz
+_quote_throttle_rate: int = 16  # Hz


 # a working tick-type-classes template
@@ -105,6 +114,10 @@ def chart_maxmin(
     mn, mx = out

     mx_vlm_in_view = 0

+    # TODO: we need to NOT call this to avoid a manual
+    # np.max/min trigger and especially on the vlm_chart
+    # flows which aren't shown.. like vlm?
     if vlm_chart:
         out = vlm_chart.maxmin()
         if out:
@@ -118,39 +131,105 @@ def chart_maxmin(
     )


-@dataclass
-class DisplayState:
+class DisplayState(Struct):
     '''
     Chart-local real-time graphics state container.

     '''
+    godwidget: GodWidget
     quotes: dict[str, Any]

     maxmin: Callable
     ohlcv: ShmArray
+    hist_ohlcv: ShmArray

     # high level chart handles
-    linked: LinkedSplits
     chart: ChartPlotWidget
-    vlm_chart: ChartPlotWidget

     # axis labels
     l1: L1Labels
     last_price_sticky: YAxisLabel
-    vlm_sticky: YAxisLabel
+    hist_last_price_sticky: YAxisLabel

     # misc state tracking
-    vars: dict[str, Any]
+    vars: dict[str, Any] = {
+        'tick_margin': 0,
+        'i_last': 0,
+        'i_last_append': 0,
+        'last_mx_vlm': 0,
+        'last_mx': 0,
+        'last_mn': 0,
+    }

+    vlm_chart: Optional[ChartPlotWidget] = None
+    vlm_sticky: Optional[YAxisLabel] = None
     wap_in_history: bool = False

+    def incr_info(
+        self,
+        chart: Optional[ChartPlotWidget] = None,
+        shm: Optional[ShmArray] = None,
+        state: Optional[dict] = None,  # pass in a copy if you don't
+
+        update_state: bool = True,
+        update_uppx: float = 16,
+
+    ) -> tuple:
+
+        shm = shm or self.ohlcv
+        chart = chart or self.chart
+        state = state or self.vars
+
+        if not update_state:
+            state = state.copy()
+
+        # compute the first available graphic's x-units-per-pixel
+        uppx = chart.view.x_uppx()
+
+        # NOTE: this used to be implemented in a dedicated
+        # "increment task": ``check_for_new_bars()`` but it doesn't
+        # make sense to do a whole task switch when we can just do
+        # this simple index-diff and all the fsp sub-curve graphics
+        # are diffed on each draw cycle anyway; so updates to the
+        # "curve" length is already automatic.
+
+        # increment the view position by the sample offset.
+        i_step = shm.index
+        i_diff = i_step - state['i_last']
+        state['i_last'] = i_step
+
+        append_diff = i_step - state['i_last_append']
+
+        # update the "last datum" (aka extending the flow graphic with
+        # new data) only if the number of unit steps is >= the number of
+        # such unit steps per pixel (aka uppx). Iow, if the zoom level
+        # is such that a datum(s) update to graphics wouldn't span
+        # to a new pixel, we don't update yet.
+        do_append = (append_diff >= uppx)
+        if do_append:
+            state['i_last_append'] = i_step
+
+        do_rt_update = uppx < update_uppx
+
+        _, _, _, r = chart.bars_range()
+        liv = r >= i_step
+
+        # TODO: pack this into a struct
+        return (
+            uppx,
+            liv,
+            do_append,
+            i_diff,
+            append_diff,
+            do_rt_update,
+        )


 async def graphics_update_loop(

-    linked: LinkedSplits,
-    stream: tractor.MsgStream,
-    ohlcv: np.ndarray,
+    nurse: trio.Nursery,
+    godwidget: GodWidget,
+    flume: Flume,

     wap_in_history: bool = False,
     vlm_chart: Optional[ChartPlotWidget] = None,
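The `do_append` gate in `incr_info()` above is the core of the redraw
throttling: the last-datum graphic is only extended once enough new
samples have arrived to cover a single screen pixel. A tiny worked
example of just that arithmetic at a fixed zoom of 16 x-units-per-pixel:

    uppx = 16            # x-units-per-pixel at the current zoom
    i_last_append = 100  # index at the last flushed append

    for i_step in range(101, 135):
        append_diff = i_step - i_last_append
        if append_diff >= uppx:      # the ``do_append`` condition
            i_last_append = i_step
            print(f'append at index {i_step}')  # fires at 116, 132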
@@ -171,22 +250,29 @@ async def graphics_update_loop(
     # of copying it from last bar's close
     # - 1-5 sec bar lookback-autocorrection like tws does?
     #   (would require a background history checker task)
-    display_rate = linked.godwidget.window.current_screen().refreshRate()
+    linked: LinkedSplits = godwidget.rt_linked
+    display_rate = godwidget.window.current_screen().refreshRate()

-    chart = linked.chart
+    fast_chart = linked.chart
+    hist_chart = godwidget.hist_linked.chart
+
+    ohlcv = flume.rt_shm
+    hist_ohlcv = flume.hist_shm

     # update last price sticky
-    last_price_sticky = chart._ysticks[chart.name]
+    last_price_sticky = fast_chart._ysticks[fast_chart.name]
     last_price_sticky.update_from_data(
         *ohlcv.array[-1][['index', 'close']]
     )

-    if vlm_chart:
-        vlm_sticky = vlm_chart._ysticks['volume']
+    hist_last_price_sticky = hist_chart._ysticks[hist_chart.name]
+    hist_last_price_sticky.update_from_data(
+        *hist_ohlcv.array[-1][['index', 'close']]
+    )

     maxmin = partial(
         chart_maxmin,
-        chart,
+        fast_chart,
         ohlcv,
         vlm_chart,
     )
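The sticky updates above lean on multi-field indexing into the shm-backed
numpy structured array. A standalone demo of that exact access pattern
(the dtype below is a stand-in for, not a copy of, the real ohlcv shm
layout):

    import numpy as np

    ohlcv_dtype = np.dtype([
        ('index', 'i8'),
        ('open', 'f8'), ('high', 'f8'),
        ('low', 'f8'), ('close', 'f8'),
        ('volume', 'f8'),
    ])
    arr = np.zeros(3, dtype=ohlcv_dtype)
    arr[-1] = (2, 10.0, 12.0, 9.5, 11.25, 42.0)

    # grab only the fields a label cares about from the last row
    row = arr[-1][['index', 'close']]
    print(row['index'], row['close'])  # -> 2 11.25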
@@ -200,15 +286,15 @@ async def graphics_update_loop(

     last, volume = ohlcv.array[-1][['close', 'volume']]

-    symbol = chart.linked.symbol
+    symbol = fast_chart.linked.symbol

     l1 = L1Labels(
-        chart,
+        fast_chart,
         # determine precision/decimal lengths
         digits=symbol.tick_size_digits,
         size_digits=symbol.lot_size_digits,
     )
-    chart._l1_labels = l1
+    fast_chart._l1_labels = l1

     # TODO:
     # - in theory we should be able to read buffer data faster
@@ -218,46 +304,22 @@ async def graphics_update_loop(
     # levels this might be dark volume we need to
     # present differently -> likely dark vlm

-    tick_size = chart.linked.symbol.tick_size
+    tick_size = fast_chart.linked.symbol.tick_size
     tick_margin = 3 * tick_size

-    chart.show()
-    # view = chart.view
+    fast_chart.show()
     last_quote = time.time()
     i_last = ohlcv.index

-    # async def iter_drain_quotes():
-    #     # NOTE: all code below this loop is expected to be synchronous
-    #     # and thus draw instructions are not picked up jntil the next
-    #     # wait / iteration.
-    #     async for quotes in stream:
-    #         while True:
-    #             try:
-    #                 moar = stream.receive_nowait()
-    #             except trio.WouldBlock:
-    #                 yield quotes
-    #                 break
-    #             else:
-    #                 for sym, quote in moar.items():
-    #                     ticks_frame = quote.get('ticks')
-    #                     if ticks_frame:
-    #                         quotes[sym].setdefault(
-    #                             'ticks', []).extend(ticks_frame)
-    #                     print('pulled extra')

-    #         yield quotes

-    # async for quotes in iter_drain_quotes():

     ds = linked.display_state = DisplayState(**{
+        'godwidget': godwidget,
         'quotes': {},
-        'linked': linked,
         'maxmin': maxmin,
         'ohlcv': ohlcv,
-        'chart': chart,
+        'hist_ohlcv': hist_ohlcv,
+        'chart': fast_chart,
         'last_price_sticky': last_price_sticky,
-        'vlm_chart': vlm_chart,
-        'vlm_sticky': vlm_sticky,
+        'hist_last_price_sticky': hist_last_price_sticky,
         'l1': l1,

         'vars': {
@@ -270,9 +332,69 @@ async def graphics_update_loop(
         }
     })

-    chart.default_view()
+    if vlm_chart:
+        vlm_sticky = vlm_chart._ysticks['volume']
+        ds.vlm_chart = vlm_chart
+        ds.vlm_sticky = vlm_sticky
+
+    fast_chart.default_view()
+
+    # TODO: probably factor this into some kinda `DisplayState`
+    # API that can be reused at least in terms of pulling view
+    # params (eg ``.bars_range()``).
+    async def increment_history_view():
+        i_last = hist_ohlcv.index
+        state = ds.vars.copy() | {
+            'i_last_append': i_last,
+            'i_last': i_last,
+        }
+        _, hist_step_size_s, _ = flume.get_ds_info()
+
+        async with flume.index_stream(
+            # int(hist_step_size_s)
+            # TODO: seems this is more reliable at keeping the slow
+            # chart incremented in view more correctly?
+            # - It might make sense to just inline this logic with the
+            #   main display task? => it's a tradeoff of slower task
+            #   wakeups/ctx switches verus logic checks (as normal)
+            # - we need increment logic that only does the view shift
+            #   call when the uppx permits/needs it
+            int(1),
+        ) as istream:
+            async for msg in istream:
+
+                # check if slow chart needs an x-domain shift and/or
+                # y-range resize.
+                (
+                    uppx,
+                    liv,
+                    do_append,
+                    i_diff,
+                    append_diff,
+                    do_rt_update,
+                ) = ds.incr_info(
+                    chart=hist_chart,
+                    shm=ds.hist_ohlcv,
+                    state=state,
+                    # update_state=False,
+                )
+                # print(
+                #     f'liv: {liv}\n'
+                #     f'do_append: {do_append}\n'
+                #     f'append_diff: {append_diff}\n'
+                # )
+
+                if (
+                    do_append
+                    and liv
+                ):
+                    hist_chart.increment_view(steps=i_diff)
+                    hist_chart.view._set_yrange(yrange=hist_chart.maxmin())
+
+    nurse.start_soon(increment_history_view)

     # main real-time quotes update loop
+    stream: tractor.MsgStream = flume.stream
     async for quotes in stream:

         ds.quotes = quotes
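Structurally the new code above runs the slow-chart incrementer as a
sibling task beside the main quote loop via the passed-in nursery. A
self-contained ``trio`` skeleton of that task layout (loop bodies are
faked out with sleeps):

    import trio

    async def increment_history_view():
        while True:
            await trio.sleep(1)  # stand-in for the shm index stream

    async def graphics_update_loop(nurse: trio.Nursery):
        nurse.start_soon(increment_history_view)
        for _ in range(3):       # stand-in for the quote msg stream
            await trio.sleep(0.1)

    async def main():
        async with trio.open_nursery() as nurse:
            await graphics_update_loop(nurse)
            nurse.cancel_scope.cancel()  # drop the infinite sibling

    trio.run(main)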
@@ -292,15 +414,16 @@ async def graphics_update_loop(
             last_quote = time.time()

         # chart isn't active/shown so skip render cycle and pause feed(s)
-        if chart.linked.isHidden():
-            chart.pause_all_feeds()
+        if fast_chart.linked.isHidden():
+            # print('skipping update')
+            fast_chart.pause_all_feeds()
             continue

-        ic = chart.view._ic
-        if ic:
-            chart.pause_all_feeds()
-            await ic.wait()
-            chart.resume_all_feeds()
+        # ic = fast_chart.view._ic
+        # if ic:
+        #     fast_chart.pause_all_feeds()
+        #     await ic.wait()
+        #     fast_chart.resume_all_feeds()

         # sync call to update all graphics/UX components.
         graphics_update_cycle(ds)
@@ -317,8 +440,10 @@ def graphics_update_cycle(
     # hopefully XD

     chart = ds.chart
+    # TODO: just pass this as a direct ref to avoid so many attr accesses?
+    hist_chart = ds.godwidget.hist_linked.chart

-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         msg=f'Graphics loop cycle for: `{chart.name}`',
         delayed=True,
         disabled=not pg_profile_enabled(),
@@ -330,53 +455,24 @@ def graphics_update_cycle(

     # unpack multi-referenced components
     vlm_chart = ds.vlm_chart
+
+    # rt "HFT" chart
     l1 = ds.l1
     ohlcv = ds.ohlcv
     array = ohlcv.array

     vars = ds.vars
     tick_margin = vars['tick_margin']

-    update_uppx = 16

     for sym, quote in ds.quotes.items():
-        # compute the first available graphic's x-units-per-pixel
-        uppx = vlm_chart.view.x_uppx()
-
-        # NOTE: vlm may be written by the ``brokerd`` backend
-        # event though a tick sample is not emitted.
-        # TODO: show dark trades differently
-        # https://github.com/pikers/piker/issues/116
-
-        # NOTE: this used to be implemented in a dedicated
-        # "increment task": ``check_for_new_bars()`` but it doesn't
-        # make sense to do a whole task switch when we can just do
-        # this simple index-diff and all the fsp sub-curve graphics
-        # are diffed on each draw cycle anyway; so updates to the
-        # "curve" length is already automatic.
-
-        # increment the view position by the sample offset.
-        i_step = ohlcv.index
-        i_diff = i_step - vars['i_last']
-        vars['i_last'] = i_step
-
-        append_diff = i_step - vars['i_last_append']
-
-        # update the "last datum" (aka extending the flow graphic with
-        # new data) only if the number of unit steps is >= the number of
-        # such unit steps per pixel (aka uppx). Iow, if the zoom level
-        # is such that a datum(s) update to graphics wouldn't span
-        # to a new pixel, we don't update yet.
-        do_append = (append_diff >= uppx)
-        if do_append:
-            vars['i_last_append'] = i_step
-
-        do_rt_update = uppx < update_uppx
-        # print(
-        #     f'append_diff:{append_diff}\n'
-        #     f'uppx:{uppx}\n'
-        #     f'do_append: {do_append}'
-        # )
+        (
+            uppx,
+            liv,
+            do_append,
+            i_diff,
+            append_diff,
+            do_rt_update,
+        ) = ds.incr_info()

         # TODO: we should only run mxmn when we know
         # an update is due via ``do_append`` above.
@@ -392,8 +488,6 @@ def graphics_update_cycle(

         profiler('`ds.maxmin()` call')

-        liv = r >= i_step  # the last datum is in view
-
         if (
             prepend_update_index is not None
             and lbar > prepend_update_index
@@ -408,18 +502,11 @@ def graphics_update_cycle(
         # don't real-time "shift" the curve to the
         # left unless we get one of the following:
         if (
-            (
-                # i_diff > 0  # no new sample step
-                do_append
-                # and uppx < 4  # chart is zoomed out very far
-                and liv
-            )
+            (do_append and liv)
             or trigger_all
         ):
-            # TODO: we should track and compute whether the last
-            # pixel in a curve should show new data based on uppx
-            # and then iff update curves and shift?
             chart.increment_view(steps=i_diff)
+            chart.view._set_yrange(yrange=(mn, mx))

             if vlm_chart:
                 vlm_chart.increment_view(steps=i_diff)
@@ -477,7 +564,10 @@ def graphics_update_cycle(
         ):
             chart.update_graphics_from_flow(
                 chart.name,
-                # do_append=uppx < update_uppx,
+                do_append=do_append,
+            )
+            hist_chart.update_graphics_from_flow(
+                chart.name,
                 do_append=do_append,
             )

@@ -517,6 +607,9 @@ def graphics_update_cycle(
             ds.last_price_sticky.update_from_data(
                 *end[['index', 'close']]
             )
+            ds.hist_last_price_sticky.update_from_data(
+                *end[['index', 'close']]
+            )

         if wap_in_history:
             # update vwap overlay line
@@ -564,10 +657,12 @@ def graphics_update_cycle(
             l1.bid_label.update_fields({'level': price, 'size': size})

         # check for y-range re-size
+        if (mx > vars['last_mx']) or (mn < vars['last_mn']):
+
+            # fast chart resize case
             if (
-            (mx > vars['last_mx']) or (mn < vars['last_mn'])
-            and not chart._static_yrange == 'axis'
-            and liv
+                liv
+                and not chart._static_yrange == 'axis'
             ):
                 main_vb = chart.view
                 if (
@@ -585,6 +680,22 @@ def graphics_update_cycle(
                         yrange=(mn, mx),
                     )

+            # check if slow chart needs a resize
+            (
+                _,
+                hist_liv,
+                _,
+                _,
+                _,
+                _,
+            ) = ds.incr_info(
+                chart=hist_chart,
+                shm=ds.hist_ohlcv,
+                update_state=False,
+            )
+            if hist_liv:
+                hist_chart.view._set_yrange(yrange=hist_chart.maxmin())
+
         # XXX: update this every draw cycle to make L1-always-in-view work.
         vars['last_mx'], vars['last_mn'] = mx, mn
|
@ -700,10 +811,144 @@ def graphics_update_cycle(
|
||||||
flow.draw_last(array_key=curve_name)
|
flow.draw_last(array_key=curve_name)
|
||||||
|
|
||||||
|
|
||||||
|
async def link_views_with_region(
|
||||||
|
rt_chart: ChartPlotWidget,
|
||||||
|
hist_chart: ChartPlotWidget,
|
||||||
|
flume: Flume,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
# these value are be only pulled once during shm init/startup
|
||||||
|
izero_hist = flume.izero_hist
|
||||||
|
izero_rt = flume.izero_rt
|
||||||
|
|
||||||
|
# Add the LinearRegionItem to the ViewBox, but tell the ViewBox
|
||||||
|
# to exclude this item when doing auto-range calculations.
|
||||||
|
rt_pi = rt_chart.plotItem
|
||||||
|
hist_pi = hist_chart.plotItem
|
||||||
|
|
||||||
|
region = pg.LinearRegionItem(
|
||||||
|
movable=False,
|
||||||
|
# color scheme that matches sidepane styling
|
||||||
|
pen=pg.mkPen(hcolor('gunmetal')),
|
||||||
|
brush=pg.mkBrush(hcolor('default_darkest')),
|
||||||
|
)
|
||||||
|
region.setZValue(10) # put linear region "in front" in layer terms
|
||||||
|
|
||||||
|
hist_pi.addItem(region, ignoreBounds=True)
|
||||||
|
|
||||||
|
flow = rt_chart._flows[hist_chart.name]
|
||||||
|
assert flow
|
||||||
|
|
||||||
|
# XXX: no idea why this doesn't work but it's causing
|
||||||
|
# a weird placement of the region on the way-far-left..
|
||||||
|
# region.setClipItem(flow.graphics)
|
||||||
|
|
||||||
|
# poll for datums load and timestep detection
|
||||||
|
for _ in range(100):
|
||||||
|
try:
|
||||||
|
_, _, ratio = flume.get_ds_info()
|
||||||
|
break
|
||||||
|
except IndexError:
|
||||||
|
await trio.sleep(0.01)
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
raise RuntimeError(
|
||||||
|
'Failed to detect sampling periods from shm!?')
|
||||||
|
|
||||||
|
# sampling rate transform math:
|
||||||
|
# -----------------------------
|
||||||
|
# define the fast chart to slow chart as a linear mapping
|
||||||
|
# over the fast index domain `i` to the slow index domain
|
||||||
|
# `j` as:
|
||||||
|
#
|
||||||
|
# j = i - i_offset
|
||||||
|
# ------------ + j_offset
|
||||||
|
# j/i
|
||||||
|
#
|
||||||
|
# conversely the inverse function is:
|
||||||
|
#
|
||||||
|
# i = j/i * (j - j_offset) + i_offset
|
||||||
|
#
|
||||||
|
# Where `j_offset` is our ``izero_hist`` and `i_offset` is our
|
||||||
|
# `izero_rt`, the ``ShmArray`` offsets which correspond to the
|
||||||
|
# indexes in each array where the "current" time is indexed at init.
|
||||||
|
# AKA the index where new data is "appended to" and historical data
|
||||||
|
# if "prepended from".
|
||||||
|
#
|
||||||
|
# more practically (and by default) `i` is normally an index
|
||||||
|
# into 1s samples and `j` is an index into 60s samples (aka 1m).
|
||||||
|
# in the below handlers ``ratio`` is the `j/i` and ``mn``/``mx``
|
||||||
|
# are the low and high index input from the source index domain.
|
||||||
|
|
||||||
|
def update_region_from_pi(
|
||||||
|
window,
|
||||||
|
viewRange: tuple[tuple, tuple],
|
||||||
|
is_manual: bool = True,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
# put linear region "in front" in layer terms
|
||||||
|
region.setZValue(10)
|
||||||
|
|
||||||
|
# set the region on the history chart
|
||||||
|
# to the range currently viewed in the
|
||||||
|
# HFT/real-time chart.
|
||||||
|
mn, mx = viewRange[0]
|
||||||
|
ds_mn = (mn - izero_rt)/ratio
|
||||||
|
ds_mx = (mx - izero_rt)/ratio
|
||||||
|
lhmn = ds_mn + izero_hist
|
||||||
|
lhmx = ds_mx + izero_hist
|
||||||
|
# print(
|
||||||
|
# f'rt_view_range: {(mn, mx)}\n'
|
||||||
|
# f'ds_mn, ds_mx: {(ds_mn, ds_mx)}\n'
|
||||||
|
# f'lhmn, lhmx: {(lhmn, lhmx)}\n'
|
||||||
|
# )
|
||||||
|
region.setRegion((
|
||||||
|
lhmn,
|
||||||
|
lhmx,
|
||||||
|
))
|
||||||
|
|
||||||
|
# TODO: if we want to have the slow chart adjust range to
|
||||||
|
# match the fast chart's selection -> results in the
|
||||||
|
# linear region expansion never can go "outside of view".
|
||||||
|
# hmn, hmx = hvr = hist_chart.view.state['viewRange'][0]
|
||||||
|
# print((hmn, hmx))
|
||||||
|
# if (
|
||||||
|
# hvr
|
||||||
|
# and (lhmn < hmn or lhmx > hmx)
|
||||||
|
# ):
|
||||||
|
# hist_pi.setXRange(
|
||||||
|
# lhmn,
|
||||||
|
# lhmx,
|
||||||
|
# padding=0,
|
||||||
|
# )
|
||||||
|
# hist_linked.graphics_cycle()
|
||||||
|
|
||||||
|
# connect region to be updated on plotitem interaction.
|
||||||
|
rt_pi.sigRangeChanged.connect(update_region_from_pi)
|
||||||
|
|
||||||
|
def update_pi_from_region():
|
||||||
|
region.setZValue(10)
|
||||||
|
mn, mx = region.getRegion()
|
||||||
|
# print(f'region_x: {(mn, mx)}')
|
||||||
|
rt_pi.setXRange(
|
||||||
|
((mn - izero_hist) * ratio) + izero_rt,
|
||||||
|
((mx - izero_hist) * ratio) + izero_rt,
|
||||||
|
padding=0,
|
||||||
|
)
|
||||||
|
|
||||||
|
# TODO BUG XXX: seems to cause a real perf hit and a recursion error
|
||||||
|
# (but used to work before generalizing for 1s ohlc offset?)..
|
||||||
|
# something to do with the label callback handlers?
|
||||||
|
|
||||||
|
# region.sigRegionChanged.connect(update_pi_from_region)
|
||||||
|
# region.sigRegionChangeFinished.connect(update_pi_from_region)
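Worked through in plain Python, the forward and inverse mappings from the comment block above are easy to sanity check; a minimal sketch with made-up offsets (`rt_to_hist`/`hist_to_rt` are illustrative names, not piker APIs, and `ratio` is the `j/i` sampling-period ratio, e.g. 60 for 1s -> 1m bars):

```python
def rt_to_hist(i: float, ratio: float, izero_rt: float, izero_hist: float) -> float:
    # j = (i - i_offset) / (j/i) + j_offset
    return (i - izero_rt) / ratio + izero_hist

def hist_to_rt(j: float, ratio: float, izero_rt: float, izero_hist: float) -> float:
    # i = j/i * (j - j_offset) + i_offset
    return ratio * (j - izero_hist) + izero_rt

# round-trip check with hypothetical offsets: 1s fast bars, 1m slow bars
ratio, izero_rt, izero_hist = 60, 5000, 200
i = 5321.0
j = rt_to_hist(i, ratio, izero_rt, izero_hist)
assert abs(hist_to_rt(j, ratio, izero_rt, izero_hist) - i) < 1e-9
```

This is exactly what the two sig handlers above do in each direction when the fast or slow view range changes.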

 async def display_symbol_data(
     godwidget: GodWidget,
     provider: str,
-    sym: str,
+    fqsns: list[str],
     loglevel: str,
     order_mode_started: trio.Event,

@@ -717,11 +962,6 @@ async def display_symbol_data(

     '''
     sbar = godwidget.window.status_bar
-    loading_sym_key = sbar.open_status(
-        f'loading {sym}.{provider} ->',
-        group_key=True
-    )

     # historical data fetch
     # brokermod = brokers.get_brokermod(provider)

@@ -730,10 +970,17 @@ async def display_symbol_data(
     #     clear_on_next=True,
     #     group_key=loading_sym_key,
     # )
-    fqsn = '.'.join((sym, provider))

+    for fqsn in fqsns:
+
+        loading_sym_key = sbar.open_status(
+            f'loading {fqsn} ->',
+            group_key=True
+        )
+
+    feed: Feed
     async with open_feed(
-        [fqsn],
+        fqsns,
         loglevel=loglevel,

         # limit to at least display's FPS

@@ -741,15 +988,19 @@ async def display_symbol_data(
         tick_throttle=_quote_throttle_rate,

     ) as feed:
-        ohlcv: ShmArray = feed.shm
-        bars = ohlcv.array
-        symbol = feed.symbols[sym]
-        fqsn = symbol.front_fqsn()

-        times = bars['time']
-        end = pendulum.from_timestamp(times[-1])
-        start = pendulum.from_timestamp(times[times != times[-1]][-1])
-        step_size_s = (end - start).seconds
+        # TODO: right now we only show one symbol on charts, but
+        # overlays are coming muy pronto guey..
+        assert len(feed.flumes) == 1
+        flume = list(feed.flumes.values())[0]
+
+        ohlcv: ShmArray = flume.rt_shm
+        hist_ohlcv: ShmArray = flume.hist_shm
+
+        symbol = flume.symbol
+        fqsn = symbol.fqsn
+
+        step_size_s = 1
         tf_key = tf_in_1s[step_size_s]

         # load in symbol's ohlc data

@@ -759,59 +1010,84 @@ async def display_symbol_data(
             f'step:{tf_key} '
         )

-        linked = godwidget.linkedsplits
-        linked._symbol = symbol
+        rt_linked = godwidget.rt_linked
+        rt_linked._symbol = symbol

+        # create top history view chart above the "main rt chart".
+        hist_linked = godwidget.hist_linked
+        hist_linked._symbol = symbol
+        hist_chart = hist_linked.plot_ohlc_main(
+            symbol,
+            hist_ohlcv,
+            # in the case of history chart we explicitly set `False`
+            # to avoid internal pane creation.
+            # sidepane=False,
+            sidepane=godwidget.search,
+        )
+        # don't show when not focussed
+        hist_linked.cursor.always_show_xlabel = False

         # generate order mode side-pane UI
         # A ``FieldsForm`` form to configure order entry
+        # and add as next-to-y-axis singleton pane
         pp_pane: FieldsForm = mk_order_pane_layout(godwidget)

-        # add as next-to-y-axis singleton pane
         godwidget.pp_pane = pp_pane

         # create main OHLC chart
-        chart = linked.plot_ohlc_main(
+        ohlc_chart = rt_linked.plot_ohlc_main(
             symbol,
             ohlcv,
+            # in the case of history chart we explicitly set `False`
+            # to avoid internal pane creation.
             sidepane=pp_pane,
         )
-        chart.default_view()
-        chart._feeds[symbol.key] = feed
-        chart.setFocus()
-
-        # plot historical vwap if available
-        wap_in_history = False
+        ohlc_chart._feeds[symbol.key] = feed
+        ohlc_chart.setFocus()

         # XXX: FOR SOME REASON THIS IS CAUSING HANGZ!?!
-        # if brokermod._show_wap_in_history:
-        # if 'bar_wap' in bars.dtype.fields:
+        # plot historical vwap if available
+        wap_in_history = False
+        # if (
+        #     brokermod._show_wap_in_history
+        #     and 'bar_wap' in bars.dtype.fields
+        # ):
         #     wap_in_history = True
-        #     chart.draw_curve(
+        #     ohlc_chart.draw_curve(
         #         name='bar_wap',
         #         shm=ohlcv,
         #         color='default_light',
        #         add_label=False,
         #     )

-        # size view to data once at outset
-        chart.cv._set_yrange()

         # NOTE: we must immediately tell Qt to show the OHLC chart
         # to avoid a race where the subplots get added/shown to
         # the linked set *before* the main price chart!
-        linked.show()
-        linked.focus()
+        rt_linked.show()
+        rt_linked.focus()
         await trio.sleep(0)

+        # NOTE: here we insert the slow-history chart set into
+        # the fast chart's splitter -> so it's a splitter of charts
+        # inside the first widget slot of a splitter of charts XD
+        rt_linked.splitter.insertWidget(0, hist_linked)
+        # XXX: if we wanted it at the bottom?
+        # rt_linked.splitter.addWidget(hist_linked)
+        rt_linked.focus()
+
+        godwidget.resize_all()
+
         vlm_chart: Optional[ChartPlotWidget] = None
         async with trio.open_nursery() as ln:

             # if available load volume related built-in display(s)
-            if has_vlm(ohlcv):
+            if (
+                not symbol.broker_info[provider].get('no_vlm', False)
+                and has_vlm(ohlcv)
+            ):
                 vlm_chart = await ln.start(
                     open_vlm_displays,
-                    linked,
+                    rt_linked,
                     ohlcv,
                 )

@@ -819,7 +1095,7 @@ async def display_symbol_data(
             # from an input config.
             ln.start_soon(
                 start_fsp_displays,
-                linked,
+                rt_linked,
                 ohlcv,
                 loading_sym_key,
                 loglevel,

@@ -828,36 +1104,79 @@ async def display_symbol_data(
             # start graphics update loop after receiving first live quote
             ln.start_soon(
                 graphics_update_loop,
-                linked,
-                feed.stream,
-                ohlcv,
+                ln,
+                godwidget,
+                flume,
                 wap_in_history,
                 vlm_chart,
             )

+            await trio.sleep(0)
+
+            # size view to data prior to order mode init
+            ohlc_chart.default_view()
+            rt_linked.graphics_cycle()
+            await trio.sleep(0)
+
+            hist_chart.default_view(
+                bars_from_y=int(len(hist_ohlcv.array)),  # size to data
+                y_offset=6116*2,  # push it a little away from the y-axis
+            )
+            hist_linked.graphics_cycle()
+            await trio.sleep(0)
+
+            godwidget.resize_all()
+
+            await link_views_with_region(
+                ohlc_chart,
+                hist_chart,
+                flume,
+            )
+
+            mode: OrderMode
             async with (
                 open_order_mode(
                     feed,
-                    chart,
+                    godwidget,
                     fqsn,
                     order_mode_started
-                )
+                ) as mode
             ):
+                if not vlm_chart:
+                    # trigger another view reset if no sub-chart
+                    ohlc_chart.default_view()
+
+                rt_linked.mode = mode
+
                 # let Qt run to render all widgets and make sure the
                 # sidepanes line up vertically.
                 await trio.sleep(0)
-                linked.resize_sidepanes()
+
+                # dynamic resize steps
+                godwidget.resize_all()
+
+                # TODO: look into this because not sure why it was
+                # commented out / we ever needed it XD
                 # NOTE: we pop the volume chart from the subplots set so
                 # that it isn't double rendered in the display loop
                 # above since we do a maxmin calc on the volume data to
                 # determine if auto-range adjustements should be made.
-                # linked.subplots.pop('volume', None)
+                # rt_linked.subplots.pop('volume', None)

                 # TODO: make this not so shit XD
                 # close group status
                 sbar._status_groups[loading_sym_key][1]()

+                hist_linked.graphics_cycle()
+                await trio.sleep(0)
+
+                bars_in_mem = int(len(hist_ohlcv.array))
+                hist_chart.default_view(
+                    bars_from_y=bars_in_mem,  # size to data
+                    # push it 1/16th away from the y-axis
+                    y_offset=round(bars_in_mem / 16),
+                )
+                godwidget.resize_all()
+
                 # let the app run.. bby
-                # linked.graphics_cycle()
                 await trio.sleep_forever()
@@ -18,11 +18,27 @@
 Higher level annotation editors.

 """
-from dataclasses import dataclass, field
-from typing import Optional
+from __future__ import annotations
+from collections import defaultdict
+from typing import (
+    Optional,
+    TYPE_CHECKING
+)

 import pyqtgraph as pg
-from pyqtgraph import ViewBox, Point, QtCore, QtGui
+from pyqtgraph import (
+    ViewBox,
+    Point,
+    QtCore,
+    QtWidgets,
+)
+from PyQt5.QtGui import (
+    QColor,
+)
+from PyQt5.QtWidgets import (
+    QLabel,
+)

 from pyqtgraph import functions as fn
 from PyQt5.QtCore import QPointF
 import numpy as np

@@ -30,28 +46,34 @@ import numpy as np
 from ._style import hcolor, _font
 from ._lines import LevelLine
 from ..log import get_logger
+from ..data.types import Struct
+
+if TYPE_CHECKING:
+    from ._chart import GodWidget


 log = get_logger(__name__)


-@dataclass
-class ArrowEditor:
+class ArrowEditor(Struct):

-    chart: 'ChartPlotWidget'  # noqa
-    _arrows: field(default_factory=dict)
+    godw: GodWidget = None  # type: ignore # noqa
+    _arrows: dict[str, list[pg.ArrowItem]] = {}

     def add(
         self,
+        plot: pg.PlotItem,
         uid: str,
         x: float,
         y: float,
         color='default',
         pointing: Optional[str] = None,
-    ) -> pg.ArrowItem:
-        """Add an arrow graphic to view at given (x, y).

-        """
+    ) -> pg.ArrowItem:
+        '''
+        Add an arrow graphic to view at given (x, y).
+
+        '''
         angle = {
             'up': 90,
             'down': -90,

@@ -74,25 +96,25 @@ class ArrowEditor:
             brush=pg.mkBrush(hcolor(color)),
         )
         arrow.setPos(x, y)
-        self._arrows[uid] = arrow
+        self._arrows.setdefault(uid, []).append(arrow)

         # render to view
-        self.chart.plotItem.addItem(arrow)
+        plot.addItem(arrow)

         return arrow

     def remove(self, arrow) -> bool:
-        self.chart.plotItem.removeItem(arrow)
+        for linked in self.godw.iter_linked():
+            linked.chart.plotItem.removeItem(arrow)


-@dataclass
-class LineEditor:
-    '''The great editor of linez.
+class LineEditor(Struct):
+    '''
+    The great editor of linez.

     '''
-    chart: 'ChartPlotWidget' = None  # type: ignore # noqa
-    _order_lines: dict[str, LevelLine] = field(default_factory=dict)
+    godw: GodWidget = None  # type: ignore # noqa
+    _order_lines: defaultdict[str, LevelLine] = defaultdict(list)
     _active_staged_line: LevelLine = None

     def stage_line(

@@ -100,11 +122,11 @@ class LineEditor:
         line: LevelLine,

     ) -> LevelLine:
-        """Stage a line at the current chart's cursor position
+        '''
+        Stage a line at the current chart's cursor position
         and return it.

-        """
+        '''

         # add a "staged" cursor-tracking line to view
         # and cash it in a a var
         if self._active_staged_line:

@@ -115,17 +137,25 @@ class LineEditor:
         return line

     def unstage_line(self) -> LevelLine:
-        """Inverse of ``.stage_line()``.
+        '''
+        Inverse of ``.stage_line()``.

-        """
-        # chart = self.chart._cursor.active_plot
-        # # chart.setCursor(QtCore.Qt.ArrowCursor)
-        cursor = self.chart.linked.cursor
+        '''
+        cursor = self.godw.get_cursor()
+        if not cursor:
+            return None

         # delete "staged" cursor tracking line from view
         line = self._active_staged_line
         if line:
+            try:
                 cursor._trackers.remove(line)
+            except KeyError:
+                # when the current cursor doesn't have said line
+                # registered (probably means that user held order mode
+                # key while panning to another view) then we just
+                # ignore the remove error.
+                pass
             line.delete()

         self._active_staged_line = None

@@ -133,55 +163,58 @@ class LineEditor:
         # show the crosshair y line and label
         cursor.show_xhair()

-    def submit_line(
+    def submit_lines(
         self,
-        line: LevelLine,
+        lines: list[LevelLine],
         uuid: str,

     ) -> LevelLine:

-        staged_line = self._active_staged_line
-        if not staged_line:
-            raise RuntimeError("No line is currently staged!?")
+        # staged_line = self._active_staged_line
+        # if not staged_line:
+        #     raise RuntimeError("No line is currently staged!?")

         # for now, until submission reponse arrives
+        for line in lines:
             line.hide_labels()

         # register for later lookup/deletion
-        self._order_lines[uuid] = line
+        self._order_lines[uuid] += lines

-        return line
+        return lines

-    def commit_line(self, uuid: str) -> LevelLine:
-        """Commit a "staged line" to view.
+    def commit_line(self, uuid: str) -> list[LevelLine]:
+        '''
+        Commit a "staged line" to view.

         Submits the line graphic under the cursor as a (new) permanent
         graphic in view.

-        """
-        try:
-            line = self._order_lines[uuid]
-        except KeyError:
-            log.warning(f'No line for {uuid} could be found?')
-            return
-        else:
+        '''
+        lines = self._order_lines[uuid]
+        if lines:
+            for line in lines:
                 line.show_labels()
+                line.hide_markers()
+                log.debug(f'Level active for level: {line.value()}')
                 # TODO: other flashy things to indicate the order is active

-            log.debug(f'Level active for level: {line.value()}')
-
-            return line
+        return lines

     def lines_under_cursor(self) -> list[LevelLine]:
-        """Get the line(s) under the cursor position.
+        '''
+        Get the line(s) under the cursor position.

-        """
+        '''
         # Delete any hoverable under the cursor
-        return self.chart.linked.cursor._hovered
+        return self.godw.get_cursor()._hovered

-    def all_lines(self) -> tuple[LevelLine]:
-        return tuple(self._order_lines.values())
+    def all_lines(self) -> list[LevelLine]:
+        all_lines = []
+        for lines in list(self._order_lines.values()):
+            all_lines.extend(lines)
+
+        return all_lines

     def remove_line(
         self,

@@ -196,29 +229,30 @@ class LineEditor:

         '''
         # try to look up line from our registry
-        line = self._order_lines.pop(uuid, line)
-        if line:
+        lines = self._order_lines.pop(uuid, None)
+        if lines:
+            cursor = self.godw.get_cursor()
+            if cursor:
+                for line in lines:
                     # if hovered remove from cursor set
-            cursor = self.chart.linked.cursor
                     hovered = cursor._hovered
                     if line in hovered:
                         hovered.remove(line)

+                    log.debug(f'deleting {line} with oid: {uuid}')
+                    line.delete()
+
                 # make sure the xhair doesn't get left off
                 # just because we never got a un-hover event
                 cursor.show_xhair()

-            log.debug(f'deleting {line} with oid: {uuid}')
-            line.delete()
-
         else:
             log.warning(f'Could not find line for {line}')

-        return line
+        return lines


-class SelectRect(QtGui.QGraphicsRectItem):
+class SelectRect(QtWidgets.QGraphicsRectItem):

     def __init__(
         self,

@@ -227,12 +261,12 @@ class SelectRect(QtGui.QGraphicsRectItem):
     ) -> None:
         super().__init__(0, 0, 1, 1)

-        # self.rbScaleBox = QtGui.QGraphicsRectItem(0, 0, 1, 1)
+        # self.rbScaleBox = QGraphicsRectItem(0, 0, 1, 1)
         self.vb = viewbox
         self._chart: 'ChartPlotWidget' = None  # noqa

         # override selection box color
-        color = QtGui.QColor(hcolor(color))
+        color = QColor(hcolor(color))
         self.setPen(fn.mkPen(color, width=1))
         color.setAlpha(66)
         self.setBrush(fn.mkBrush(color))

@@ -240,7 +274,7 @@ class SelectRect(QtGui.QGraphicsRectItem):
         self.hide()
         self._label = None

-        label = self._label = QtGui.QLabel()
+        label = self._label = QLabel()
         label.setTextFormat(0)  # markdown
         label.setFont(_font.font)
         label.setMargin(0)

@@ -277,8 +311,8 @@ class SelectRect(QtGui.QGraphicsRectItem):
         # TODO: get bg color working
         palette.setColor(
             self._label.backgroundRole(),
-            # QtGui.QColor(chart.backgroundBrush()),
-            QtGui.QColor(hcolor('papas_special')),
+            # QColor(chart.backgroundBrush()),
+            QColor(hcolor('papas_special')),
         )

     def update_on_resize(self, vr, r):

@@ -326,7 +360,7 @@ class SelectRect(QtGui.QGraphicsRectItem):

         self.setPos(r.topLeft())
         self.resetTransform()
-        self.scale(r.width(), r.height())
+        self.setRect(r)
         self.show()

         y1, y2 = start_pos.y(), end_pos.y()
@@ -18,11 +18,11 @@
 Qt event proxying and processing using ``trio`` mem chans.

 """
-from contextlib import asynccontextmanager, AsyncExitStack
+from contextlib import asynccontextmanager as acm
 from typing import Callable

-from pydantic import BaseModel
 import trio
+from tractor.trionics import gather_contexts
 from PyQt5 import QtCore
 from PyQt5.QtCore import QEvent, pyqtBoundSignal
 from PyQt5.QtWidgets import QWidget

@@ -30,6 +30,8 @@ from PyQt5.QtWidgets import (
     QGraphicsSceneMouseEvent as gs_mouse,
 )

+from ..data.types import Struct
+

 MOUSE_EVENTS = {
     gs_mouse.GraphicsSceneMousePress,

@@ -43,13 +45,10 @@ MOUSE_EVENTS = {
 # TODO: maybe consider some constrained ints down the road?
 # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types

-class KeyboardMsg(BaseModel):
+class KeyboardMsg(Struct):
     '''Unpacked Qt keyboard event data.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-
     event: QEvent
     etype: int
     key: int

@@ -57,16 +56,13 @@ class KeyboardMsg(BaseModel):
     txt: str

     def to_tuple(self) -> tuple:
-        return tuple(self.dict().values())
+        return tuple(self.to_dict().values())


-class MouseMsg(BaseModel):
+class MouseMsg(Struct):
     '''Unpacked Qt keyboard event data.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-
     event: QEvent
     etype: int
     button: int
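The swap from pydantic's `BaseModel` to the in-repo `Struct` relies on that base exposing a `.to_dict()` helper. A plausible stand-in sketch of such a base on top of `msgspec.Struct` (the real `piker.data.types.Struct` may carry more; `KeyboardMsg` here drops the `event: QEvent` field so the snippet runs without Qt):

```python
import msgspec

class Struct(msgspec.Struct):
    # msgspec structs don't validate on __init__, so arbitrary
    # runtime-only field types (like QEvent above) are fine without
    # pydantic's ``arbitrary_types_allowed`` escape hatch.
    def to_dict(self) -> dict:
        return {
            f: getattr(self, f)
            for f in self.__struct_fields__
        }

class KeyboardMsg(Struct):
    etype: int
    key: int
    txt: str

msg = KeyboardMsg(etype=6, key=65, txt='a')
assert msg.to_dict() == {'etype': 6, 'key': 65, 'txt': 'a'}
assert tuple(msg.to_dict().values()) == (6, 65, 'a')  # i.e. ``.to_tuple()``
```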
@@ -160,7 +156,7 @@ class EventRelay(QtCore.QObject):
         return False


-@asynccontextmanager
+@acm
 async def open_event_stream(

     source_widget: QWidget,

@@ -186,7 +182,7 @@ async def open_event_stream(
     source_widget.removeEventFilter(kc)


-@asynccontextmanager
+@acm
 async def open_signal_handler(

     signal: pyqtBoundSignal,

@@ -211,7 +207,7 @@ async def open_signal_handler(
     yield


-@asynccontextmanager
+@acm
 async def open_handlers(

     source_widgets: list[QWidget],

@@ -220,16 +216,14 @@ async def open_handlers(
     **kwargs,

 ) -> None:

     async with (
         trio.open_nursery() as n,
-        AsyncExitStack() as stack,
-    ):
-        for widget in source_widgets:
-
-            event_recv_stream = await stack.enter_async_context(
-                open_event_stream(widget, event_types, **kwargs)
-            )
+        gather_contexts([
+            open_event_stream(widget, event_types, **kwargs)
+            for widget in source_widgets
+        ]) as streams,
+    ):
+        for widget, event_recv_stream in zip(source_widgets, streams):
             n.start_soon(async_handler, widget, event_recv_stream)

         yield
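The refactor replaces a manual `AsyncExitStack` loop with `tractor.trionics.gather_contexts`, which enters a collection of async context managers and yields their results as one ordered sequence. A self-contained sketch of the same pattern using a toy context manager in place of `open_event_stream()` (assuming `gather_contexts` takes a sequence of managers, as the diff's usage implies):

```python
import trio
from contextlib import asynccontextmanager as acm
from tractor.trionics import gather_contexts

@acm
async def numbered(i: int):
    # stand-in for ``open_event_stream()``: any async context manager
    yield i * 10

async def main():
    # enter all managers and get their yielded values back as one
    # ordered sequence, mirroring ``as streams`` in the diff above.
    async with gather_contexts([numbered(i) for i in range(3)]) as values:
        assert tuple(values) == (0, 10, 20)

trio.run(main)
```

The win over `AsyncExitStack` is that entry/exit is managed as a single unit rather than one `enter_async_context()` call per widget.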
@@ -20,16 +20,24 @@ Trio - Qt integration
 Run ``trio`` in guest mode on top of the Qt event loop.
 All global Qt runtime settings are mostly defined here.
 """
-from typing import Tuple, Callable, Dict, Any
+from __future__ import annotations
+from typing import (
+    Callable,
+    Any,
+    Type,
+    TYPE_CHECKING,
+)
 import platform
 import traceback

 # Qt specific
 import PyQt5  # noqa
-import pyqtgraph as pg
-from pyqtgraph import QtGui
+from PyQt5.QtWidgets import (
+    QWidget,
+    QMainWindow,
+    QApplication,
+)
 from PyQt5 import QtCore
-# from PyQt5.QtGui import QLabel, QStatusBar
 from PyQt5.QtCore import (
     pyqtRemoveInputHook,
     Qt,

@@ -37,7 +45,7 @@ from PyQt5.QtCore import (
 )
 import qdarkstyle
 from qdarkstyle import DarkPalette
-# import qdarkgraystyle
+# import qdarkgraystyle  # TODO: play with it
 import trio
 from outcome import Error

@@ -46,6 +54,7 @@ from ..log import get_logger
 from ._pg_overrides import _do_overrides
 from . import _style


 log = get_logger(__name__)

 # pyqtgraph global config

@@ -72,17 +81,18 @@ if platform.system() == "Windows":

 def run_qtractor(
     func: Callable,
-    args: Tuple,
-    main_widget: QtGui.QWidget,
-    tractor_kwargs: Dict[str, Any] = {},
-    window_type: QtGui.QMainWindow = None,
+    args: tuple,
+    main_widget_type: Type[QWidget],
+    tractor_kwargs: dict[str, Any] = {},
+    window_type: QMainWindow = None,

 ) -> None:
     # avoids annoying message when entering debugger from qt loop
     pyqtRemoveInputHook()

-    app = QtGui.QApplication.instance()
+    app = QApplication.instance()
     if app is None:
-        app = PyQt5.QtWidgets.QApplication([])
+        app = QApplication([])

     # TODO: we might not need this if it's desired
     # to cancel the tractor machinery on Qt loop

@@ -156,7 +166,7 @@ def run_qtractor(
     # hook into app focus change events
     app.focusChanged.connect(window.on_focus_change)

-    instance = main_widget()
+    instance = main_widget_type()
     instance.window = window

     # override tractor's defaults

@@ -178,7 +188,7 @@ def run_qtractor(
         # restrict_keyboard_interrupt_to_checkpoints=True,
     )

-    window.main_widget = main_widget
+    window.godwidget: GodWidget = instance
     window.setCentralWidget(instance)
     if is_windows:
         window.configure_to_desktop()
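For context on what `run_qtractor` is wiring up: trio "guest mode" runs the trio scheduler as a guest of the Qt event loop via `trio.lowlevel.start_guest_run`. A minimal sketch of the pattern under stated assumptions (this is not piker's actual bootstrap; a production host needs a genuinely thread-safe reschedule trampoline, e.g. posting a custom `QEvent`, where `QTimer.singleShot` merely keeps the sketch short):

```python
import trio
from PyQt5.QtCore import QTimer
from PyQt5.QtWidgets import QApplication
from outcome import Error

async def main() -> None:
    await trio.sleep(0.1)
    print('trio ran on the Qt loop')

app = QApplication([])

def run_sync_soon_threadsafe(fn) -> None:
    # ask the Qt loop to run one trio scheduler tick soon
    QTimer.singleShot(0, fn)

def done_callback(outcome) -> None:
    if isinstance(outcome, Error):
        print('trio crashed:', outcome.error)
    app.quit()

trio.lowlevel.start_guest_run(
    main,
    run_sync_soon_threadsafe=run_sync_soon_threadsafe,
    done_callback=done_callback,
)
app.exec_()
```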
@@ -59,6 +59,7 @@ from ._curve import (
     FlattenedOHLC,
 )
 from ..log import get_logger
+from .._profile import Profiler


 log = get_logger(__name__)

@@ -130,7 +131,7 @@ def render_baritems(
         int, int, np.ndarray,
         int, int, np.ndarray,
     ],
-    profiler: pg.debug.Profiler,
+    profiler: Profiler,
     **kwargs,

 ) -> None:

@@ -337,13 +338,14 @@ class Flow(msgspec.Struct):  # , frozen=True):
     name: str
     plot: pg.PlotItem
     graphics: Union[Curve, BarItems]
+    _shm: ShmArray
+    yrange: tuple[float, float] = None

     # in some cases a flow may want to change its
     # graphical "type" or, "form" when downsampling,
     # normally this is just a plain line.
     ds_graphics: Optional[Curve] = None

-    _shm: ShmArray

     is_ohlc: bool = False
     render: bool = True  # toggle for display loop

@@ -386,10 +388,11 @@ class Flow(msgspec.Struct):  # , frozen=True):
         lbar: int,
         rbar: int,

-    ) -> tuple[float, float]:
+    ) -> Optional[tuple[float, float]]:
         '''
         Compute the cached max and min y-range values for a given
-        x-range determined by ``lbar`` and ``rbar``.
+        x-range determined by ``lbar`` and ``rbar`` or ``None``
+        if no range can be determined (yet).

         '''
         rkey = (lbar, rbar)

@@ -399,9 +402,8 @@ class Flow(msgspec.Struct):  # , frozen=True):

         shm = self.shm
         if shm is None:
-            mxmn = None
+            return None

-        else:  # new block for profiling?..
         arr = shm.array

         # build relative indexes into shm array

@@ -414,7 +416,11 @@ class Flow(msgspec.Struct):  # , frozen=True):
         ]

         if not slice_view.size:
-            mxmn = None
+            return None
+
+        elif self.yrange:
+            mxmn = self.yrange
+            # print(f'{self.name} M4 maxmin: {mxmn}')

         else:
             if self.is_ohlc:

@@ -427,9 +433,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
                 yhigh = np.max(view)

             mxmn = ylow, yhigh
+            # print(f'{self.name} MANUAL maxmin: {mxmin}')

-        if mxmn is not None:
-            # cache new mxmn result
+        # cache result for input range
+        assert mxmn
         self._mxmns[rkey] = mxmn

         return mxmn
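The reworked `maxmin()` above memoizes the (min, max) pair per x-range key and short-circuits to a downsampler-provided `yrange` when one exists. A stripped-down sketch of that memoization pattern over a plain numpy array (`YRangeCache` is a made-up name for illustration, not the `Flow` API):

```python
import numpy as np

class YRangeCache:
    # memoize (ymin, ymax) per (lbar, rbar) view range, the way
    # ``Flow.maxmin()`` does above; ``yrange`` mimics the value a
    # downsampler can pre-compute and hand back.
    def __init__(self, array: np.ndarray):
        self.array = array
        self.yrange: tuple[float, float] | None = None
        self._mxmns: dict[tuple[int, int], tuple[float, float]] = {}

    def maxmin(self, lbar: int, rbar: int) -> tuple[float, float] | None:
        rkey = (lbar, rbar)
        cached = self._mxmns.get(rkey)
        if cached is not None:
            return cached

        view = self.array[lbar:rbar + 1]
        if not view.size:
            return None

        # prefer the downsampler's pre-computed range when present
        mxmn = self.yrange or (float(np.min(view)), float(np.max(view)))
        self._mxmns[rkey] = mxmn
        return mxmn

cache = YRangeCache(np.array([3.0, 1.0, 4.0, 1.5]))
assert cache.maxmin(0, 2) == (1.0, 4.0)
assert cache.maxmin(0, 2) is cache.maxmin(0, 2)  # served from cache
```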
@@ -511,7 +518,7 @@ class Flow(msgspec.Struct):  # , frozen=True):
         render: bool = True,
         array_key: Optional[str] = None,

-        profiler: Optional[pg.debug.Profiler] = None,
+        profiler: Optional[Profiler] = None,
         do_append: bool = True,

         **kwargs,

@@ -522,7 +529,7 @@ class Flow(msgspec.Struct):  # , frozen=True):
         render to graphics.

         '''
-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
             msg=f'Flow.update_graphics() for {self.name}',
             disabled=not pg_profile_enabled(),
             ms_threshold=4,

@@ -628,10 +635,13 @@ class Flow(msgspec.Struct):  # , frozen=True):
             # source data so we clear our path data in prep
             # to generate a new one from original source data.
             new_sample_rate = True
-            showing_src_data = True
             should_ds = False
             should_redraw = True

+            showing_src_data = True
+            # reset yrange to be computed from source data
+            self.yrange = None
+
         # MAIN RENDER LOGIC:
         # - determine in view data and redraw on range change
         # - determine downsampling ops if needed

@@ -657,6 +667,10 @@ class Flow(msgspec.Struct):  # , frozen=True):

             **rkwargs,
         )
+        if showing_src_data:
+            # print(f"{self.name} SHOWING SOURCE")
+            # reset yrange to be computed from source data
+            self.yrange = None

         if not out:
             log.warning(f'{self.name} failed to render!?')

@@ -664,6 +678,9 @@ class Flow(msgspec.Struct):  # , frozen=True):

         path, data, reset = out

+        # if self.yrange:
+        #     print(f'flow {self.name} yrange from m4: {self.yrange}')
+
         # XXX: SUPER UGGGHHH... without this we get stale cache
         # graphics that don't update until you downsampler again..
         if reset:

@@ -932,7 +949,7 @@ class Renderer(msgspec.Struct):

         new_read,
         array_key: str,
-        profiler: pg.debug.Profiler,
+        profiler: Profiler,
         uppx: float = 1,

         # redraw and ds flags

@@ -1058,6 +1075,7 @@ class Renderer(msgspec.Struct):
         # xy-path data transform: convert source data to a format
         # able to be passed to a `QPainterPath` rendering routine.
         if not len(hist):
+            # XXX: this might be why the profiler only has exits?
             return

         x_out, y_out, connect = self.format_xy(

@@ -1144,11 +1162,14 @@ class Renderer(msgspec.Struct):

         elif should_ds and uppx > 1:

-            x_out, y_out = xy_downsample(
+            x_out, y_out, ymn, ymx = xy_downsample(
                 x_out,
                 y_out,
                 uppx,
             )
+            self.flow.yrange = ymn, ymx
+            # print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}')

             reset = True
             profiler(f'FULL PATH downsample redraw={should_ds}')
             self._in_ds = True
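The widened `xy_downsample` return lets the renderer hand the downsampled y-extents straight back to the flow for auto-ranging instead of recomputing them. A hedged sketch of a bin-wise min/max ("M4"-style) downsampler with that return shape (`xy_downsample_sketch` is illustrative only, not piker's actual implementation):

```python
import numpy as np

def xy_downsample_sketch(
    x: np.ndarray,
    y: np.ndarray,
    uppx: float,
) -> tuple[np.ndarray, np.ndarray, float, float]:
    # bin samples by how many land in one x-pixel, keep each bin's
    # min and max (the gist of M4 downsampling), and report global
    # y-extents so callers can set the view range for free.
    n = max(int(uppx), 1)
    m = (len(y) // n) * n
    bins = y[:m].reshape(-1, n)
    ymns, ymxs = bins.min(axis=1), bins.max(axis=1)

    # interleave (min, max) per bin to preserve the envelope shape
    y_out = np.empty(2 * len(bins), dtype=y.dtype)
    y_out[0::2], y_out[1::2] = ymns, ymxs
    x_out = np.repeat(x[:m:n], 2)

    return x_out, y_out, float(ymns.min()), float(ymxs.max())

x = np.arange(1000.0)
y = np.sin(x / 50)
x_ds, y_ds, ymn, ymx = xy_downsample_sketch(x, y, uppx=10)
assert len(y_ds) == 200 and ymn <= y_ds.min() <= y_ds.max() <= ymx
```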
@@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
     #     color: #19232D;
     #     width: 10px;

-        self.setRange(0, slots)
+        self.setRange(0, int(slots))
         self.setValue(value)


@@ -644,7 +644,7 @@ def mk_fill_status_bar(

     # TODO: calc this height from the ``ChartnPane``
     chart_h = round(parent_pane.height() * 5/8)
-    bar_h = chart_h * 0.375
+    bar_h = chart_h * 0.375*0.9

     # TODO: once things are sized to screen
     bar_label_font_size = label_font_size or _font.px_size - 2

@@ -27,12 +27,13 @@ from itertools import cycle
 from typing import Optional, AsyncGenerator, Any

 import numpy as np
-from pydantic import create_model
+import msgspec
 import tractor
 import pyqtgraph as pg
 import trio
 from trio_typing import TaskStatus

+from piker.data.types import Struct
 from ._axes import PriceAxis
 from .._cacheables import maybe_open_context
 from ..calc import humanize

@@ -53,11 +54,12 @@ from ._forms import (
 from ..fsp._api import maybe_mk_fsp_shm, Fsp
 from ..fsp import cascade
 from ..fsp._volume import (
-    tina_vwap,
+    # tina_vwap,
     dolla_vlm,
     flow_rates,
 )
 from ..log import get_logger
+from .._profile import Profiler

 log = get_logger(__name__)

@@ -153,12 +155,13 @@ async def open_fsp_sidepane(
     )

     # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
-    FspConfig = create_model(
-        'FspConfig',
-        name=name,
-        **params,
+    FspConfig = msgspec.defstruct(
+        "Point",
+        [('name', name)] + list(params.items()),
+        bases=(Struct,),
     )
-    sidepane.model = FspConfig()
+    model = FspConfig(name=name, **params)
+    sidepane.model = model
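`msgspec.defstruct` is the msgspec analogue of pydantic's `create_model` for building a struct type at runtime. Worth noting: its field entries are `(name, type)` or `(name, type, default)` tuples, so the 2-tuples fed in above pass runtime values where types are expected (presumably tolerated by the permissive `Struct` base). A small sketch of the documented usage, with hypothetical FSP params:

```python
import msgspec

# dynamic struct creation: fields are (name, type) or
# (name, type, default) tuples; non-default fields come first.
params = {'period': 14, 'smooth': 3}

FspConfig = msgspec.defstruct(
    'FspConfig',
    [('name', str)] + [
        (key, type(val), val) for key, val in params.items()
    ],
)

cfg = FspConfig(name='rsi', period=14)
assert (cfg.name, cfg.period, cfg.smooth) == ('rsi', 14, 3)
```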

     # just a logger for now until we get fsp configs up and running.
     async def settings_change(

@@ -188,7 +191,7 @@ async def open_fsp_actor_cluster(

     from tractor._clustering import open_actor_cluster

-    # profiler = pg.debug.Profiler(
+    # profiler = Profiler(
     #     delayed=False,
     #     disabled=False
     # )

@@ -210,7 +213,7 @@ async def run_fsp_ui(
     target: Fsp,
     conf: dict[str, dict],
     loglevel: str,
-    # profiler: pg.debug.Profiler,
+    # profiler: Profiler,
     # _quote_throttle_rate: int = 58,

 ) -> None:

@@ -440,7 +443,9 @@ class FspAdmin:
             # if the chart isn't hidden try to update
             # the data on screen.
             if not self.linked.isHidden():
-                log.debug(f'Re-syncing graphics for fsp: {ns_path}')
+                log.debug(
+                    f'Re-syncing graphics for fsp: {ns_path}'
+                )
                 self.linked.graphics_cycle(
                     trigger_all=True,
                     prepend_update_index=info['first'],

@@ -469,9 +474,10 @@ class FspAdmin:
             target=target,
             readonly=True,
         )
-        self._flow_registry[
-            (self.src_shm._token, target.name)
-        ] = dst_shm._token
+        self._flow_registry[(
+            self.src_shm._token,
+            target.name
+        )] = dst_shm._token

         # if not opened:
         #     raise RuntimeError(

@@ -618,6 +624,8 @@ async def open_vlm_displays(
     # built-in vlm which we plot ASAP since it's
     # usually data provided directly with OHLC history.
     shm = ohlcv
+    ohlc_chart = linked.chart
+
     chart = linked.add_plot(
         name='volume',
         shm=shm,

@@ -633,26 +641,34 @@ async def open_vlm_displays(
         # the curve item internals are pretty convoluted.
         style='step',
     )
+    ohlc_chart.view.enable_auto_yrange(
+        src_vb=chart.view,
+    )

     # force 0 to always be in view
     def multi_maxmin(
         names: list[str],

     ) -> tuple[float, float]:
+        '''
+        Flows "group" maxmin loop; assumes all named flows
+        are in the same co-domain and thus can be sorted
+        as one set.
+
+        Iterates all the named flows and calls the chart
+        api to find their range values and return.
+
+        TODO: really we should probably have a more built-in API
+        for this?
+
+        '''
         mx = 0
         for name in names:
-            mxmn = chart.maxmin(name=name)
-            if mxmn:
-                ymax = mxmn[1]
-                if ymax > mx:
-                    mx = ymax
+            ymn, ymx = chart.maxmin(name=name)
+            mx = max(mx, ymx)

         return 0, mx

-    chart.view.maxmin = partial(multi_maxmin, names=['volume'])

     # TODO: fix the x-axis label issue where if you put
     # the axis on the left it's totally not lined up...
     # show volume units value on LHS (for dinkus)

@@ -736,6 +752,8 @@ async def open_vlm_displays(
         },
     )

+    dvlm_pi.hideAxis('left')
+    dvlm_pi.hideAxis('bottom')
     # all to be overlayed curve names
     fields = [
         'dolla_vlm',

@@ -776,6 +794,7 @@ async def open_vlm_displays(

     ) -> None:
         for name in names:
+
             if 'dark' in name:
                 color = dark_vlm_color
             elif 'rate' in name:

@@ -867,6 +886,7 @@ async def open_vlm_displays(
         # keep both regular and dark vlm in view
         names=trade_rate_fields,
     )
+    tr_pi.hideAxis('bottom')

     chart_curves(
         trade_rate_fields,

@@ -940,7 +960,7 @@ async def start_fsp_displays(
     #     },
     # },
     }
-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         delayed=False,
         disabled=False
     )

@@ -33,6 +33,7 @@ import numpy as np
 import trio

 from ..log import get_logger
+from .._profile import Profiler
 from .._profile import pg_profile_enabled, ms_slower_then
 # from ._style import _min_points_to_show
 from ._editors import SelectRect

@@ -141,13 +142,16 @@ async def handle_viewmode_kb_inputs(
             Qt.Key_Space,
         }
     ):
-        view._chart.linked.godwidget.search.focus()
+        godw = view._chart.linked.godwidget
+        godw.hist_linked.resize_sidepanes(from_linked=godw.rt_linked)
+        godw.search.focus()

     # esc and ctrl-c
     if key == Qt.Key_Escape or (ctrl and key == Qt.Key_C):
         # ctrl-c as cancel
         # https://forum.qt.io/topic/532/how-to-catch-ctrl-c-on-a-widget/9
         view.select_box.clear()
+        view.linked.focus()

     # cancel order or clear graphics
     if key == Qt.Key_C or key == Qt.Key_Delete:

@@ -178,17 +182,17 @@ async def handle_viewmode_kb_inputs(
     if key in pressed:
         pressed.remove(key)

-    # QUERY/QUOTE MODE #
+    # QUERY/QUOTE MODE
+    # ----------------
     if {Qt.Key_Q}.intersection(pressed):

-        view.linkedsplits.cursor.in_query_mode = True
+        view.linked.cursor.in_query_mode = True

     else:
-        view.linkedsplits.cursor.in_query_mode = False
+        view.linked.cursor.in_query_mode = False

     # SELECTION MODE
     # --------------

     if shift:
         if view.state['mouseMode'] == ViewBox.PanMode:
             view.setMouseMode(ViewBox.RectMode)

@@ -209,18 +213,27 @@ async def handle_viewmode_kb_inputs(

     # ORDER MODE
     # ----------

     # live vs. dark trigger + an action {buy, sell, alert}
     order_keys_pressed = ORDER_MODE.intersection(pressed)

     if order_keys_pressed:

-        # show the pp size label
-        order_mode.current_pp.show()
+        # TODO: it seems like maybe the composition should be
+        # reversed here? Like, maybe we should have the nav have
+        # access to the pos state and then make encapsulated logic
+        # that shows the right stuff on screen instead or order mode
+        # and position-related abstractions doing this?
+
+        # show the pp size label only if there is
+        # a non-zero pos existing
+        tracker = order_mode.current_pp
+        if tracker.live_pp.size:
+            tracker.nav.show()

         # TODO: show pp config mini-params in status bar widget
         # mode.pp_config.show()

+        trigger_type: str = 'dark'
         if (
             # 's' for "submit" to activate "live" order
             Qt.Key_S in pressed or

@@ -228,9 +241,6 @@ async def handle_viewmode_kb_inputs(
         ):
             trigger_type: str = 'live'

-        else:
-            trigger_type: str = 'dark'

         # order mode trigger "actions"
         if Qt.Key_D in pressed:  # for "damp eet"
             action = 'sell'

@@ -259,8 +269,8 @@ async def handle_viewmode_kb_inputs(
             Qt.Key_S in pressed or
             order_keys_pressed or
             Qt.Key_O in pressed
-        ) and
-        key in NUMBER_LINE
+        )
+        and key in NUMBER_LINE
     ):
         # hot key to set order slots size.
         # change edit field to current number line value,

@@ -278,7 +288,7 @@ async def handle_viewmode_kb_inputs(
     else:  # none active

         # hide pp label
-        order_mode.current_pp.hide_info()
+        order_mode.current_pp.nav.hide_info()

     # if none are pressed, remove "staged" level
     # line under cursor position

@@ -319,7 +329,6 @@ async def handle_viewmode_mouse(
     ):
         # when in order mode, submit execution
         # msg.event.accept()
-        # breakpoint()
         view.order_mode.submit_order()


@@ -336,16 +345,6 @@ class ChartView(ViewBox):
     '''
     mode_name: str = 'view'

-    # "relay events" for making overlaid views work.
-    # NOTE: these MUST be defined here (and can't be monkey patched
-    # on later) due to signal construction requiring refs to be
-    # in place during the run of meta-class machinery.
-    mouseDragEventRelay = QtCore.Signal(object, object, object)
-    wheelEventRelay = QtCore.Signal(object, object, object)
-
-    event_relay_source: 'Optional[ViewBox]' = None
-    relays: dict[str, QtCore.Signal] = {}

     def __init__(
         self,

@@ -375,7 +374,7 @@ class ChartView(ViewBox):
             y=True,
         )

-        self.linkedsplits = None
+        self.linked = None
         self._chart: 'ChartPlotWidget' = None  # noqa

         # add our selection box annotator

@@ -397,8 +396,11 @@ class ChartView(ViewBox):

         '''
         if self._ic is None:
+            try:
                 self.chart.pause_all_feeds()
                 self._ic = trio.Event()
+            except RuntimeError:
+                pass

     def signal_ic(
         self,

@@ -411,9 +413,12 @@ class ChartView(ViewBox):

         '''
         if self._ic:
|
if self._ic:
|
||||||
|
try:
|
||||||
self._ic.set()
|
self._ic.set()
|
||||||
self._ic = None
|
self._ic = None
|
||||||
self.chart.resume_all_feeds()
|
self.chart.resume_all_feeds()
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
|
|
||||||
@asynccontextmanager
|
@asynccontextmanager
|
||||||
async def open_async_input_handler(
|
async def open_async_input_handler(
|
||||||
|
@ -463,7 +468,7 @@ class ChartView(ViewBox):
|
||||||
self,
|
self,
|
||||||
ev,
|
ev,
|
||||||
axis=None,
|
axis=None,
|
||||||
relayed_from: ChartView = None,
|
# relayed_from: ChartView = None,
|
||||||
):
|
):
|
||||||
'''
|
'''
|
||||||
Override "center-point" location for scrolling.
|
Override "center-point" location for scrolling.
|
||||||
|
@ -474,13 +479,20 @@ class ChartView(ViewBox):
|
||||||
TODO: PR a method into ``pyqtgraph`` to make this configurable
|
TODO: PR a method into ``pyqtgraph`` to make this configurable
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
linked = self.linked
|
||||||
|
if (
|
||||||
|
not linked
|
||||||
|
):
|
||||||
|
# print(f'{self.name} not linked but relay from {relayed_from.name}')
|
||||||
|
return
|
||||||
|
|
||||||
if axis in (0, 1):
|
if axis in (0, 1):
|
||||||
mask = [False, False]
|
mask = [False, False]
|
||||||
mask[axis] = self.state['mouseEnabled'][axis]
|
mask[axis] = self.state['mouseEnabled'][axis]
|
||||||
else:
|
else:
|
||||||
mask = self.state['mouseEnabled'][:]
|
mask = self.state['mouseEnabled'][:]
|
||||||
|
|
||||||
chart = self.linkedsplits.chart
|
chart = self.linked.chart
|
||||||
|
|
||||||
# don't zoom more then the min points setting
|
# don't zoom more then the min points setting
|
||||||
l, lbar, rbar, r = chart.bars_range()
|
l, lbar, rbar, r = chart.bars_range()
|
||||||
|
@ -593,9 +605,20 @@ class ChartView(ViewBox):
|
||||||
self,
|
self,
|
||||||
ev,
|
ev,
|
||||||
axis: Optional[int] = None,
|
axis: Optional[int] = None,
|
||||||
relayed_from: ChartView = None,
|
# relayed_from: ChartView = None,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
|
# if relayed_from:
|
||||||
|
# print(f'PAN: {self.name} -> RELAYED FROM: {relayed_from.name}')
|
||||||
|
|
||||||
|
# NOTE since in the overlay case axes are already
|
||||||
|
# "linked" any x-range change will already be mirrored
|
||||||
|
# in all overlaid ``PlotItems``, so we need to simply
|
||||||
|
# ignore the signal here since otherwise we get N-calls
|
||||||
|
# from N-overlays resulting in an "accelerated" feeling
|
||||||
|
# panning motion instead of the expect linear shift.
|
||||||
|
# if relayed_from:
|
||||||
|
# return
|
||||||
|
|
||||||
pos = ev.pos()
|
pos = ev.pos()
|
||||||
lastPos = ev.lastPos()
|
lastPos = ev.lastPos()
|
||||||
|
@ -669,7 +692,10 @@ class ChartView(ViewBox):
|
||||||
# XXX: WHY
|
# XXX: WHY
|
||||||
ev.accept()
|
ev.accept()
|
||||||
|
|
||||||
|
try:
|
||||||
self.start_ic()
|
self.start_ic()
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
# if self._ic is None:
|
# if self._ic is None:
|
||||||
# self.chart.pause_all_feeds()
|
# self.chart.pause_all_feeds()
|
||||||
# self._ic = trio.Event()
|
# self._ic = trio.Event()
|
||||||
|
@ -761,7 +787,7 @@ class ChartView(ViewBox):
|
||||||
'''
|
'''
|
||||||
name = self.name
|
name = self.name
|
||||||
# print(f'YRANGE ON {name}')
|
# print(f'YRANGE ON {name}')
|
||||||
profiler = pg.debug.Profiler(
|
profiler = Profiler(
|
||||||
msg=f'`ChartView._set_yrange()`: `{name}`',
|
msg=f'`ChartView._set_yrange()`: `{name}`',
|
||||||
disabled=not pg_profile_enabled(),
|
disabled=not pg_profile_enabled(),
|
||||||
ms_threshold=ms_slower_then,
|
ms_threshold=ms_slower_then,
|
||||||
|
@ -830,29 +856,33 @@ class ChartView(ViewBox):
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
'''
|
'''
|
||||||
Assign callback for rescaling y-axis automatically
|
Assign callbacks for rescaling and resampling y-axis data
|
||||||
based on data contents and ``ViewBox`` state.
|
automatically based on data contents and ``ViewBox`` state.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
if src_vb is None:
|
if src_vb is None:
|
||||||
src_vb = self
|
src_vb = self
|
||||||
|
|
||||||
# splitter(s) resizing
|
# widget-UIs/splitter(s) resizing
|
||||||
src_vb.sigResized.connect(self._set_yrange)
|
src_vb.sigResized.connect(self._set_yrange)
|
||||||
|
|
||||||
|
# re-sampling trigger:
|
||||||
# TODO: a smarter way to avoid calling this needlessly?
|
# TODO: a smarter way to avoid calling this needlessly?
|
||||||
# 2 things i can think of:
|
# 2 things i can think of:
|
||||||
# - register downsample-able graphics specially and only
|
# - register downsample-able graphics specially and only
|
||||||
# iterate those.
|
# iterate those.
|
||||||
# - only register this when certain downsampleable graphics are
|
# - only register this when certain downsample-able graphics are
|
||||||
# "added to scene".
|
# "added to scene".
|
||||||
src_vb.sigRangeChangedManually.connect(
|
src_vb.sigRangeChangedManually.connect(
|
||||||
self.maybe_downsample_graphics
|
self.maybe_downsample_graphics
|
||||||
)
|
)
|
||||||
|
|
||||||
# mouse wheel doesn't emit XRangeChanged
|
# mouse wheel doesn't emit XRangeChanged
|
||||||
src_vb.sigRangeChangedManually.connect(self._set_yrange)
|
src_vb.sigRangeChangedManually.connect(self._set_yrange)
|
||||||
|
|
||||||
|
# XXX: enabling these will cause "jittery"-ness
|
||||||
|
# on zoom where sharp diffs in the y-range will
|
||||||
|
# not re-size right away until a new sample update?
|
||||||
|
# if src_vb is not self:
|
||||||
# src_vb.sigXRangeChanged.connect(self._set_yrange)
|
# src_vb.sigXRangeChanged.connect(self._set_yrange)
|
||||||
# src_vb.sigXRangeChanged.connect(
|
# src_vb.sigXRangeChanged.connect(
|
||||||
# self.maybe_downsample_graphics
|
# self.maybe_downsample_graphics
|
||||||
|
@ -897,8 +927,7 @@ class ChartView(ViewBox):
|
||||||
self,
|
self,
|
||||||
autoscale_overlays: bool = True,
|
autoscale_overlays: bool = True,
|
||||||
):
|
):
|
||||||
|
profiler = Profiler(
|
||||||
profiler = pg.debug.Profiler(
|
|
||||||
msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
|
msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
|
||||||
disabled=not pg_profile_enabled(),
|
disabled=not pg_profile_enabled(),
|
||||||
|
|
||||||
|
@ -912,8 +941,12 @@ class ChartView(ViewBox):
|
||||||
|
|
||||||
# TODO: a faster single-loop-iterator way of doing this XD
|
# TODO: a faster single-loop-iterator way of doing this XD
|
||||||
chart = self._chart
|
chart = self._chart
|
||||||
linked = self.linkedsplits
|
plots = {chart.name: chart}
|
||||||
plots = linked.subplots | {chart.name: chart}
|
|
||||||
|
linked = self.linked
|
||||||
|
if linked:
|
||||||
|
plots |= linked.subplots
|
||||||
|
|
||||||
for chart_name, chart in plots.items():
|
for chart_name, chart in plots.items():
|
||||||
for name, flow in chart._flows.items():
|
for name, flow in chart._flows.items():
|
||||||
|
|
||||||
|
@ -923,6 +956,7 @@ class ChartView(ViewBox):
|
||||||
# XXX: super important to be aware of this.
|
# XXX: super important to be aware of this.
|
||||||
# or not flow.graphics.isVisible()
|
# or not flow.graphics.isVisible()
|
||||||
):
|
):
|
||||||
|
# print(f'skipping {flow.name}')
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# pass in no array which will read and render from the last
|
# pass in no array which will read and render from the last
|
||||||
|
|
|
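Aside: the `maybe_downsample_graphics()` hunk above leans on Python 3.9+ in-place dict union (PEP 584) to collect the main chart plus any linked subplots. A minimal standalone sketch of that merge pattern (names here are illustrative stand-ins, not piker's real types):

    from types import SimpleNamespace

    chart = object()  # stand-in for a ChartPlotWidget
    linked = SimpleNamespace(subplots={'vlm': object()})  # or None when unlinked

    plots: dict[str, object] = {'ohlc': chart}
    if linked:
        # in-place dict union (PEP 584, Python 3.9+)
        plots |= linked.subplots

    print(list(plots))  # ['ohlc', 'vlm']

The null-guard mirrors the diff's behavior: a view with no `linked` set simply keeps its own chart in the map.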
@@ -18,9 +18,14 @@
 Lines for orders, alerts, L2.

 """
+from __future__ import annotations
 from functools import partial
 from math import floor
-from typing import Optional, Callable
+from typing import (
+    Optional,
+    Callable,
+    TYPE_CHECKING,
+)

 import pyqtgraph as pg
 from pyqtgraph import Point, functions as fn

@@ -37,6 +42,9 @@ from ..calc import humanize
 from ._label import Label
 from ._style import hcolor, _font

+if TYPE_CHECKING:
+    from ._cursor import Cursor
+

 # TODO: probably worth investigating if we can
 # make .boundingRect() faster:

@@ -84,7 +92,7 @@ class LevelLine(pg.InfiniteLine):

         self._marker = None
         self.only_show_markers_on_hover = only_show_markers_on_hover
-        self.show_markers: bool = True  # presuming the line is hovered at init
+        self.track_marker_pos: bool = False

         # should line go all the way to far end or leave a "margin"
         # space for other graphics (eg. L1 book)

@@ -122,6 +130,9 @@ class LevelLine(pg.InfiniteLine):
         self._y_incr_mult = 1 / chart.linked.symbol.tick_size
         self._right_end_sc: float = 0

+        # use px caching
+        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+
     def txt_offsets(self) -> tuple[int, int]:
         return 0, 0

@@ -216,20 +227,23 @@ class LevelLine(pg.InfiniteLine):
         y: float

     ) -> None:
-        '''Chart coordinates cursor tracking callback.
+        '''
+        Chart coordinates cursor tracking callback.
+
         this is called by our ``Cursor`` type once this line is set to
         track the cursor: for every movement this callback is invoked to
         reposition the line with the current view coordinates.

         '''
         self.movable = True
         self.set_level(y)  # implictly calls reposition handler

     def mouseDragEvent(self, ev):
-        """Override the ``InfiniteLine`` handler since we need more
+        '''
+        Override the ``InfiniteLine`` handler since we need more
         detailed control and start end signalling.

-        """
+        '''
         cursor = self._chart.linked.cursor

         # hide y-crosshair

@@ -281,10 +295,20 @@ class LevelLine(pg.InfiniteLine):
         # show y-crosshair again
         cursor.show_xhair()

-    def delete(self) -> None:
-        """Remove this line from containing chart/view/scene.
-
-        """
+    def get_cursor(self) -> Optional[Cursor]:
+        chart = self._chart
+        cur = chart.linked.cursor
+        if self in cur._hovered:
+            return cur
+
+        return None
+
+    def delete(self) -> None:
+        '''
+        Remove this line from containing chart/view/scene.
+
+        '''
         scene = self.scene()
         if scene:
             for label in self._labels:

@@ -298,9 +322,8 @@ class LevelLine(pg.InfiniteLine):

         # remove from chart/cursor states
         chart = self._chart
-        cur = chart.linked.cursor
-        if self in cur._hovered:
+        cur = self.get_cursor()
+        if cur:
             cur._hovered.remove(self)

         chart.plotItem.removeItem(self)

@@ -308,8 +331,8 @@ class LevelLine(pg.InfiniteLine):
     def mouseDoubleClickEvent(
         self,
         ev: QtGui.QMouseEvent,
-    ) -> None:
+
+    ) -> None:
         # TODO: enter labels edit mode
         print(f'double click {ev}')

@@ -334,30 +357,22 @@ class LevelLine(pg.InfiniteLine):

         line_end, marker_right, r_axis_x = self._chart.marker_right_points()

-        if self.show_markers and self.markers:
-            p.setPen(self.pen)
-            qgo_draw_markers(
-                self.markers,
-                self.pen.color(),
-                p,
-                vb_left,
-                vb_right,
-                marker_right,
-            )
-            # marker_size = self.markers[0][2]
-            self._maxMarkerSize = max([m[2] / 2. for m in self.markers])
-
-        # this seems slower when moving around
-        # order lines.. not sure wtf is up with that.
-        # for now we're just using it on the position line.
-        elif self._marker:
+        # (legacy) NOTE: at one point this seemed slower when moving around
+        # order lines.. not sure if that's still true or why but we've
+        # dropped the original hacky `.pain()` transform stuff for inf
+        # line markers now - check the git history if it needs to be
+        # reverted.
+        if self._marker:
+            if self.track_marker_pos:
+                # make the line end at the marker's x pos
+                line_end = marker_right = self._marker.pos().x()

             # TODO: make this label update part of a scene-aware-marker
             # composed annotation
             self._marker.setPos(
                 QPointF(marker_right, self.scene_y())
             )

             if hasattr(self._marker, 'label'):
                 self._marker.label.update()

@@ -379,16 +394,14 @@ class LevelLine(pg.InfiniteLine):

     def hide(self) -> None:
         super().hide()
-        if self._marker:
-            self._marker.hide()
-            # needed for ``order_line()`` lines currently
-            self._marker.label.hide()
+        mkr = self._marker
+        if mkr:
+            mkr.hide()

     def show(self) -> None:
         super().show()
         if self._marker:
             self._marker.show()
-            # self._marker.label.show()

     def scene_y(self) -> float:
         return self.getViewBox().mapFromView(

@@ -421,6 +434,10 @@ class LevelLine(pg.InfiniteLine):

         return path

+    @property
+    def marker(self) -> LevelMarker:
+        return self._marker
+
     def hoverEvent(self, ev):
         '''
         Mouse hover callback.

@@ -429,17 +446,16 @@ class LevelLine(pg.InfiniteLine):
         cur = self._chart.linked.cursor

         # hovered
-        if (not ev.isExit()) and ev.acceptDrags(QtCore.Qt.LeftButton):
+        if (
+            not ev.isExit()
+            and ev.acceptDrags(QtCore.Qt.LeftButton)
+        ):
             # if already hovered we don't need to run again
             if self.mouseHovering is True:
                 return

             if self.only_show_markers_on_hover:
-                self.show_markers = True
-
-                if self._marker:
-                    self._marker.show()
+                self.show_markers()

             # highlight if so configured
             if self.highlight_on_hover:

@@ -482,11 +498,7 @@ class LevelLine(pg.InfiniteLine):
             cur._hovered.remove(self)

             if self.only_show_markers_on_hover:
-                self.show_markers = False
-
-                if self._marker:
-                    self._marker.hide()
-                    self._marker.label.hide()
+                self.hide_markers()

             if self not in cur._trackers:
                 cur.show_xhair(y_label_level=self.value())

@@ -498,6 +510,15 @@ class LevelLine(pg.InfiniteLine):

         self.update()

+    def hide_markers(self) -> None:
+        if self._marker:
+            self._marker.hide()
+            self._marker.label.hide()
+
+    def show_markers(self) -> None:
+        if self._marker:
+            self._marker.show()
+

 def level_line(

@@ -518,9 +539,10 @@ def level_line(
     **kwargs,

 ) -> LevelLine:
-    """Convenience routine to add a styled horizontal line to a plot.
+    '''
+    Convenience routine to add a styled horizontal line to a plot.

-    """
+    '''
     hl_color = color + '_light' if highlight_on_hover else color

     line = LevelLine(

@@ -702,7 +724,7 @@ def order_line(
         marker = LevelMarker(
             chart=chart,
             style=marker_style,
-            get_level=line.value,
+            get_level=line.value,  # callback
             size=marker_size,
             keep_in_view=False,
         )

@@ -711,7 +733,8 @@ def order_line(
         marker = line.add_marker(marker)

         # XXX: DON'T COMMENT THIS!
-        # this fixes it the artifact issue! .. of course, bounding rect stuff
+        # this fixes it the artifact issue!
+        # .. of course, bounding rect stuff
         line._maxMarkerSize = marker_size

         assert line._marker is marker

@@ -732,7 +755,8 @@ def order_line(

     if action != 'alert':

-        # add a partial position label if we also added a level marker
+        # add a partial position label if we also added a level
+        # marker
         pp_size_label = Label(
             view=view,
             color=line.color,

@@ -766,9 +790,9 @@ def order_line(
         # XXX: without this the pp proportion label next the marker
         # seems to lag? this is the same issue we had with position
         # lines which we handle with ``.update_graphcis()``.
-        # marker._on_paint=lambda marker: pp_size_label.update()
         marker._on_paint = lambda marker: pp_size_label.update()

+    # XXX: THIS IS AN UNTYPED MONKEY PATCH!?!?!
     marker.label = label

     # sanity check
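Aside: a recurring pattern in the `LevelLine` hunks above is replacing the caller-managed `show_markers` boolean with explicit `show_markers()`/`hide_markers()` methods that own the null-check on `_marker`. A minimal standalone sketch of that flag-to-method refactor (stand-in classes, not piker's):

    from typing import Optional

    class Marker:
        def show(self) -> None:
            print('marker shown')

        def hide(self) -> None:
            print('marker hidden')

    class Line:
        def __init__(self, marker: Optional[Marker] = None):
            self._marker = marker

        def show_markers(self) -> None:
            # the null-check lives here instead of at every call site
            if self._marker:
                self._marker.show()

        def hide_markers(self) -> None:
            if self._marker:
                self._marker.hide()

    Line(Marker()).show_markers()   # -> marker shown
    Line().hide_markers()           # no marker: silently a noop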
@@ -0,0 +1,104 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Notifications utils.
+
+"""
+import os
+import platform
+import subprocess
+from typing import Optional
+
+import trio
+
+from ..log import get_logger
+from ..clearing._messages import (
+    Status,
+)
+
+log = get_logger(__name__)
+
+
+_dbus_uid: Optional[str] = ''
+
+
+async def notify_from_ems_status_msg(
+    msg: Status,
+    duration: int = 3000,
+    is_subproc: bool = False,
+
+) -> None:
+    '''
+    Send a linux desktop notification.
+
+    Handle subprocesses by discovering the dbus user id
+    on first call.
+
+    '''
+    if platform.system() != "Linux":
+        return
+
+    # TODO: this in another task?
+    # not sure if this will ever be a bottleneck,
+    # we probably could do graphics stuff first tho?
+
+    if is_subproc:
+        global _dbus_uid
+        su = os.environ.get('SUDO_USER')
+        if (
+            not _dbus_uid
+            and su
+        ):
+
+            # TODO: use `trio` but we need to use nursery.start()
+            # to use pipes?
+            # result = await trio.run_process(
+            result = subprocess.run(
+                [
+                    'id',
+                    '-u',
+                    su,
+                ],
+                stdout=subprocess.PIPE,
+                stderr=subprocess.PIPE,
+                # check=True
+            )
+            _dbus_uid = result.stdout.decode("utf-8").replace('\n', '')
+
+            os.environ['DBUS_SESSION_BUS_ADDRESS'] = (
+                f'unix:path=/run/user/{_dbus_uid}/bus'
+            )
+
+    result = await trio.run_process(
+        [
+            'notify-send',
+            '-u', 'normal',
+            '-t', f'{duration}',
+            'piker',
+
+            # TODO: add in standard fill/exec info that maybe we
+            # pack in a broker independent way?
+            f"'{msg.pformat()}'",
+        ],
+        capture_stdout=True,
+        capture_stderr=True,
+        check=False,
+    )
+    if result.returncode != 0:
+        log.warn(f'No notification daemon installed stderr: {result.stderr}')
+
+    log.runtime(result)
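Aside: the new module above shells out to `notify-send`; a minimal standalone sketch mirroring that invocation outside of piker (Linux only, requires a desktop notification daemon and `trio` installed):

    import trio

    async def desktop_notify(text: str, duration_ms: int = 3000) -> None:
        result = await trio.run_process(
            [
                'notify-send',
                '-u', 'normal',
                '-t', f'{duration_ms}',
                'piker',
                text,
            ],
            capture_stdout=True,
            capture_stderr=True,
            check=False,  # a missing daemon shouldn't crash the caller
        )
        if result.returncode != 0:
            print(f'notify failed: {result.stderr}')

    if __name__ == '__main__':
        trio.run(desktop_notify, 'order filled')

`check=False` matches the diff's choice: notification failure is logged, never raised, since popups are best-effort UI sugar.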
@@ -32,6 +32,7 @@ from PyQt5.QtGui import QPainterPath
 from .._profile import pg_profile_enabled, ms_slower_then
 from ._style import hcolor
 from ..log import get_logger
+from .._profile import Profiler

 if TYPE_CHECKING:
     from ._chart import LinkedSplits

@@ -170,7 +171,7 @@ class BarItems(pg.GraphicsObject):

     ) -> None:

-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,
         )
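Aside: several hunks in this compare swap `pg.debug.Profiler` for an in-house `Profiler` imported from `.._profile`. The real implementation isn't shown here; purely as a shape reference, a stand-in matching the constructor arguments used at these call sites might look like:

    # NOT piker's actual Profiler (which lives in `.._profile`); a
    # minimal stand-in matching the kwargs used in the hunks above.
    # The `__call__` checkpoint style is an assumption borrowed from
    # pyqtgraph's profiler API.
    import time

    class Profiler:
        def __init__(
            self,
            msg: str = '',
            disabled: bool = True,
            ms_threshold: float = 0.0,
        ):
            self._msg = msg
            self._disabled = disabled
            self._ms_threshold = ms_threshold
            self._start = time.perf_counter()

        def __call__(self, checkpoint: str = '') -> None:
            # report a checkpoint only when enabled and over threshold
            if self._disabled:
                return
            elapsed_ms = (time.perf_counter() - self._start) * 1000
            if elapsed_ms >= self._ms_threshold:
                print(f'{self._msg} {checkpoint}: {elapsed_ms:.3f}ms')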
@@ -22,12 +22,9 @@ from __future__ import annotations
 from typing import (
     Optional, Generic,
     TypeVar, Callable,
-    Literal,
 )
-import enum
-import sys

-from pydantic import BaseModel, validator
+# from pydantic import BaseModel, validator
 from pydantic.generics import GenericModel
 from PyQt5.QtWidgets import (
     QWidget,

@@ -38,6 +35,7 @@ from ._forms import (
     # FontScaledDelegate,
     Edit,
 )
+from ..data.types import Struct


 DataType = TypeVar('DataType')

@@ -62,7 +60,7 @@ class Selection(Field[DataType], Generic[DataType]):
     options: dict[str, DataType]
     # value: DataType = None

-    @validator('value')  # , always=True)
+    # @validator('value')  # , always=True)
     def set_value_first(
         cls,

@@ -100,7 +98,7 @@ class Edit(Field[DataType], Generic[DataType]):
     widget_factory = Edit


-class AllocatorPane(BaseModel):
+class AllocatorPane(Struct):

     account = Selection[str](
         options=dict.fromkeys(
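Aside: the hunk above drops the pydantic `@validator` and rebases `AllocatorPane` on an internal `Struct` from `..data.types`, whose definition isn't shown in this view. For flavor only, a standalone sketch of such a model swap using `msgspec` (an assumption about the base type, not a confirmed detail of this diff):

    import msgspec

    # pydantic-style model ...
    # class AllocatorPane(BaseModel):
    #     account: str = 'paper'

    # ... rebased on a msgspec struct: no runtime validators, fast
    # (de)serialization, plain attribute access.
    class AllocatorPane(msgspec.Struct):
        account: str = 'paper'

    pane = AllocatorPane()
    print(msgspec.json.encode(pane))  # b'{"account":"paper"}'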
@@ -18,23 +18,27 @@
 Charting overlay helpers.

 '''
-from typing import Callable, Optional
-from pyqtgraph.Qt.QtCore import (
-    # QObject,
-    # Signal,
-    Qt,
-    # QEvent,
+from collections import defaultdict
+from functools import partial
+from typing import (
+    Callable,
+    Optional,
 )

 from pyqtgraph.graphicsItems.AxisItem import AxisItem
 from pyqtgraph.graphicsItems.ViewBox import ViewBox
-from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
+# from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
 from pyqtgraph.graphicsItems.PlotItem.PlotItem import PlotItem
-from pyqtgraph.Qt.QtCore import QObject, Signal, QEvent
-from pyqtgraph.Qt.QtWidgets import QGraphicsGridLayout, QGraphicsLinearLayout
-
-from ._interaction import ChartView
+from pyqtgraph.Qt.QtCore import (
+    QObject,
+    Signal,
+    QEvent,
+    Qt,
+)
+from pyqtgraph.Qt.QtWidgets import (
+    # QGraphicsGridLayout,
+    QGraphicsLinearLayout,
+)

 __all__ = ["PlotItemOverlay"]

@@ -80,8 +84,8 @@ class ComposedGridLayout:
     ``<axis_name>i`` in the layout.

     The ``item: PlotItem`` passed to the constructor's grid layout is
-    used verbatim as the "main plot" who's view box is give precedence
-    for input handling. The main plot's axes are removed from it's
+    used verbatim as the "main plot" who's view box is given precedence
+    for input handling. The main plot's axes are removed from its
     layout and placed in the surrounding exterior layouts to allow for
     re-ordering if desired.

@@ -89,16 +93,11 @@ class ComposedGridLayout:
     def __init__(
         self,
         item: PlotItem,
-        grid: QGraphicsGridLayout,
-        reverse: bool = False,  # insert items to the "center"

     ) -> None:
-        self.items: list[PlotItem] = []
-        # self.grid = grid
-        self.reverse = reverse
-
-        # TODO: use a ``bidict`` here?
-        self._pi2axes: dict[
+        self.items: list[PlotItem] = []
+        self._pi2axes: dict[  # TODO: use a ``bidict`` here?
             int,
             dict[str, AxisItem],
         ] = {}

@@ -120,12 +119,13 @@ class ComposedGridLayout:

         if name in ('top', 'bottom'):
             orient = Qt.Vertical

         elif name in ('left', 'right'):
             orient = Qt.Horizontal

         layout.setOrientation(orient)

-        self.insert(0, item)
+        self.insert_plotitem(0, item)

         # insert surrounding linear layouts into the parent pi's layout
         # such that additional axes can be appended arbitrarily without

@@ -159,7 +159,7 @@ class ComposedGridLayout:
         # enter plot into list for index tracking
         self.items.insert(index, plotitem)

-    def insert(
+    def insert_plotitem(
         self,
         index: int,
         plotitem: PlotItem,

@@ -171,7 +171,9 @@ class ComposedGridLayout:

         '''
         if index < 0:
-            raise ValueError('`insert()` only supports an index >= 0')
+            raise ValueError(
+                '`.insert_plotitem()` only supports an index >= 0'
+            )

         # add plot's axes in sequence to the embedded linear layouts
         # for each "side" thus avoiding graphics collisions.

@@ -220,7 +222,7 @@ class ComposedGridLayout:

         return index

-    def append(
+    def append_plotitem(
         self,
         item: PlotItem,

@@ -232,7 +234,7 @@ class ComposedGridLayout:
         '''
         # for left and bottom axes we have to first remove
         # items and re-insert to maintain a list-order.
-        return self.insert(len(self.items), item)
+        return self.insert_plotitem(len(self.items), item)

     def get_axis(
         self,

@@ -249,16 +251,16 @@ class ComposedGridLayout:
         named = self._pi2axes[name]
         return named.get(index)

-    def pop(
-        self,
-        item: PlotItem,
-
-    ) -> PlotItem:
-        '''
-        Remove item and restack all axes in list-order.
-
-        '''
-        raise NotImplementedError
+    # def pop(
+    #     self,
+    #     item: PlotItem,

+    # ) -> PlotItem:
+    #     '''
+    #     Remove item and restack all axes in list-order.

+    #     '''
+    #     raise NotImplementedError


 # Unimplemented features TODO:

@@ -279,194 +281,6 @@ class ComposedGridLayout:
 # axis?


-# TODO: we might want to enabled some kind of manual flag to disable
-# this method wrapping during type creation? As example a user could
-# definitively decide **not** to enable broadcasting support by
-# setting something like ``ViewBox.disable_relays = True``?
-def mk_relay_method(
-
-    signame: str,
-    slot: Callable[
-        [ViewBox,
-         'QEvent',
-         Optional[AxisItem]],
-        None,
-    ],
-
-) -> Callable[
-    [
-        ViewBox,
-        # lol, there isn't really a generic type thanks
-        # to the rewrite of Qt's event system XD
-        'QEvent',
-
-        'Optional[AxisItem]',
-        'Optional[ViewBox]',  # the ``relayed_from`` arg we provide
-    ],
-    None,
-]:
-
-    def maybe_broadcast(
-        vb: 'ViewBox',
-        ev: 'QEvent',
-        axis: 'Optional[int]' = None,
-        relayed_from: 'ViewBox' = None,
-
-    ) -> None:
-        '''
-        (soon to be) Decorator which makes an event handler
-        "broadcastable" to overlayed ``GraphicsWidget``s.
-
-        Adds relay signals based on the decorated handler's name
-        and conducts a signal broadcast of the relay signal if there
-        are consumers registered.
-
-        '''
-        # When no relay source has been set just bypass all
-        # the broadcast machinery.
-        if vb.event_relay_source is None:
-            ev.accept()
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        if relayed_from:
-            assert axis is None
-
-            # this is a relayed event and should be ignored (so it does not
-            # halt/short circuit the graphicscene loop). Further the
-            # surrounding handler for this signal must be allowed to execute
-            # and get processed by **this consumer**.
-            # print(f'{vb.name} rx relayed from {relayed_from.name}')
-            ev.ignore()
-
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        if axis is not None:
-            # print(f'{vb.name} handling axis event:\n{str(ev)}')
-            ev.accept()
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        elif (
-            relayed_from is None
-            and vb.event_relay_source is vb  # we are the broadcaster
-            and axis is None
-        ):
-            # Broadcast case: this is a source event which will be
-            # relayed to attached consumers and accepted after all
-            # consumers complete their own handling followed by this
-            # routine's processing. Sequence is,
-            # - pre-relay to all consumers *first* - ``.emit()`` blocks
-            #   until all downstream relay handlers have run.
-            # - run the source handler for **this** event and accept
-            #   the event
-
-            # Access the "bound signal" that is created
-            # on the widget type as part of instantiation.
-            signal = getattr(vb, signame)
-            # print(f'{vb.name} emitting {signame}')
-
-            # TODO/NOTE: we could also just bypass a "relay" signal
-            # entirely and instead call the handlers manually in
-            # a loop? This probably is a lot simpler and also doesn't
-            # have any downside, and allows not touching target widget
-            # internals.
-            signal.emit(
-                ev,
-                axis,
-                # passing this demarks a broadcasted/relayed event
-                vb,
-            )
-            # accept event so no more relays are fired.
-            ev.accept()
-
-            # call underlying wrapped method with an extra
-            # ``relayed_from`` value to denote that this is a relayed
-            # event handling case.
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-    return maybe_broadcast
-
-
-# XXX: :( can't define signals **after** class compile time
-# so this is not really useful.
-# def mk_relay_signal(
-#     func,
-#     name: str = None,
-
-# ) -> Signal:
-#     (
-#         args,
-#         varargs,
-#         varkw,
-#         defaults,
-#         kwonlyargs,
-#         kwonlydefaults,
-#         annotations
-#     ) = inspect.getfullargspec(func)
-
-#     # XXX: generate a relay signal with 1 extra
-#     # argument for a ``relayed_from`` kwarg. Since
-#     # ``'self'`` is already ignored by signals we just need
-#     # to count the arguments since we're adding only 1 (and
-#     # ``args`` will capture that).
-#     numargs = len(args + list(defaults))
-#     signal = Signal(*tuple(numargs * [object]))
-#     signame = name or func.__name__ + 'Relay'
-#     return signame, signal
-
-
-def enable_relays(
-    widget: GraphicsWidget,
-    handler_names: list[str],
-
-) -> list[Signal]:
-    '''
-    Method override helper which enables relay of a particular
-    ``Signal`` from some chosen broadcaster widget to a set of
-    consumer widgets which should operate their event handlers normally
-    but instead of signals "relayed" from the broadcaster.
-
-    Mostly useful for overlaying widgets that handle user input
-    that you want to overlay graphically. The target ``widget`` type must
-    define ``QtCore.Signal``s each with a `'Relay'` suffix for each
-    name provided in ``handler_names: list[str]``.
-
-    '''
-    signals = []
-    for name in handler_names:
-        handler = getattr(widget, name)
-        signame = name + 'Relay'
-        # ensure the target widget defines a relay signal
-        relay = getattr(widget, signame)
-        widget.relays[signame] = name
-        signals.append(relay)
-        method = mk_relay_method(signame, handler)
-        setattr(widget, name, method)
-
-    return signals
-
-
-enable_relays(
-    ChartView,
-    ['wheelEvent', 'mouseDragEvent']
-)


 class PlotItemOverlay:
     '''
     A composite for managing overlaid ``PlotItem`` instances such that

@@ -482,16 +296,18 @@ class PlotItemOverlay:
     ) -> None:

         self.root_plotitem: PlotItem = root_plotitem
+        self.relay_handlers: defaultdict[
+            str,
+            list[Callable],
+        ] = defaultdict(list)

-        vb = root_plotitem.vb
-        vb.event_relay_source = vb  # TODO: maybe change name?
-        vb.setZValue(1000)  # XXX: critical for scene layering/relaying
+        # NOTE: required for scene layering/relaying; this guarantees
+        # the "root" plot receives priority for interaction
+        # events/signals.
+        root_plotitem.vb.setZValue(10)

         self.overlays: list[PlotItem] = []
-        self.layout = ComposedGridLayout(
-            root_plotitem,
-            root_plotitem.layout,
-        )
+        self.layout = ComposedGridLayout(root_plotitem)
         self._relays: dict[str, Signal] = {}

     def add_plotitem(

@@ -499,8 +315,10 @@ class PlotItemOverlay:
         plotitem: PlotItem,
         index: Optional[int] = None,

-        # TODO: we could also put the ``ViewBox.XAxis``
-        # style enum here?
+        # event/signal names which will be broadcasted to all added
+        # (relayee) ``PlotItem``s (eg. ``ViewBox.mouseDragEvent``).
+        relay_events: list[str] = [],

         # (0,),  # link x
         # (1,),  # link y
         # (0, 1),  # link both

@@ -510,58 +328,155 @@ class PlotItemOverlay:

         index = index or len(self.overlays)
         root = self.root_plotitem
-        # layout: QGraphicsGridLayout = root.layout
         self.overlays.insert(index, plotitem)
         vb: ViewBox = plotitem.vb

-        # mark this consumer overlay as ready to expect relayed events
-        # from the root plotitem.
-        vb.event_relay_source = root.vb
-
         # TODO: some sane way to allow menu event broadcast XD
         # vb.setMenuEnabled(False)

-        # TODO: inside the `maybe_broadcast()` (soon to be) decorator
-        # we need have checks that consumers have been attached to
-        # these relay signals.
-        if link_axes != (0, 1):
-
-            # wire up relay signals
-            for relay_signal_name, handler_name in vb.relays.items():
-                # print(handler_name)
-                # XXX: Signal class attrs are bound after instantiation
-                # of the defining type, so we need to access that bound
-                # version here.
-                signal = getattr(root.vb, relay_signal_name)
-                handler = getattr(vb, handler_name)
-                signal.connect(handler)
+        # wire up any relay signal(s) from the source plot to added
+        # "overlays". We use a plain loop instead of mucking with
+        # re-connecting signal/slots which tends to be more invasive and
+        # harder to implement and provides no measurable performance
+        # gain.
+        if relay_events:
+            for ev_name in relay_events:
+                relayee_handler: Callable[
+                    [
+                        ViewBox,
+                        # lol, there isn't really a generic type thanks
+                        # to the rewrite of Qt's event system XD
+                        QEvent,

+                        AxisItem | None,
+                    ],
+                    None,
+                ] = getattr(vb, ev_name)

+                sub_handlers: list[Callable] = self.relay_handlers[ev_name]

+                # on the first registry of a relayed event we pop the
+                # root's handler and override it to a custom broadcaster
+                # routine.
+                if not sub_handlers:

+                    src_handler = getattr(
+                        root.vb,
+                        ev_name,
+                    )

+                    def broadcast(
+                        ev: 'QEvent',

+                        # TODO: drop this viewbox specific input and
+                        # allow a predicate to be passed in by user.
+                        axis: 'Optional[int]' = None,

+                        *,

+                        # these are bound in by the ``partial`` below
+                        # and ensure a unique broadcaster per event.
+                        ev_name: str = None,
+                        src_handler: Callable = None,
+                        relayed_from: 'ViewBox' = None,

+                        # remaining inputs the source handler expects
+                        **kwargs,

+                    ) -> None:
+                        '''
+                        Broadcast signal or event: this is a source
+                        event which will be relayed to attached
+                        "relayee" plot item consumers.

+                        The event is accepted halting any further
+                        handlers from being triggered.

+                        Sequence is,
+                        - pre-relay to all consumers *first* - exactly
+                          like how a ``Signal.emit()`` blocks until all
+                          downstream relay handlers have run.
+                        - run the event's source handler event

+                        '''
+                        ev.accept()

+                        # broadcast first to relayees *first*. trigger
+                        # relay of event to all consumers **before**
+                        # processing/consumption in the source handler.
+                        relayed_handlers = self.relay_handlers[ev_name]

+                        assert getattr(vb, ev_name).__name__ == ev_name

+                        # TODO: generalize as an input predicate
+                        if axis is None:
+                            for handler in relayed_handlers:
+                                handler(
+                                    ev,
+                                    axis=axis,
+                                    **kwargs,
+                                )

+                        # run "source" widget's handler last
+                        src_handler(
+                            ev,
+                            axis=axis,
+                        )

+                    # dynamic handler override on the publisher plot
+                    setattr(
+                        root.vb,
+                        ev_name,
+                        partial(
+                            broadcast,
+                            ev_name=ev_name,
+                            src_handler=src_handler
+                        ),
+                    )

+                else:
+                    assert getattr(root.vb, ev_name)
+                    assert relayee_handler not in sub_handlers

+                # append relayed-to widget's handler to relay table
+                sub_handlers.append(relayee_handler)

         # link dim-axes to root if requested by user.
-        # TODO: solve more-then-wanted scaled panning on click drag
-        # which seems to be due to broadcast. So we probably need to
-        # disable broadcast when axes are linked in a particular
-        # dimension?
         for dim in link_axes:
             # link x and y axes to new view box such that the top level
             # viewbox propagates to the root (and whatever other
             # plotitem overlays that have been added).
             vb.linkView(dim, root.vb)

-        # make overlaid viewbox impossible to focus since the top
-        # level should handle all input and relay to overlays.
-        # NOTE: this was solved with the `setZValue()` above!
+        # => NOTE: in order to prevent "more-then-linear" scaled
+        # panning moves on (for eg. click-drag) certain range change
+        # signals (i.e. ``.sigXRangeChanged``), the user needs to be
+        # careful that any broadcasted ``relay_events`` are are short
+        # circuited in sub-handlers (aka relayee's) implementations. As
+        # an example if a ``ViewBox.mouseDragEvent`` is broadcasted, the
+        # overlayed implementations need to be sure they either don't
+        # also link the x-axes (by not providing ``link_axes=(0,)``
+        # above) or that the relayee ``.mouseDragEvent()`` handlers are
+        # ready to "``return`` early" in the case that
+        # ``.sigXRangeChanged`` is emitted as part of linked axes.
+        # For more details on such signalling mechanics peek in
+        # ``ViewBox.linkView()``.

-        # TODO: we will probably want to add a "focus" api such that
-        # a new "top level" ``PlotItem`` can be selected dynamically
-        # (and presumably the axes dynamically sorted to match).
+        # make overlaid viewbox impossible to focus since the top level
+        # should handle all input and relay to overlays. Note that the
+        # "root" plot item gettingn interaction priority is configured
+        # with the ``.setZValue()`` during init.
         vb.setFlag(
             vb.GraphicsItemFlag.ItemIsFocusable,
             False
         )
         vb.setFocusPolicy(Qt.NoFocus)

+        # => TODO: add a "focus" api for switching the "top level"
+        # ``PlotItem`` dynamically.
+
         # append-compose into the layout all axes from this plot
-        self.layout.insert(index, plotitem)
+        self.layout.insert_plotitem(index, plotitem)

         plotitem.setGeometry(root.vb.sceneBoundingRect())

@@ -579,24 +494,7 @@ class PlotItemOverlay:
         root.vb.setFocus()
         assert root.vb.focusWidget()

-    # XXX: do we need this? Why would you build then destroy?
-    def remove_plotitem(self, plotItem: PlotItem) -> None:
-        '''
-        Remove this ``PlotItem`` from the overlayed set making not shown
-        and unable to accept input.
-
-        '''
-        ...
-
-    # TODO: i think this would be super hot B)
-    def focus_item(self, plotitem: PlotItem) -> PlotItem:
-        '''
-        Apply focus to a contained PlotItem thus making it the "top level"
-        item in the overlay able to accept peripheral's input from the user
-        and responsible for zoom and panning control via its ``ViewBox``.
-
-        '''
-        ...
+        vb.setZValue(100)

     def get_axis(
         self,

@@ -630,8 +528,9 @@ class PlotItemOverlay:

         return axes

-    # TODO: i guess we need this if you want to detach existing plots
-    # dynamically? XXX: untested as of now.
+    # XXX: untested as of now.
+    # TODO: need this as part of selecting a different root/source
+    # plot to rewire interaction event broadcast dynamically.
     def _disconnect_all(
         self,
         plotitem: PlotItem,

@@ -646,3 +545,22 @@ class PlotItemOverlay:
             disconnected.append(sig)

         return disconnected
+
+    # XXX: do we need this? Why would you build then destroy?
+    # def remove_plotitem(self, plotItem: PlotItem) -> None:
+    #     '''
+    #     Remove this ``PlotItem`` from the overlayed set making not shown
+    #     and unable to accept input.

+    #     '''
+    #     ...

+    # TODO: i think this would be super hot B)
+    # def focus_plotitem(self, plotitem: PlotItem) -> PlotItem:
+    #     '''
+    #     Apply focus to a contained PlotItem thus making it the "top level"
+    #     item in the overlay able to accept peripheral's input from the user
+    #     and responsible for zoom and panning control via its ``ViewBox``.

+    #     '''
+    #     ...
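Aside: the core move in the overlay rework above is replacing class-level Qt relay `Signal`s with a plain handler-override: the root view's event handler is swapped for a `functools.partial` that fans the event out to registered relayees first, then runs the original. A standalone sketch of that pattern with plain Python objects (no Qt required):

    from collections import defaultdict
    from functools import partial
    from typing import Callable

    class Src:
        def on_event(self, ev) -> None:
            print(f'src handling {ev}')

    src = Src()
    relay_handlers: defaultdict[str, list[Callable]] = defaultdict(list)

    def broadcast(ev, *, ev_name: str, src_handler: Callable) -> None:
        for handler in relay_handlers[ev_name]:
            handler(ev)       # relayees run *first* ...
        src_handler(ev)       # ... source handler runs last

    # capture the bound method, then shadow it on the instance
    src_handler = src.on_event
    setattr(src, 'on_event', partial(broadcast, ev_name='on_event', src_handler=src_handler))

    relay_handlers['on_event'].append(lambda ev: print(f'relayee got {ev}'))
    src.on_event('wheel')  # -> relayee got wheel / src handling wheel

As the diff's comments note, a plain loop over handlers is less invasive than re-connecting signal/slots and measures no slower.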
@@ -49,12 +49,17 @@ def xy_downsample(

     x_spacer: float = 0.5,

-) -> tuple[np.ndarray, np.ndarray]:
+) -> tuple[
+    np.ndarray,
+    np.ndarray,
+    float,
+    float,
+]:

     # downsample whenever more then 1 pixels per datum can be shown.
     # always refresh data bounds until we get diffing
     # working properly, see above..
-    bins, x, y = ds_m4(
+    bins, x, y, ymn, ymx = ds_m4(
         x,
         y,
         uppx,

@@ -67,7 +72,7 @@ def xy_downsample(
     )).flatten()
     y = y.flatten()

-    return x, y
+    return x, y, ymn, ymx


 @njit(
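Aside: `xy_downsample()` now also returns the y-extents (`ymn`, `ymx`) that `ds_m4()` computes anyway, so callers can seed a y-range without re-scanning the output arrays. A tiny runnable illustration of why those extents come "for free" from M4-style binning:

    # per-bin min/max are already required by M4 downsampling, so the
    # global y-extents fall out of the same pass.
    import numpy as np

    y = np.arange(12, dtype=float)
    bins = y.reshape(-1, 4)            # toy fixed-width binning
    per_bin_mn = bins.min(axis=1)      # needed by M4 regardless
    per_bin_mx = bins.max(axis=1)
    ymn, ymx = per_bin_mn.min(), per_bin_mx.max()
    assert (ymn, ymx) == (y.min(), y.max())

Call sites unpack the widened tuple, e.g. `x_ds, y_ds, ymn, ymx = xy_downsample(x, y, uppx)` (a hedged consumer-side sketch; the exact call sites aren't shown in this view).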
@@ -15,11 +15,15 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 """
-Customization of ``pyqtgraph`` core routines to speed up our use mostly
-based on not requiring "scentific precision" for pixel perfect view
-transforms.
+Customization of ``pyqtgraph`` core routines and various types normally
+for speedups.
+
+Generally, our does not require "scentific precision" for pixel perfect
+view transforms.
 
 """
+from typing import Optional
 
 import pyqtgraph as pg
 
 
@@ -46,3 +50,211 @@ def _do_overrides() -> None:
     """
     # we don't care about potential fp issues inside Qt
     pg.functions.invertQTransform = invertQTransform
+    pg.PlotItem = PlotItem
+
+
+# NOTE: the below customized type contains all our changes on a method
+# by method basis as per the diff:
+# https://github.com/pyqtgraph/pyqtgraph/commit/8e60bc14234b6bec1369ff4192dbfb82f8682920#diff-a2b5865955d2ba703dbc4c35ff01aa761aa28d2aeaac5e68d24e338bc82fb5b1R500
+class PlotItem(pg.PlotItem):
+    '''
+    Overrides for the core plot object mostly pertaining to overlayed
+    multi-view management as it relates to multi-axis managment.
+
+    '''
+    def __init__(
+        self,
+        parent=None,
+        name=None,
+        labels=None,
+        title=None,
+        viewBox=None,
+        axisItems=None,
+        default_axes=['left', 'bottom'],
+        enableMenu=True,
+        **kargs
+    ):
+        super().__init__(
+            parent=parent,
+            name=name,
+            labels=labels,
+            title=title,
+            viewBox=viewBox,
+            axisItems=axisItems,
+            # default_axes=default_axes,
+            enableMenu=enableMenu,
+            kargs=kargs,
+        )
+        # self.setAxisItems(
+        #     axisItems,
+        #     default_axes=default_axes,
+        # )
+
+    # NOTE: this is an entirely new method not in upstream.
+    def removeAxis(
+        self,
+        name: str,
+        unlink: bool = True,
+
+    ) -> Optional[pg.AxisItem]:
+        """
+        Remove an axis from the contained axis items
+        by ```name: str```.
+
+        This means the axis graphics object will be removed
+        from the ``.layout: QGraphicsGridLayout`` as well as unlinked
+        from the underlying associated ``ViewBox``.
+
+        If the ``unlink: bool`` is set to ``False`` then the axis will
+        stay linked to its view and will only be removed from the
+        layout.
+
+        If no axis with ``name: str`` is found then this is a noop.
+
+        Return the axis instance that was removed.
+
+        """
+        entry = self.axes.pop(name, None)
+
+        if not entry:
+            return
+
+        axis = entry['item']
+        self.layout.removeItem(axis)
+        axis.scene().removeItem(axis)
+        if unlink:
+            axis.unlinkFromView()
+
+        self.update()
+
+        return axis
+
+    # Why do we need to always have all axes created?
+    #
+    # I don't understand this at all.
+    #
+    # Everything seems to work if you just always apply the
+    # set passed to this method **EXCEPT** for some super weird reason
+    # the view box geometry still computes as though the space for the
+    # `'bottom'` axis is always there **UNLESS** you always add that
+    # axis but hide it?
+    #
+    # Why in tf would this be the case!?!?
+    def setAxisItems(
+        self,
+        # XXX: yeah yeah, i know we can't use type annots like this yet.
+        axisItems: Optional[dict[str, pg.AxisItem]] = None,
+        add_to_layout: bool = True,
+        default_axes: list[str] = ['left', 'bottom'],
+    ):
+        """
+        Override axis item setting to only
+
+        """
+        axisItems = axisItems or {}
+
+        # XXX: wth is is this even saying?!?
+        # Array containing visible axis items
+        # Also containing potentially hidden axes, but they are not
+        # touched so it does not matter
+        # visibleAxes = ['left', 'bottom']
+        # Note that it does not matter that this adds
+        # some values to visibleAxes a second time
+
+        # XXX: uhhh wat^ ..?
+
+        visibleAxes = list(default_axes) + list(axisItems.keys())
+
+        # TODO: we should probably invert the loop here to not loop the
+        # predefined "axis name set" and instead loop the `axisItems`
+        # input and lookup indices from a predefined map.
+        for name, pos in (
+            ('top', (1, 1)),
+            ('bottom', (3, 1)),
+            ('left', (2, 0)),
+            ('right', (2, 2))
+        ):
+            if (
+                name in self.axes and
+                name in axisItems
+            ):
+                # we already have an axis entry for this name
+                # so remove the existing entry.
+                self.removeAxis(name)
+
+            # elif name not in axisItems:
+            #     # this axis entry is not provided in this call
+            #     # so remove any old/existing entry.
+            #     self.removeAxis(name)
+
+            # Create new axis
+            if name in axisItems:
+                axis = axisItems[name]
+                if axis.scene() is not None:
+                    if (
+                        name not in self.axes
+                        or axis != self.axes[name]["item"]
+                    ):
+                        raise RuntimeError(
+                            "Can't add an axis to multiple plots. Shared axes"
+                            " can be achieved with multiple AxisItem instances"
+                            " and set[X/Y]Link.")
+
+            else:
+                # Set up new axis
+
+                # XXX: ok but why do we want to add axes for all entries
+                # if not desired by the user? The only reason I can see
+                # adding this is without it there's some weird
+                # ``ViewBox`` geometry bug.. where a gap for the
+                # 'bottom' axis is somehow left in?
+                axis = pg.AxisItem(orientation=name, parent=self)
+
+            axis.linkToView(self.vb)
+
+            # XXX: shouldn't you already know the ``pos`` from the name?
+            # Oh right instead of a global map that would let you
+            # reasily look that up it's redefined over and over and over
+            # again in methods..
+            self.axes[name] = {'item': axis, 'pos': pos}
+
+            # NOTE: in the overlay case the axis may be added to some
+            # other layout and should not be added here.
+            if add_to_layout:
+                self.layout.addItem(axis, *pos)
+
+            # place axis above images at z=0, items that want to draw
+            # over the axes should be placed at z>=1:
+            axis.setZValue(0.5)
+            axis.setFlag(
+                axis.GraphicsItemFlag.ItemNegativeZStacksBehindParent
+            )
+            if name in visibleAxes:
+                self.showAxis(name, True)
+            else:
+                # why do we need to insert all axes to ``.axes`` and
+                # only hide the ones the user doesn't specify? It all
+                # seems to work fine without doing this except for this
+                # weird gap for the 'bottom' axis that always shows up
+                # in the view box geometry??
+                self.hideAxis(name)
+
+    def updateGrid(
+        self,
+        *args,
+    ):
+        alpha = self.ctrl.gridAlphaSlider.value()
+        x = alpha if self.ctrl.xGridCheck.isChecked() else False
+        y = alpha if self.ctrl.yGridCheck.isChecked() else False
+        for name, dim in (
+            ('top', x),
+            ('bottom', x),
+            ('left', y),
+            ('right', y)
+        ):
+            if name in self.axes:
+                self.getAxis(name).setGrid(dim)
+        # self.getAxis('bottom').setGrid(x)
+        # self.getAxis('left').setGrid(y)
+        # self.getAxis('right').setGrid(y)
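The new removeAxis/setAxisItems pair lets an axis be swapped in or out of a plot's grid layout independently of its ViewBox link, which is what the multi-axis overlay machinery needs. A short usage sketch against the patched subclass above (the helper name and the 'right' orientation are illustrative, not from the diff):

from typing import Optional
import pyqtgraph as pg

def swap_right_axis(plot, axis: pg.AxisItem) -> Optional[pg.AxisItem]:
    # `plot` is assumed to be an instance of the patched PlotItem;
    # drop any existing right axis (returns None when absent) then
    # install the replacement; `add_to_layout=False` would keep it
    # out of this plot's own grid for the overlay case.
    old = plot.removeAxis('right')
    plot.setAxisItems({'right': axis}, add_to_layout=True)
    return old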
@@ -19,10 +19,15 @@ Position info and display
 
 """
 from __future__ import annotations
+from copy import copy
 from dataclasses import dataclass
 from functools import partial
 from math import floor, copysign
-from typing import Optional
+from typing import (
+    Callable,
+    Optional,
+    TYPE_CHECKING,
+)
 
 
 # from PyQt5.QtWidgets import QStyle
 
@@ -37,27 +42,38 @@ from ._anchors import (
     gpath_pin,
 )
 from ..calc import humanize, pnl, puterize
-from ..clearing._allocate import Allocator, Position
+from ..clearing._allocate import Allocator
+from ..pp import Position
 from ..data._normalize import iterticks
-from ..data.feed import Feed
+from ..data.feed import (
+    Feed,
+    Flume,
+)
+from ..data.types import Struct
 from ._label import Label
 from ._lines import LevelLine, order_line
 from ._style import _font
 from ._forms import FieldsForm, FillStatusBar, QLabel
 from ..log import get_logger
 
+if TYPE_CHECKING:
+    from ._chart import (
+        ChartPlotWidget,
+    )
+
 log = get_logger(__name__)
 _pnl_tasks: dict[str, bool] = {}
 
 
 async def update_pnl_from_feed(
 
-    feed: Feed,
+    flume: Flume,
     order_mode: OrderMode,  # noqa
     tracker: PositionTracker,
 
 ) -> None:
-    '''Real-time display the current pp's PnL in the appropriate label.
+    '''
+    Real-time display the current pp's PnL in the appropriate label.
 
     ``ValueError`` if this task is spawned where there is a net-zero pp.
 
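The TYPE_CHECKING guard added above is the standard way to keep an annotation-only import (here ChartPlotWidget) from creating a runtime import cycle between UI modules. A generic sketch of the pattern:

from __future__ import annotations
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # evaluated only by static type checkers, never at runtime, so
    # a circular dependency between the two modules cannot trigger.
    from ._chart import ChartPlotWidget

def takes_chart(chart: ChartPlotWidget) -> None:
    # with `annotations` imported, this hint stays a lazy string and
    # never touches the real class at call time.
    ...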
@@ -66,7 +82,7 @@ async def update_pnl_from_feed(
 
     pp = order_mode.current_pp
     live = pp.live_pp
-    key = live.symbol.key
+    key = live.symbol.front_fqsn()
 
     log.info(f'Starting pnl display for {pp.alloc.account}')
 
@@ -82,7 +98,7 @@ async def update_pnl_from_feed(
 
     # real-time update pnl on the status pane
     try:
-        async with feed.stream.subscribe() as bstream:
+        async with flume.stream.subscribe() as bstream:
             # last_tick = time.time()
             async for quotes in bstream:
 
@@ -105,8 +121,8 @@ async def update_pnl_from_feed(
                     # compute and display pnl status
                     order_mode.pane.pnl_label.format(
                         pnl=copysign(1, size) * pnl(
-                            # live.avg_price,
-                            order_mode.current_pp.live_pp.avg_price,
+                            # live.ppu,
+                            order_mode.current_pp.live_pp.ppu,
                             tick['price'],
                         ),
                     )
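The copysign(1, size) factor in the hunk above flips the sign of the displayed PnL for short positions. A worked sketch with a simplified stand-in for the pnl() helper (the real piker.calc.pnl may differ in detail):

from math import copysign

def pnl(init_price: float, price: float) -> float:
    # simplified stand-in: fractional return from entry (the
    # position's per-unit price, "ppu") to the current price.
    return (price - init_price) / init_price

size = 2    # long 2 units, 100 -> 110
print(copysign(1, size) * pnl(100.0, 110.0))   # 0.1, i.e. +10%

size = -2   # short 2 units over the same move
print(copysign(1, size) * pnl(100.0, 110.0))   # -0.1, i.e. -10%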
@@ -165,19 +181,36 @@ class SettingsPane:
         key: str,
         value: str,
 
+    ) -> None:
+        '''
+        Try to apply some input setting (by the user), revert to
+        previous setting if it fails display new value if applied.
+
+        '''
+        self.apply_setting(key, value)
+        self.update_status_ui(self.order_mode.current_pp)
+
+    def apply_setting(
+        self,
+
+        key: str,
+        value: str,
+
     ) -> bool:
         '''
         Called on any order pane edit field value change.
 
         '''
         mode = self.order_mode
+        tracker = mode.current_pp
+        alloc = tracker.alloc
 
         # an account switch request
         if key == 'account':
 
             # hide details on the old selection
             old_tracker = mode.current_pp
-            old_tracker.hide_info()
+            old_tracker.nav.hide_info()
 
             # re-assign the order mode tracker
             account_name = value
 
@@ -187,7 +220,7 @@ class SettingsPane:
             # a ``brokerd`) then error and switch back to the last
             # selection.
             if tracker is None:
-                sym = old_tracker.chart.linked.symbol.key
+                sym = old_tracker.charts[0].linked.symbol.key
                 log.error(
                     f'Account `{account_name}` can not be set for {sym}'
                 )
 
@@ -198,39 +231,44 @@ class SettingsPane:
             self.order_mode.current_pp = tracker
             assert tracker.alloc.account == account_name
             self.form.fields['account'].setCurrentText(account_name)
-            tracker.show()
-            tracker.hide_info()
+            tracker.nav.show()
+            tracker.nav.hide_info()
 
             self.display_pnl(tracker)
 
             # load the new account's allocator
             alloc = tracker.alloc
 
-        else:
-            tracker = mode.current_pp
-            alloc = tracker.alloc
-
-        size_unit = alloc.size_unit
-
         # WRITE any settings to current pp's allocator
-        try:
         if key == 'size_unit':
             # implicit re-write of value if input
             # is the "text name" of the units.
             # yah yah, i know this is badd..
             alloc.size_unit = value
-            else:
+
+        elif key != 'account':  # numeric fields entry
+            try:
                 value = puterize(value)
+            except ValueError as err:
+                log.error(err.args[0])
+                return False
 
             if key == 'limit':
+                if value <= 0:
+                    log.error('limit must be > 0')
+                    return False
+
                 pp = mode.current_pp.live_pp
 
-                if size_unit == 'currency':
+                if alloc.size_unit == 'currency':
                     dsize = pp.dsize
                     if dsize > value:
                         log.error(
                             f'limit must > then current pp: {dsize}'
                         )
-                        raise ValueError
+                        # reset position size value
+                        alloc.currency_limit = dsize
+                        return False
 
                     alloc.currency_limit = value
 
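The refactor above replaces the old try/raise ValueError flow with a validate-and-bail pattern: parse the field, range-check it, log and return False so the caller (on_ui_settings_change) can simply re-render the last good state. A self-contained sketch of that pattern, with illustrative names only:

import logging

log = logging.getLogger(__name__)

def apply_setting(key: str, value: str) -> bool:
    # sketch: every failure path logs and returns False instead of
    # raising, so UI-side input handling stays one code path.
    try:
        num = float(value)
    except ValueError as err:
        log.error(err.args[0])
        return False

    if key == 'limit' and num <= 0:
        log.error('limit must be > 0')
        return False

    # ... write `num` through to the allocator here ...
    return True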
@@ -246,30 +284,50 @@ class SettingsPane:
 
         elif key == 'slots':
             if value <= 0:
-                raise ValueError('slots must be > 0')
+                # raise ValueError('slots must be > 0')
+                log.error('limit must be > 0')
+                return False
 
             alloc.slots = int(value)
 
         else:
             log.error(f'Unknown setting {key}')
             raise ValueError
 
-        log.info(f'settings change: {key}: {value}')
+        # don't log account "change" case since it'll be submitted
+        # on every mouse interaction.
+        log.runtime(f'settings change: {key}: {value}')
 
-        except ValueError:
-            log.error(f'Invalid value for `{key}`: {value}')
+        # TODO: maybe return a diff of settings so if we can an error we
+        # can have general input handling code to report it through the
+        # UI in some way?
+        return True
+
+    def update_status_ui(
+        self,
+        tracker: PositionTracker,
+
+    ) -> None:
+
+        alloc = tracker.alloc
+        slots = alloc.slots
+        used = alloc.slots_used(tracker.live_pp)
+        size = tracker.live_pp.size
+        dsize = tracker.live_pp.dsize
 
         # READ out settings and update the status UI / settings widgets
-        suffix = {'currency': ' $', 'units': ' u'}[size_unit]
-        limit = alloc.limit()
+        suffix = {'currency': ' $', 'units': ' u'}[alloc.size_unit]
+        size_unit, limit = alloc.limit_info()
 
-        # TODO: a reverse look up from the position to the equivalent
-        # account(s), if none then look to user config for default?
-        self.update_status_ui(pp=tracker)
-
         step_size, currency_per_slot = alloc.step_sizes()
 
-        if size_unit == 'currency':
+        if alloc.size_unit == 'currency':
             step_size = currency_per_slot
+            if dsize >= limit:
+                self.apply_setting('limit', limit)
+
+        elif size >= limit:
+            self.apply_setting('limit', limit)
 
         self.step_label.format(
             step_size=str(humanize(step_size)) + suffix
 
@@ -288,22 +346,6 @@ class SettingsPane:
         # update of level marker size label based on any new settings
         tracker.update_from_pp()
 
-        # TODO: maybe return a diff of settings so if we can an error we
-        # can have general input handling code to report it through the
-        # UI in some way?
-        return True
-
-    def update_status_ui(
-        self,
-
-        pp: PositionTracker,
-
-    ) -> None:
-
-        alloc = pp.alloc
-        slots = alloc.slots
-        used = alloc.slots_used(pp.live_pp)
-
         # calculate proportion of position size limit
         # that exists and display in fill bar
         # TODO: what should we do for fractional slot pps?
@@ -314,7 +356,7 @@ class SettingsPane:
             # min(round(prop * slots), slots)
             min(used, slots)
         )
-        self.update_account_icons({alloc.account: pp.live_pp})
+        self.update_account_icons({alloc.account: tracker.live_pp})
 
     def update_account_icons(
         self,
 
@@ -340,7 +382,9 @@ class SettingsPane:
         tracker: PositionTracker,
 
     ) -> None:
-        '''Display the PnL for the current symbol and personal positioning (pp).
+        '''
+        Display the PnL for the current symbol and personal positioning
+        (pp).
 
         If a position is open start a background task which will
         real-time update the pnl label in the settings pane.
 
@@ -349,24 +393,25 @@ class SettingsPane:
         mode = self.order_mode
         sym = mode.chart.linked.symbol
         size = tracker.live_pp.size
-        feed = mode.quote_feed
+        flume: Feed = mode.feed.flumes[sym.fqsn]
        pnl_value = 0
 
         if size:
             # last historical close price
-            last = feed.shm.array[-1][['close']][0]
+            last = flume.rt_shm.array[-1][['close']][0]
             pnl_value = copysign(1, size) * pnl(
-                tracker.live_pp.avg_price,
+                tracker.live_pp.ppu,
                 last,
             )
 
             # maybe start update task
             global _pnl_tasks
-            if sym.key not in _pnl_tasks:
-                _pnl_tasks[sym.key] = True
+            fqsn = sym.front_fqsn()
+            if fqsn not in _pnl_tasks:
+                _pnl_tasks[fqsn] = True
                 self.order_mode.nursery.start_soon(
                     update_pnl_from_feed,
-                    feed,
+                    flume,
                     mode,
                     tracker,
                 )
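display_pnl() now resolves the per-symbol Flume from the feed by fully-qualified symbol name and reads the last close out of that flume's real-time shared-memory buffer. Condensed into a helper for clarity (attribute names match the diff; the surrounding objects are assumed):

def last_close_for(mode, fqsn: str) -> float:
    # one Flume per fully-qualified symbol name (e.g. an assumed
    # 'btcusdt.binance'), each exposing a real-time shm array of
    # OHLC rows; take the newest row's close field.
    flume = mode.feed.flumes[fqsn]
    return flume.rt_shm.array[-1][['close']][0]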
@@ -375,15 +420,15 @@ class SettingsPane:
         self.pnl_label.format(pnl=pnl_value)
 
 
-def position_line(
+def pp_line(
 
-    chart: 'ChartPlotWidget',  # noqa
+    chart: ChartPlotWidget,  # noqa
     size: float,
     level: float,
     color: str,
+    marker: LevelMarker,
 
     orient_v: str = 'bottom',
-    marker: Optional[LevelMarker] = None,
 
 ) -> LevelLine:
     '''
 
@@ -414,16 +459,7 @@ def position_line(
         show_markers=False,
     )
 
-    if marker:
-        # configure marker to position data
-
-        if size > 0:  # long
-            style = '|<'  # point "up to" the line
-        elif size < 0:  # short
-            style = '>|'  # point "down to" the line
-
-        marker.style = style
+    # TODO: use `LevelLine.add_marker()`` for this instead?
 
     # set marker color to same as line
     marker.setPen(line.currentPen)
     marker.setBrush(fn.mkBrush(line.currentPen.color()))
@@ -431,77 +467,331 @@ def position_line(
     marker.update()
     marker.show()
 
+    line._marker = marker
+    line.track_marker_pos = True
+
     # show position marker on view "edge" when out of view
     vb = line.getViewBox()
     vb.sigRangeChanged.connect(marker.position_in_view)
 
-    line.set_level(level)
-
     return line
 
 
+_derivs = (
+    'future',
+    'continuous_future',
+    'option',
+    'futures_option',
+)
+
+
+# TODO: move into annoate module?
+def mk_level_marker(
+    chart: ChartPlotWidget,
+    size: float,
+    level: float,
+    on_paint: Callable,
+
+) -> LevelMarker:
+    '''
+    Allocate and return nan arrow graphics element.
+
+    '''
+    # scale marker size with dpi-aware font size
+    font_size = _font.font.pixelSize()
+    arrow_size = floor(1.375 * font_size)
+    arrow = LevelMarker(
+        chart=chart,
+        style='|<',  # actual style is set by caller based on size
+        get_level=level,
+        size=arrow_size,
+        on_paint=on_paint,
+    )
+    arrow.show()
+    return arrow
+
+
+class Nav(Struct):
+    '''
+    Composite for holding a set of charts and respective (by order)
+    graphics-elements which display position information acting as sort
+    of "navigation" system for a position.
+
+    '''
+    charts: dict[int, ChartPlotWidget]
+    pp_labels: dict[str, Label] = {}
+    size_labels: dict[str, Label] = {}
+    lines: dict[str, Optional[LevelLine]] = {}
+    level_markers: dict[str, Optional[LevelMarker]] = {}
+    color: str = 'default_lightest'
+
+    def update_ui(
+        self,
+        account: str,
+        price: float,
+        size: float,
+        slots_used: float,
+        size_digits: Optional[int] = None,
+
+    ) -> None:
+        '''
+        Update personal position level line.
+
+        '''
+        for key, chart in self.charts.items():
+            size_digits = size_digits or chart.linked.symbol.lot_size_digits
+            line = self.lines.get(key)
+            level_marker = self.level_markers[key]
+            pp_label = self.pp_labels[key]
+
+            if size:
+                # create and show a pp line if none yet exists
+                if line is None:
+                    arrow = self.level_markers[key]
+                    line = pp_line(
+                        chart=chart,
+                        level=price,
+                        size=size,
+                        color=self.color,
+                        marker=arrow,
+                    )
+                    self.lines[key] = line
+
+                # modify existing indicator line
+                line.set_level(price)
+
+                # update LHS sizing label
+                line.update_labels({
+                    'size': size,
+                    'size_digits': size_digits,
+                    'fiat_size': round(price * size, ndigits=2),
+
+                    # TODO: per account lines on a single (or very
+                    # related) symbol
+                    'account': account,
+                })
+                line.show()
+
+                # always show arrow-marker when a non-zero
+                # pos size.
+                level_marker.show()
+
+                # configure marker to position data
+                if size > 0:  # long
+                    # point "up to" the line
+                    level_marker.style = '|<'
+
+                elif size < 0:  # short
+                    # point "down to" the line
+                    level_marker.style = '>|'
+
+            # remove line from view for a net-zero pos
+            else:
+                self.hide()
+
+            # label updates
+            size_label = self.size_labels[key]
+            size_label.fields['slots_used'] = slots_used
+            size_label.render()
+
+            # set arrow marker to correct level
+            level_marker.level = price
+
+            # these updates are critical to avoid lag on view/scene changes
+            # TODO: couldn't we integrate this into
+            # a ``.inter_ui_elements_and_update()``?
+            level_marker.update()  # trigger paint
+            pp_label.update()
+            size_label.update()
+
+    def level(self) -> float:
+        '''
+        Return the "level" value from the underlying ``LevelLine`` which tracks
+        the "average position" price defined the represented position instance.
+
+        '''
+        if self.lines:
+            for key, line in self.lines.items():
+                if line:
+                    return line.value()
+        return 0
+
+    def iter_ui_elements(self) -> tuple[
+        Label,
+        Label,
+        LevelLine,
+        LevelMarker,
+    ]:
+        for key, chart in self.charts.items():
+            yield (
+                self.pp_labels[key],
+                self.size_labels[key],
+                self.lines.get(key),
+                self.level_markers[key],
+            )
+
+    def show(self) -> None:
+        '''
+        Show all UI elements on all managed charts.
+
+        '''
+        for (
+            pp_label,
+            size_label,
+            line,
+            level_marker,
+        ) in self.iter_ui_elements():
+
+            # NOTE: be sure to re-trigger arrow/label placement in case
+            # a new sidepane or other widget (like the search bar) was
+            # dynamically swapped into the chart-row-widget-space in
+            # which case we want to reposition in the view but including
+            # the new x-distance added by that sidepane. See details in
+            # ``LevelMarker.position_in_view()`` but more less ``.
+            # ``ChartPlotWidget.self.marker_right_points()`` gets called
+            # which itself eventually calls `.getAxis.pos().x()` and
+            # it's THIS that needs to be called **AFTER** the sidepane
+            # has been added..
+            level_marker.show()
+            level_marker.position_in_view()
+
+            # labels
+            pp_label.show()
+            size_label.show()
+
+            if line:
+                line.show()
+                line.show_labels()
+
+    def hide(self) -> None:
+        for (
+            pp_label,
+            size_label,
+            line,
+            level_marker,
+        ) in self.iter_ui_elements():
+            pp_label.hide()
+            level_marker.hide()
+            size_label.hide()
+            if line:
+                line.hide()
+
+    def update_graphics(
+        self,
+        marker: LevelMarker,
+    ) -> None:
+        '''
+        Update all labels callback.
+
+        Meant to be called from the marker ``.paint()``
+        for immediate, lag free label draws.
+
+        '''
+        for (
+            pp_label,
+            size_label,
+            line,
+            level_marker,
+        ) in self.iter_ui_elements():
+
+            pp_label.update()
+            size_label.update()
+
+            # XXX: can't call this because it causes a recursive paint/render
+            # level_marker.update()
+
+    def hide_info(self) -> None:
+        '''
+        Hide details (just size label?) of position nav elements.
+
+        '''
+        for (
+            pp_label,
+            size_label,
+            line,
+            level_marker,
+        ) in self.iter_ui_elements():
+
+            size_label.hide()
+            if line:
+                line.hide_labels()
+
+
 class PositionTracker:
     '''
-    Track and display real-time positions for a single symbol
-    over multiple accounts on a single chart.
+    Track and display real-time positions for a single asset-symbol
+    held in a single account, normally shown on a single chart.
 
     Graphically composed of a level line and marker as well as labels
     for indcating current position information. Updates are made to the
     corresponding "settings pane" for the chart's "order mode" UX.
 
     '''
-    # inputs
-    chart: 'ChartPlotWidget'  # noqa
-
     alloc: Allocator
     startup_pp: Position
     live_pp: Position
+    nav: Nav  # holds all UI elements across all charts
 
-    # allocated
-    pp_label: Label
-    size_label: Label
-    line: Optional[LevelLine] = None
-
-    _color: str = 'default_lightest'
-
     def __init__(
         self,
-        chart: 'ChartPlotWidget',  # noqa
+        charts: list[ChartPlotWidget],
         alloc: Allocator,
         startup_pp: Position,
 
     ) -> None:
 
-        self.chart = chart
+        nav = self.nav = Nav(charts={id(chart): chart for chart in charts})
 
         self.alloc = alloc
         self.startup_pp = startup_pp
-        self.live_pp = startup_pp.copy()
+        self.live_pp = copy(startup_pp)
 
+        # TODO: maybe add this as a method ``Nav.add_chart()``
+        # init all UI elements
+        for key, chart in nav.charts.items():
             view = chart.getViewBox()
 
-        # literally the 'pp' (pee pee) label that's always in view
-        self.pp_label = pp_label = Label(
+            arrow = mk_level_marker(
+                chart=chart,
+                size=1,
+                level=nav.level,
+                on_paint=nav.update_graphics,
+            )
+
+            # TODO: we really need some kinda "spacing" manager for all
+            # this stuff...
+            def offset_from_yaxis() -> float:
+                '''
+                If no L1 labels are present beside the x-axis place
+                the line label offset from the y-axis just enough to avoid
+                label overlap with any sticky labels.
+
+                '''
+                x = chart.marker_right_points()[1]
+                if chart._max_l1_line_len == 0:
+                    mkw = pp_label.txt.boundingRect().width()
+                    x -= 1.5 * mkw
+
+                return x
+
+            arrow.scene_x = offset_from_yaxis
+            view.scene().addItem(arrow)
+            arrow.hide()  # never show on startup
+            nav.level_markers[key] = arrow
+
+            # literally the 'pp' (pee pee) "position price" label that's
+            # always in view
+            pp_label = Label(
                 view=view,
                 fmt_str='pp',
-            color=self._color,
+                color=nav.color,
                 update_on_range_change=False,
             )
 
-        # create placeholder 'up' level arrow
-        self._level_marker = None
-        self._level_marker = self.level_marker(size=1)
-
-        pp_label.scene_anchor = partial(
-            gpath_pin,
-            gpath=self._level_marker,
-            label=pp_label,
-        )
             pp_label.render()
+            nav.pp_labels[key] = pp_label
 
-        self.size_label = size_label = Label(
+            size_label = Label(
                 view=view,
-            color=self._color,
+                color=self.nav.color,
 
                 # this is "static" label
                 # update_on_range_change=False,
@@ -514,11 +804,19 @@ class PositionTracker:
                 },
             )
             size_label.render()
 
             size_label.scene_anchor = partial(
                 pp_tight_and_right,
-                label=self.pp_label,
+                label=pp_label,
             )
+            nav.size_labels[key] = size_label
+
+            pp_label.scene_anchor = partial(
+                gpath_pin,
+                gpath=arrow,
+                label=pp_label,
+            )
+
+        nav.show()
 
     @property
     def pane(self) -> FieldsForm:
@@ -528,169 +826,74 @@ class PositionTracker:
         '''
         return self.chart.linked.godwidget.pp_pane
 
-    def update_graphics(
-        self,
-        marker: LevelMarker
-
-    ) -> None:
-        '''
-        Update all labels.
-
-        Meant to be called from the maker ``.paint()``
-        for immediate, lag free label draws.
-
-        '''
-        self.pp_label.update()
-        self.size_label.update()
-
     def update_from_pp(
         self,
         position: Optional[Position] = None,
+        set_as_startup: bool = False,
 
     ) -> None:
-        '''Update graphics and data from average price and size passed in our
-        EMS ``BrokerdPosition`` msg.
+        '''
+        Update graphics and data from average price and size passed in
+        our EMS ``BrokerdPosition`` msg.
 
         '''
         # live pp updates
         pp = position or self.live_pp
+        if set_as_startup:
+            startup_pp = pp
+        else:
+            startup_pp = self.startup_pp
+        alloc = self.alloc
 
-        self.update_line(
-            pp.avg_price,
-            pp.size,
-            self.chart.linked.symbol.lot_size_digits,
-        )
+        # update allocator settings
+        asset_type = pp.symbol.type_key
 
-        # label updates
-        self.size_label.fields['slots_used'] = round(
-            self.alloc.slots_used(pp), ndigits=1)
-        self.size_label.render()
+        # specific configs by asset class / type
+        if asset_type in _derivs:
+            # since it's harder to know how currency "applies" in this case
+            # given leverage properties
+            alloc.size_unit = '# units'
 
-        if pp.size == 0:
-            self.hide()
+            # set units limit to slots size thus making make the next
+            # entry step 1.0
+            alloc.units_limit = alloc.slots
 
         else:
-            self._level_marker.level = pp.avg_price
+            alloc.size_unit = 'currency'
 
-            # these updates are critical to avoid lag on view/scene changes
-            self._level_marker.update()  # trigger paint
-            self.pp_label.update()
-            self.size_label.update()
+        # if the current position is already greater then the limit
+        # settings, increase the limit to the current position
+        if alloc.size_unit == 'currency':
+            startup_size = self.startup_pp.size * startup_pp.ppu
 
-            self.show()
+            if startup_size > alloc.currency_limit:
+                alloc.currency_limit = round(startup_size, ndigits=2)
+
+        else:
+            startup_size = abs(startup_pp.size)
+
+            if startup_size > alloc.units_limit:
+                alloc.units_limit = startup_size
+
+                if asset_type in _derivs:
+                    alloc.slots = alloc.units_limit
+
+        self.nav.update_ui(
+            self.alloc.account,
+            pp.ppu,
+            pp.size,
+            round(alloc.slots_used(pp), ndigits=1),  # slots used
+        )
+
+        if self.live_pp.size:
+            # print("SHOWING NAV")
+            self.nav.show()
+
+        # if pp.size == 0:
+        else:
+            # print("HIDING NAV")
+            self.nav.hide()
 
         # don't show side and status widgets unless
         # order mode is "engaged" (which done via input controls)
-        self.hide_info()
+        self.nav.hide_info()
 
-    def level(self) -> float:
-        if self.line:
-            return self.line.value()
-        else:
-            return 0
-
-    def show(self) -> None:
-        if self.live_pp.size:
-            self.line.show()
-            self.line.show_labels()
-
-            self._level_marker.show()
-            self.pp_label.show()
-            self.size_label.show()
-
-    def hide(self) -> None:
-        self.pp_label.hide()
-        self._level_marker.hide()
-        self.size_label.hide()
-        if self.line:
-            self.line.hide()
-
-    def hide_info(self) -> None:
-        '''Hide details (right now just size label?) of position.
-
-        '''
-        self.size_label.hide()
-        if self.line:
-            self.line.hide_labels()
-
-    # TODO: move into annoate module
-    def level_marker(
-        self,
-        size: float,
-
-    ) -> LevelMarker:
-
-        if self._level_marker:
-            self._level_marker.delete()
-
-        # arrow marker
-        # scale marker size with dpi-aware font size
-        font_size = _font.font.pixelSize()
-
-        # scale marker size with dpi-aware font size
-        arrow_size = floor(1.375 * font_size)
-
-        if size > 0:
-            style = '|<'
-
-        elif size < 0:
-            style = '>|'
-
-        arrow = LevelMarker(
-            chart=self.chart,
-            style=style,
-            get_level=self.level,
-            size=arrow_size,
-            on_paint=self.update_graphics,
-        )
-
-        self.chart.getViewBox().scene().addItem(arrow)
-        arrow.show()
-
-        return arrow
-
-    def update_line(
-        self,
-        price: float,
-        size: float,
-        size_digits: int,
-
-    ) -> None:
-        '''Update personal position level line.
-
-        '''
-        # do line update
-        line = self.line
-
-        if size:
-            if line is None:
-
-                # create and show a pp line
-                line = self.line = position_line(
-                    chart=self.chart,
-                    level=price,
-                    size=size,
-                    color=self._color,
-                    marker=self._level_marker,
-                )
-
-            else:
-
-                line.set_level(price)
-                self._level_marker.level = price
-                self._level_marker.update()
-
-                # update LHS sizing label
-                line.update_labels({
-                    'size': size,
-                    'size_digits': size_digits,
-                    'fiat_size': round(price * size, ndigits=2),
-
-                    # TODO: per account lines on a single (or very related) symbol
-                    'account': self.alloc.account,
-                })
-                line.show()
-
-        elif line:  # remove pp line from view if it exists on a net-zero pp
-            line.delete()
-            self.line = None
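The net effect of the refactor above: all per-chart graphics state moves out of PositionTracker into the Nav composite, keyed by chart object id, so a single position update fans out to every chart it is shown on (e.g. the fast and history charts). A hedged fragment of what a caller sees, assuming a tracker whose Nav has already had its labels/lines/markers registered by PositionTracker.__init__:

# one call updates the level line, arrow marker and labels on
# every chart registered in `tracker.nav.charts`:
tracker.nav.update_ui(
    account='paper',   # illustrative values only
    price=101.25,      # average position price (ppu)
    size=3,            # net units held
    slots_used=1.5,
)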
@@ -35,9 +35,13 @@ from collections import defaultdict
 from contextlib import asynccontextmanager
 from functools import partial
 from typing import (
-    Optional, Callable,
-    Awaitable, Sequence,
-    Any, AsyncIterator
+    Optional,
+    Callable,
+    Awaitable,
+    Sequence,
+    Any,
+    AsyncIterator,
+    Iterator,
 )
 import time
 # from pprint import pformat
@@ -119,7 +123,7 @@ class CompleterView(QTreeView):
         # TODO: size this based on DPI font
         self.setIndentation(_font.px_size)
 
-        # self.setUniformRowHeights(True)
+        self.setUniformRowHeights(True)
         # self.setColumnWidth(0, 3)
         # self.setVerticalBarPolicy(Qt.ScrollBarAlwaysOff)
         # self.setSizeAdjustPolicy(QAbstractScrollArea.AdjustIgnored)
@@ -138,13 +142,15 @@ class CompleterView(QTreeView):
         model.setHorizontalHeaderLabels(labels)
 
         self._font_size: int = 0  # pixels
+        self._init: bool = False
 
     async def on_pressed(self, idx: QModelIndex) -> None:
-        '''Mouse pressed on view handler.
+        '''
+        Mouse pressed on view handler.
 
         '''
         search = self.parent()
-        await search.chart_current_item(clear_to_cache=False)
+        await search.chart_current_item()
         search.focus()
 
     def set_font_size(self, size: int = 18):
@@ -156,56 +162,64 @@ class CompleterView(QTreeView):
 
         self.setStyleSheet(f"font: {size}px")
 
-    # def resizeEvent(self, event: 'QEvent') -> None:
-    #     event.accept()
-    #     super().resizeEvent(event)
+    def resize_to_results(
+        self,
+        w: Optional[float] = 0,
+        h: Optional[float] = None,
 
-    def on_resize(self) -> None:
-        '''
-        Resize relay event from god.
-
-        '''
-        self.resize_to_results()
-
-    def resize_to_results(self):
+    ) -> None:
         model = self.model()
         cols = model.columnCount()
-        # rows = model.rowCount()
+        cidx = self.selectionModel().currentIndex()
+        rows = model.rowCount()
+        self.expandAll()
+
+        # compute the approx height in pixels needed to include
+        # all result rows in view.
+        row_h = rows_h = self.rowHeight(cidx) * (rows + 1)
+        for idx, item in self.iter_df_rows():
+            row_h = self.rowHeight(idx)
+            rows_h += row_h
+            # print(f'row_h: {row_h}\nrows_h: {rows_h}')
+
+        # TODO: could we just break early here on detection
+        # of ``rows_h >= h``?
 
         col_w_tot = 0
         for i in range(cols):
+            # only slap in a rows's height's worth
+            # of padding once at startup.. no idea
+            if (
+                not self._init
+                and row_h
+            ):
+                col_w_tot = row_h
+                self._init = True
+
             self.resizeColumnToContents(i)
             col_w_tot += self.columnWidth(i)
 
-        win = self.window()
-        win_h = win.height()
-        edit_h = self.parent().bar.height()
-        sb_h = win.statusBar().height()
+        # NOTE: if the heigh `h` set here is **too large** then the
+        # resize event will perpetually trigger as the window causes
+        # some kind of recompute of callbacks.. so we have to ensure
+        # it's limited.
+        if h:
+            h: int = round(h)
+            abs_mx = round(0.91 * h)
+            self.setMaximumHeight(abs_mx)
 
-        # TODO: probably make this more general / less hacky
-        # we should figure out the exact number of rows to allow
-        # inclusive of search bar and header "rows", in pixel terms.
-        # Eventually when we have an "info" widget below the results we
-        # will want space for it and likely terminating the results-view
-        # space **exactly on a row** would be ideal.
-        # if row_px > 0:
-        #     rows = ceil(window_h / row_px) - 4
-        # else:
-        #     rows = 16
-        # self.setFixedHeight(rows * row_px)
-        # self.resize(self.width(), rows * row_px)
+            if rows_h <= abs_mx:
+                # self.setMinimumHeight(rows_h)
+                self.setMinimumHeight(rows_h)
+                # self.setFixedHeight(rows_h)
 
-        # NOTE: if the heigh set here is **too large** then the resize
-        # event will perpetually trigger as the window causes some kind
-        # of recompute of callbacks.. so we have to ensure it's limited.
-        h = win_h - (edit_h + 1.666*sb_h)
-        assert h > 0
-        self.setFixedHeight(round(h))
+            else:
+                self.setMinimumHeight(abs_mx)
 
-        # size to width of longest result seen thus far
-        # TODO: should we always dynamically scale to longest result?
-        if self.width() < col_w_tot:
-            self.setFixedWidth(col_w_tot)
+        # dyncamically size to width of longest result seen
+        curr_w = self.width()
+        if curr_w < col_w_tot:
+            self.setMinimumWidth(col_w_tot)
 
         self.update()
 
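The rewritten resize_to_results() sums real per-row heights (depth-first, including expanded children) and clamps the widget to roughly 91% of the height it was offered, which is what prevents the resize-event feedback loop described in the NOTE. A condensed sketch of that policy, assuming a view exposing the same methods as above:

def clamped_resize(view, h: float) -> None:
    # sum actual row heights depth-first, then clamp to ~91% of the
    # offered height so the widget can't grow past its container and
    # perpetually re-trigger resize callbacks.
    cidx = view.selectionModel().currentIndex()
    rows_h = view.rowHeight(cidx) * (view.model().rowCount() + 1)
    for idx, _item in view.iter_df_rows():
        rows_h += view.rowHeight(idx)

    abs_mx = round(0.91 * h)
    view.setMaximumHeight(abs_mx)
    view.setMinimumHeight(min(rows_h, abs_mx))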
@@ -331,6 +345,23 @@ class CompleterView(QTreeView):
             item = model.itemFromIndex(idx)
             yield idx, item
 
+    def iter_df_rows(
+        self,
+        iparent: QModelIndex = QModelIndex(),
+
+    ) -> Iterator[tuple[QModelIndex, QStandardItem]]:
+
+        model = self.model()
+        isections = model.rowCount(iparent)
+        for i in range(isections):
+            idx = model.index(i, 0, iparent)
+            item = model.itemFromIndex(idx)
+            yield idx, item
+
+            if model.hasChildren(idx):
+                # recursively yield child items depth-first
+                yield from self.iter_df_rows(idx)
+
     def find_section(
         self,
         section: str,
@@ -354,7 +385,8 @@ class CompleterView(QTreeView):
         status_field: str = None,
 
     ) -> None:
-        '''Clear all result-rows from under the depth = 1 section.
+        '''
+        Clear all result-rows from under the depth = 1 section.
 
         '''
         idx = self.find_section(section)
@@ -375,8 +407,6 @@ class CompleterView(QTreeView):
             else:
                 model.setItem(idx.row(), 1, QStandardItem())
 
-            self.resize_to_results()
-
             return idx
         else:
             return None
@@ -444,9 +474,22 @@ class CompleterView(QTreeView):
 
         self.show_matches()
 
-    def show_matches(self) -> None:
+    def show_matches(
+        self,
+        wh: Optional[tuple[float, float]] = None,
+
+    ) -> None:
+
+        if wh:
+            self.resize_to_results(*wh)
+        else:
+            # case where it's just an update from results and *NOT*
+            # a resize of some higher level parent-container widget.
+            search = self.parent()
+            w, h = search.space_dims()
+            self.resize_to_results(w=w, h=h)
+
         self.show()
-        self.resize_to_results()
 
 
 class SearchBar(Edit):
@@ -466,18 +509,15 @@ class SearchBar(Edit):
         self.godwidget = godwidget
         super().__init__(parent, **kwargs)
         self.view: CompleterView = view
-        godwidget._widgets[view.mode_name] = view
-
-    def show(self) -> None:
-        super().show()
-        self.view.show_matches()
 
     def unfocus(self) -> None:
         self.parent().hide()
         self.clearFocus()
 
+    def hide(self) -> None:
         if self.view:
             self.view.hide()
+        super().hide()
 
 
 class SearchWidget(QtWidgets.QWidget):
|
@ -496,15 +536,16 @@ class SearchWidget(QtWidgets.QWidget):
|
||||||
parent=None,
|
parent=None,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
super().__init__(parent or godwidget)
|
super().__init__(parent)
|
||||||
|
|
||||||
# size it as we specify
|
# size it as we specify
|
||||||
self.setSizePolicy(
|
self.setSizePolicy(
|
||||||
QtWidgets.QSizePolicy.Fixed,
|
QtWidgets.QSizePolicy.Fixed,
|
||||||
QtWidgets.QSizePolicy.Expanding,
|
QtWidgets.QSizePolicy.Fixed,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.godwidget = godwidget
|
self.godwidget = godwidget
|
||||||
|
godwidget.reg_for_resize(self)
|
||||||
|
|
||||||
self.vbox = QtWidgets.QVBoxLayout(self)
|
self.vbox = QtWidgets.QVBoxLayout(self)
|
||||||
self.vbox.setContentsMargins(0, 4, 4, 0)
|
self.vbox.setContentsMargins(0, 4, 4, 0)
|
||||||
|
@@ -554,18 +595,23 @@ class SearchWidget(QtWidgets.QWidget):
         self.vbox.setAlignment(self.view, Qt.AlignTop | Qt.AlignLeft)
 
     def focus(self) -> None:
+        self.show()
+        self.bar.focus()
 
-        if self.view.model().rowCount(QModelIndex()) == 0:
-            # fill cache list if nothing existing
+    def show_only_cache_entries(self) -> None:
+        '''
+        Clear the search results view and show only cached (aka recently
+        loaded with active data) feeds in the results section.
+
+        '''
+        godw = self.godwidget
         self.view.set_section_entries(
             'cache',
-            list(reversed(self.godwidget._chart_cache)),
+            list(reversed(godw._chart_cache)),
+            # remove all other completion results except for cache
             clear_all=True,
         )
 
-        self.bar.focus()
-        self.show()
-
     def get_current_item(self) -> Optional[tuple[str, str]]:
         '''Return the current completer tree selection as
         a tuple ``(parent: str, child: str)`` if valid, else ``None``.
@@ -603,7 +649,8 @@ class SearchWidget(QtWidgets.QWidget):
         clear_to_cache: bool = True,
 
     ) -> Optional[str]:
-        '''Attempt to load and switch the current selected
+        '''
+        Attempt to load and switch the current selected
         completion result to the affiliated chart app.
 
         Return any loaded symbol.
|
||||||
return None
|
return None
|
||||||
|
|
||||||
provider, symbol = value
|
provider, symbol = value
|
||||||
chart = self.godwidget
|
godw = self.godwidget
|
||||||
|
|
||||||
log.info(f'Requesting symbol: {symbol}.{provider}')
|
log.info(f'Requesting symbol: {symbol}.{provider}')
|
||||||
|
|
||||||
await chart.load_symbol(
|
await godw.load_symbols(
|
||||||
provider,
|
provider,
|
||||||
symbol,
|
[symbol],
|
||||||
'info',
|
'info',
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@@ -635,18 +682,46 @@ class SearchWidget(QtWidgets.QWidget):
         # Re-order the symbol cache on the chart to display in
         # LIFO order. this is normally only done internally by
         # the chart on new symbols being loaded into memory
-        chart.set_chart_symbol(fqsn, chart.linkedsplits)
-
-        self.view.set_section_entries(
-            'cache',
-            values=list(reversed(chart._chart_cache)),
-
-            # remove all other completion results except for cache
-            clear_all=True,
+        godw.set_chart_symbol(
+            fqsn, (
+                godw.hist_linked,
+                godw.rt_linked,
+            )
         )
+        self.show_only_cache_entries()
 
+        self.bar.focus()
         return fqsn
 
+    def space_dims(self) -> tuple[float, float]:
+        '''
+        Compute and return the "available space dimentions" for this
+        search widget in terms of px space for results by return the
+        pair of width and height.
+
+        '''
+        # XXX: dun need dis rite?
+        # win = self.window()
+        # win_h = win.height()
+        # sb_h = win.statusBar().height()
+        godw = self.godwidget
+        hl = godw.hist_linked
+        edit_h = self.bar.height()
+        h = hl.height() - edit_h
+        w = hl.width()
+        return w, h
+
+    def on_resize(self) -> None:
+        '''
+        Resize relay event from god, resize all child widgets.
+
+        Right now this is just view to contents and/or the fast chart
+        height.
+
+        '''
+        w, h = self.space_dims()
+        self.bar.view.show_matches(wh=(w, h))
+
 
 _search_active: trio.Event = trio.Event()
 _search_enabled: bool = False
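space_dims() anchors the results view's footprint to the history-chart region instead of the whole window (the old window/statusbar arithmetic is left commented out). Conceptually, the whole resize relay reduces to:

def space_dims(search) -> tuple[float, float]:
    # condensed restatement of the method added above; `search` is
    # assumed to be the SearchWidget instance: available px space =
    # history-chart area, minus the search entry bar's own height.
    hl = search.godwidget.hist_linked
    return hl.width(), hl.height() - search.bar.height()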
|
@ -712,10 +787,11 @@ async def fill_results(
|
||||||
max_pause_time: float = 6/16 + 0.001,
|
max_pause_time: float = 6/16 + 0.001,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
"""Task to search through providers and fill in possible
|
'''
|
||||||
|
Task to search through providers and fill in possible
|
||||||
completion results.
|
completion results.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
global _search_active, _search_enabled, _searcher_cache
|
global _search_active, _search_enabled, _searcher_cache
|
||||||
|
|
||||||
bar = search.bar
|
bar = search.bar
|
||||||
|
@ -729,6 +805,10 @@ async def fill_results(
|
||||||
matches = defaultdict(list)
|
matches = defaultdict(list)
|
||||||
has_results: defaultdict[str, set[str]] = defaultdict(set)
|
has_results: defaultdict[str, set[str]] = defaultdict(set)
|
||||||
|
|
||||||
|
# show cached feed list at startup
|
||||||
|
search.show_only_cache_entries()
|
||||||
|
search.on_resize()
|
||||||
|
|
||||||
while True:
|
while True:
|
||||||
await _search_active.wait()
|
await _search_active.wait()
|
||||||
period = None
|
period = None
|
||||||
|
@ -742,7 +822,7 @@ async def fill_results(
|
||||||
pattern = await recv_chan.receive()
|
pattern = await recv_chan.receive()
|
||||||
|
|
||||||
period = time.time() - wait_start
|
period = time.time() - wait_start
|
||||||
print(f'{pattern} after {period}')
|
log.debug(f'{pattern} after {period}')
|
||||||
|
|
||||||
# during fast multiple key inputs, wait until a pause
|
# during fast multiple key inputs, wait until a pause
|
||||||
# (in typing) to initiate search
|
# (in typing) to initiate search
|
||||||
|
@ -841,8 +921,7 @@ async def handle_keyboard_input(
|
||||||
godwidget = search.godwidget
|
godwidget = search.godwidget
|
||||||
view = bar.view
|
view = bar.view
|
||||||
view.set_font_size(bar.dpi_font.px_size)
|
view.set_font_size(bar.dpi_font.px_size)
|
||||||
|
send, recv = trio.open_memory_channel(616)
|
||||||
send, recv = trio.open_memory_channel(16)
|
|
||||||
|
|
||||||
async with trio.open_nursery() as n:
|
async with trio.open_nursery() as n:
|
||||||
|
|
||||||
|
@ -857,6 +936,10 @@ async def handle_keyboard_input(
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
|
bar.focus()
|
||||||
|
search.show_only_cache_entries()
|
||||||
|
await trio.sleep(0)
|
||||||
|
|
||||||
async for kbmsg in recv_chan:
|
async for kbmsg in recv_chan:
|
||||||
event, etype, key, mods, txt = kbmsg.to_tuple()
|
event, etype, key, mods, txt = kbmsg.to_tuple()
|
||||||
|
|
||||||
|
@ -867,10 +950,11 @@ async def handle_keyboard_input(
|
||||||
ctl = True
|
ctl = True
|
||||||
|
|
||||||
if key in (Qt.Key_Enter, Qt.Key_Return):
|
if key in (Qt.Key_Enter, Qt.Key_Return):
|
||||||
|
|
||||||
await search.chart_current_item(clear_to_cache=True)
|
|
||||||
_search_enabled = False
|
_search_enabled = False
|
||||||
continue
|
await search.chart_current_item(clear_to_cache=True)
|
||||||
|
search.show_only_cache_entries()
|
||||||
|
view.show_matches()
|
||||||
|
search.focus()
|
||||||
|
|
||||||
elif not ctl and not bar.text():
|
elif not ctl and not bar.text():
|
||||||
# if nothing in search text show the cache
|
# if nothing in search text show the cache
|
||||||
|
@ -887,7 +971,7 @@ async def handle_keyboard_input(
|
||||||
Qt.Key_Space, # i feel like this is the "native" one
|
Qt.Key_Space, # i feel like this is the "native" one
|
||||||
Qt.Key_Alt,
|
Qt.Key_Alt,
|
||||||
}:
|
}:
|
||||||
search.bar.unfocus()
|
bar.unfocus()
|
||||||
|
|
||||||
# kill the search and focus back on main chart
|
# kill the search and focus back on main chart
|
||||||
if godwidget:
|
if godwidget:
|
||||||
|
@ -935,9 +1019,10 @@ async def handle_keyboard_input(
|
||||||
if item:
|
if item:
|
||||||
parent_item = item.parent()
|
parent_item = item.parent()
|
||||||
|
|
||||||
|
# if we're in the cache section and thus the next
|
||||||
|
# selection is a cache item, switch and show it
|
||||||
|
# immediately since it should be very fast.
|
||||||
if parent_item and parent_item.text() == 'cache':
|
if parent_item and parent_item.text() == 'cache':
|
||||||
|
|
||||||
# if it's a cache item, switch and show it immediately
|
|
||||||
await search.chart_current_item(clear_to_cache=False)
|
await search.chart_current_item(clear_to_cache=False)
|
||||||
|
|
||||||
elif not ctl:
|
elif not ctl:
|
||||||
|
|
|
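Note: the hunks above swap the single-symbol `load_symbol()` call for the new list-taking `GodWidget.load_symbols()` API. A minimal usage sketch, assuming a live `GodWidget` inside a running Qt + trio app; the wrapper function and its name are illustrative, only the call shape comes from the diff:

    # hypothetical helper driving the new multi-symbol loading API;
    # `godw` is assumed to be an already-constructed GodWidget.
    async def switch_chart_to(godw, provider: str, symbol: str) -> None:
        # the old API took a bare str; the new one takes a list so
        # several feeds can be loaded into one linked chart view.
        await godw.load_symbols(
            provider,
            [symbol],
            'info',  # loglevel, per the call site in the diff
        )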
piker/ui/_window.py

@@ -21,15 +21,29 @@ Qt main window singletons and stuff.
 import os
 import signal
 import time
-from typing import Callable, Optional, Union
+from typing import (
+    Callable,
+    Optional,
+    Union,
+)
 import uuid

-from pyqtgraph import QtGui
 from PyQt5 import QtCore
-from PyQt5.QtWidgets import QLabel, QStatusBar
+from PyQt5.QtWidgets import (
+    QWidget,
+    QMainWindow,
+    QApplication,
+    QLabel,
+    QStatusBar,
+)
+from PyQt5.QtGui import (
+    QScreen,
+    QCloseEvent,
+)

 from ..log import get_logger
 from ._style import _font_small, hcolor
+from ._chart import GodWidget


 log = get_logger(__name__)
@@ -148,12 +162,13 @@ class MultiStatus:
         self.bar.clearMessage()


-class MainWindow(QtGui.QMainWindow):
+class MainWindow(QMainWindow):

     # XXX: for tiling wms this should scale
     # with the alloted window size.
     # TODO: detect for tiling and if untrue set some size?
-    size = (300, 500)
+    # size = (300, 500)
+    godwidget: GodWidget

     title = 'piker chart (ur symbol is loading bby)'
@@ -162,17 +177,20 @@ class MainWindow(QtGui.QMainWindow):
        # self.setMinimumSize(*self.size)
         self.setWindowTitle(self.title)

+        # set by runtime after `trio` is engaged.
+        self.godwidget: Optional[GodWidget] = None
+
         self._status_bar: QStatusBar = None
         self._status_label: QLabel = None
         self._size: Optional[tuple[int, int]] = None

     @property
-    def mode_label(self) -> QtGui.QLabel:
+    def mode_label(self) -> QLabel:

         # init mode label
         if not self._status_label:

-            self._status_label = label = QtGui.QLabel()
+            self._status_label = label = QLabel()
             label.setStyleSheet(
                 f"""QLabel {{
                     color : {hcolor('gunmetal')};
@@ -194,8 +212,7 @@ class MainWindow(QtGui.QMainWindow):

     def closeEvent(
         self,
-        event: QtGui.QCloseEvent,
+        event: QCloseEvent,

     ) -> None:
         '''Cancel the root actor asap.
@@ -235,8 +252,8 @@ class MainWindow(QtGui.QMainWindow):
     def on_focus_change(
         self,

-        last: QtGui.QWidget,
-        current: QtGui.QWidget,
+        last: QWidget,
+        current: QWidget,

     ) -> None:
@@ -247,11 +264,12 @@ class MainWindow(QtGui.QMainWindow):
         name = getattr(current, 'mode_name', '')
         self.set_mode_name(name)

-    def current_screen(self) -> QtGui.QScreen:
-        """Get a frickin screen (if we can, gawd).
+    def current_screen(self) -> QScreen:
+        '''
+        Get a frickin screen (if we can, gawd).

-        """
-        app = QtGui.QApplication.instance()
+        '''
+        app = QApplication.instance()

         for _ in range(3):
             screen = app.screenAt(self.pos())
@@ -284,7 +302,7 @@ class MainWindow(QtGui.QMainWindow):
         '''
         # https://stackoverflow.com/a/18975846
         if not size and not self._size:
-            app = QtGui.QApplication.instance()
+            # app = QApplication.instance()
             geo = self.current_screen().geometry()
             h, w = geo.height(), geo.width()
             # use approx 1/3 of the area of the screen by default
@@ -292,9 +310,36 @@ class MainWindow(QtGui.QMainWindow):

         self.resize(*size or self._size)

+    def resizeEvent(self, event: QtCore.QEvent) -> None:
+        if (
+            # event.spontaneous()
+            event.oldSize().height == event.size().height
+        ):
+            event.ignore()
+            return
+
+        # XXX: uncomment for debugging..
+        # attrs = {}
+        # for key in dir(event):
+        #     if key == '__dir__':
+        #         continue
+        #     attr = getattr(event, key)
+        #     try:
+        #         attrs[key] = attr()
+        #     except TypeError:
+        #         attrs[key] = attr
+
+        # from pprint import pformat
+        # print(
+        #     f'{pformat(attrs)}\n'
+        #     f'WINDOW RESIZE: {self.size()}\n\n'
+        # )
+        self.godwidget.on_win_resize(event)
+        event.accept()
+

 # singleton app per actor
-_qt_win: QtGui.QMainWindow = None
+_qt_win: QMainWindow = None


 def main_window() -> MainWindow:
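The new `resizeEvent()` override above relays window resizes to the god widget while trying to skip height-neutral events. As committed, the guard compares the bound methods `event.oldSize().height` and `event.size().height` rather than their call results, so the early-return path likely never triggers. A sketch of the presumably intended pixel-value check, on a hypothetical standalone widget (not piker code):

    # sketch of the intended height-change filter; note the `height()`
    # calls so actual pixel values are compared, not method objects.
    from PyQt5.QtGui import QResizeEvent
    from PyQt5.QtWidgets import QWidget


    class ResizeRelay(QWidget):
        def resizeEvent(self, event: QResizeEvent) -> None:
            if event.oldSize().height() == event.size().height():
                # height unchanged: nothing for child widgets to do
                event.ignore()
                return

            # relay to child-widget layout logic here..
            event.accept()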
piker/ui/cli.py

@@ -46,8 +46,10 @@ def _kivy_import_hack():
 @click.argument('name', nargs=1, required=True)
 @click.pass_obj
 def monitor(config, rate, name, dhost, test, tl):
-    """Start a real-time watchlist UI
-    """
+    '''
+    Start a real-time watchlist UI
+
+    '''
     # global opts
     brokermod = config['brokermods'][0]
     loglevel = config['loglevel']
@@ -70,8 +72,12 @@ def monitor(config, rate, name, dhost, test, tl):
         ) as portal:
             # run app "main"
             await _async_main(
-                name, portal, tickers,
-                brokermod, rate, test=test,
+                name,
+                portal,
+                tickers,
+                brokermod,
+                rate,
+                test=test,
             )

     tractor.run(
@@ -122,7 +128,7 @@ def optschain(config, symbol, date, rate, test):
 @cli.command()
 @click.option(
     '--profile',
-    '-p',
+    # '-p',
     default=None,
     help='Enable pyqtgraph profiling'
 )
@@ -131,9 +137,14 @@ def optschain(config, symbol, date, rate, test):
     is_flag=True,
     help='Enable tractor debug mode'
 )
-@click.argument('symbol', required=True)
+@click.argument('symbols', nargs=-1, required=True)
 @click.pass_obj
-def chart(config, symbol, profile, pdb):
+def chart(
+    config,
+    symbols: list[str],
+    profile,
+    pdb: bool,
+):
     '''
     Start a real-time chartng UI

@@ -144,8 +155,10 @@ def chart(config, symbol, profile, pdb):
         _profile._pg_profile = True
         _profile.ms_slower_then = float(profile)

+    # Qt UI entrypoint
     from ._app import _main

+    for symbol in symbols:
         if '.' not in symbol:
             click.echo(click.style(
                 f'symbol: {symbol} must have a {symbol}.<provider> suffix',
@@ -153,15 +166,16 @@ def chart(config, symbol, profile, pdb):
             ))
             return

     # global opts
     brokernames = config['brokers']
+    brokermods = config['brokermods']
+    assert brokermods
     tractorloglevel = config['tractorloglevel']
     pikerloglevel = config['loglevel']

     _main(
-        sym=symbol,
-        brokernames=brokernames,
+        syms=symbols,
+        brokermods=brokermods,
         piker_loglevel=pikerloglevel,
         tractor_kwargs={
             'debug_mode': pdb,
@@ -170,5 +184,6 @@ def chart(config, symbol, profile, pdb):
         'enable_modules': [
             'piker.clearing._client'
         ],
+        'registry_addr': config.get('registry_addr'),
         },
     )
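The reworked `chart` command above accepts any number of symbols through click's variadic `nargs=-1` argument. A self-contained sketch of that pattern; the command name and body are hypothetical, only the decorator usage mirrors the diff:

    # hypothetical command showing click's variadic-argument pattern;
    # with `nargs=-1` the values arrive as a tuple of strings.
    import click


    @click.command()
    @click.argument('symbols', nargs=-1, required=True)
    def echo_symbols(symbols: tuple[str, ...]) -> None:
        for symbol in symbols:
            # same fqsn-suffix sanity check as the diff above
            if '.' not in symbol:
                raise click.BadParameter(
                    f'symbol: {symbol} must have a {symbol}.<provider> suffix'
                )
            click.echo(symbol)


    if __name__ == '__main__':
        echo_symbols()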
(file diff suppressed because it is too large)
requirements.txt

@@ -1,13 +1,12 @@
 # we require a pinned dev branch to get some edge features that
 # are often untested in tractor's CI and/or being tested by us
 # first before committing as core features in tractor's base.
--e git+https://github.com/goodboy/tractor.git@master#egg=tractor
+-e git+https://github.com/goodboy/tractor.git@piker_pin#egg=tractor

 # `pyqtgraph` peeps keep breaking, fixing, improving so might as well
 # pin this to a dev branch that we have more control over especially
 # as more graphics stuff gets hashed out.
--e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
+-e git+https://github.com/pikers/pyqtgraph.git@master#egg=pyqtgraph


 # our async client for ``marketstore`` (the tsdb)
 -e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
@@ -18,4 +17,7 @@


 # ``asyncvnc`` for sending interactions to ib-gw inside docker
--e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc
+-e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc
+
+# ``cryptofeed`` for connecting to various crypto exchanges + custom fixes
+-e git+https://github.com/pikers/cryptofeed.git@date_parsing#egg=cryptofeed
setup.py

@@ -41,23 +41,24 @@ setup(
     },
     install_requires=[
         'toml',
+        'tomli',  # fastest pure py reader
         'click',
         'colorlog',
         'attrs',
         'pygments',
         'colorama',  # numba traceback coloring
-        'pydantic',  # structured data
+        'msgspec',  # performant IPC messaging and structs

         # async
         'trio',
         'trio-websocket',
-        'msgspec',  # performant IPC messaging
         'async_generator',

         # from github currently (see requirements.txt)
         # 'trimeter',  # not released yet..
         # 'tractor',
         # asyncvnc,
+        # 'cryptofeed',

         # brokers
         'asks==2.4.8',
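The `setup.py` hunk swaps `pydantic` for `msgspec`, which now covers both structured data and IPC wire encoding. A minimal round-trip sketch of the `msgspec.Struct` + msgpack combo that motivates the consolidation; the `Quote` type and its fields are illustrative, not from the repo:

    # illustrative struct: typed fields are validated on decode,
    # filling the role pydantic models played previously.
    import msgspec


    class Quote(msgspec.Struct):
        symbol: str
        last: float


    wire = msgspec.msgpack.encode(Quote(symbol='xbtusd.kraken', last=16500.0))
    quote = msgspec.msgpack.decode(wire, type=Quote)
    assert quote.last == 16500.0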
tests/conftest.py

@@ -1,10 +1,12 @@
+from contextlib import asynccontextmanager as acm
 import os

 import pytest
 import tractor
-import trio
-from piker import log, config
-from piker.brokers import questrade
+from piker import (
+    # log,
+    config,
+)


 def pytest_addoption(parser):
@@ -14,15 +16,6 @@ def pytest_addoption(parser):
         help="Use a practice API account")


-@pytest.fixture(scope='session', autouse=True)
-def loglevel(request):
-    orig = tractor.log._default_loglevel
-    level = tractor.log._default_loglevel = request.config.option.loglevel
-    log.get_console_log(level)
-    yield level
-    tractor.log._default_loglevel = orig
-
-
 @pytest.fixture(scope='session')
 def test_config():
     dirname = os.path.dirname
@@ -37,9 +30,11 @@ def test_config():

 @pytest.fixture(scope='session', autouse=True)
 def confdir(request, test_config):
-    """If the `--confdir` flag is not passed use the
+    '''
+    If the `--confdir` flag is not passed use the
     broker config file found in that dir.
-    """
+
+    '''
     confdir = request.config.option.confdir
     if confdir is not None:
         config._override_config_dir(confdir)
@@ -47,49 +42,61 @@ def confdir(request, test_config):
     return confdir


-@pytest.fixture(scope='session', autouse=True)
-def travis(confdir):
-    is_travis = os.environ.get('TRAVIS', False)
-    if is_travis:
-        # this directory is cached, see .travis.yaml
-        conf_file = config.get_broker_conf_path()
-        refresh_token = os.environ['QT_REFRESH_TOKEN']
-
-        def write_with_token(token):
-            # XXX don't pass the dir path here since may be
-            # written behind the scenes in the `confdir fixture`
-            if not os.path.isfile(conf_file):
-                open(conf_file, 'w').close()
-            conf, path = config.load()
-            conf.setdefault('questrade', {}).update(
-                {'refresh_token': token,
-                 'is_practice': 'True'}
-            )
-            config.write(conf, path)
-
-        async def ensure_config():
-            # try to refresh current token using cached brokers config
-            # if it fails fail try using the refresh token provided by the
-            # env var and if that fails stop the test run here.
-            try:
-                async with questrade.get_client(ask_user=False):
-                    pass
-            except (
-                FileNotFoundError, ValueError,
-                questrade.BrokerError, questrade.QuestradeError,
-                trio.MultiError,
-            ):
-                # 3 cases:
-                # - config doesn't have a ``refresh_token`` k/v
-                # - cache dir does not exist yet
-                # - current token is expired; take it form env var
-                write_with_token(refresh_token)
-
-                async with questrade.get_client(ask_user=False):
-                    pass
-
-        # XXX ``pytest_trio`` doesn't support scope or autouse
-        trio.run(ensure_config)
+# @pytest.fixture(scope='session', autouse=True)
+# def travis(confdir):
+#     is_travis = os.environ.get('TRAVIS', False)
+#     if is_travis:
+#         # this directory is cached, see .travis.yaml
+#         conf_file = config.get_broker_conf_path()
+#         refresh_token = os.environ['QT_REFRESH_TOKEN']
+
+#         def write_with_token(token):
+#             # XXX don't pass the dir path here since may be
+#             # written behind the scenes in the `confdir fixture`
+#             if not os.path.isfile(conf_file):
+#                 open(conf_file, 'w').close()
+#             conf, path = config.load()
+#             conf.setdefault('questrade', {}).update(
+#                 {'refresh_token': token,
+#                  'is_practice': 'True'}
+#             )
+#             config.write(conf, path)
+
+#         async def ensure_config():
+#             # try to refresh current token using cached brokers config
+#             # if it fails fail try using the refresh token provided by the
+#             # env var and if that fails stop the test run here.
+#             try:
+#                 async with questrade.get_client(ask_user=False):
+#                     pass
+#             except (
+#                 FileNotFoundError, ValueError,
+#                 questrade.BrokerError, questrade.QuestradeError,
+#                 trio.MultiError,
+#             ):
+#                 # 3 cases:
+#                 # - config doesn't have a ``refresh_token`` k/v
+#                 # - cache dir does not exist yet
+#                 # - current token is expired; take it form env var
+#                 write_with_token(refresh_token)
+
+#                 async with questrade.get_client(ask_user=False):
+#                     pass
+
+#         # XXX ``pytest_trio`` doesn't support scope or autouse
+#         trio.run(ensure_config)
+
+
+_ci_env: bool = os.environ.get('CI', False)
+
+
+@pytest.fixture(scope='session')
+def ci_env() -> bool:
+    '''
+    Detect CI envoirment.
+
+    '''
+    return _ci_env


 @pytest.fixture
@@ -105,3 +112,56 @@ def tmx_symbols():
 @pytest.fixture
 def cse_symbols():
     return ['TRUL.CN', 'CWEB.CN', 'SNN.CN']
+
+
+@acm
+async def _open_test_pikerd(
+    reg_addr: tuple[str, int] | None = None,
+    **kwargs,
+
+) -> tuple[
+    str,
+    int,
+    tractor.Portal
+]:
+    '''
+    Testing helper to startup the service tree and runtime on
+    a different port then the default to allow testing alongside
+    a running stack.
+
+    '''
+    import random
+    from piker._daemon import maybe_open_pikerd
+
+    if reg_addr is None:
+        port = random.randint(6e3, 7e3)
+        reg_addr = ('127.0.0.1', port)
+
+    async with (
+        maybe_open_pikerd(
+            registry_addr=reg_addr,
+            **kwargs,
+        ),
+    ):
+        async with tractor.wait_for_actor(
+            'pikerd',
+            arbiter_sockaddr=reg_addr,
+        ) as portal:
+            raddr = portal.channel.raddr
+            assert raddr == reg_addr
+            yield (
+                raddr[0],
+                raddr[1],
+                portal,
+            )
+
+
+@pytest.fixture
+def open_test_pikerd():
+
+    yield _open_test_pikerd
+
+    # TODO: teardown checks such as,
+    # - no leaked subprocs or shm buffers
+    # - all requested container service are torn down
+    # - certain ``tractor`` runtime state?
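The `_open_test_pikerd()` helper and its `open_test_pikerd` fixture added above let a test boot the whole service tree on a random registry port. A usage sketch; the test body and assertions are illustrative, only the fixture's yielded `(host, port, portal)` contract comes from the diff:

    # hypothetical consumer of the new fixture: the acm yields the
    # bound registry (host, port) plus a portal to the pikerd actor.
    import trio


    def test_pikerd_boots(open_test_pikerd):

        async def main():
            async with open_test_pikerd() as (host, port, portal):
                assert host == '127.0.0.1'
                assert 6000 <= port <= 7000

        trio.run(main)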
tests/test_feeds.py (new file)

@@ -0,0 +1,128 @@
+'''
+Data feed layer APIs, performance, msg throttling.
+
+'''
+from collections import Counter
+from pprint import pprint
+from typing import AsyncContextManager
+
+import pytest
+# import tractor
+import trio
+from piker.data import (
+    ShmArray,
+    open_feed,
+)
+from piker.data._source import (
+    unpack_fqsn,
+)
+
+
+@pytest.mark.parametrize(
+    'fqsns',
+    [
+        # binance
+        (100, {'btcusdt.binance', 'ethusdt.binance'}, False),
+
+        # kraken
+        (20, {'ethusdt.kraken', 'xbtusd.kraken'}, True),
+
+        # binance + kraken
+        (100, {'btcusdt.binance', 'xbtusd.kraken'}, False),
+    ],
+    ids=lambda param: f'quotes={param[0]}@fqsns={param[1]}',
+)
+def test_multi_fqsn_feed(
+    open_test_pikerd: AsyncContextManager,
+    fqsns: set[str],
+    ci_env: bool
+):
+    '''
+    Start a real-time data feed for provided fqsn and pull
+    a few quotes then simply shut down.
+
+    '''
+    max_quotes, fqsns, run_in_ci = fqsns
+
+    if (
+        ci_env
+        and not run_in_ci
+    ):
+        pytest.skip('Skipping CI disabled test due to feed restrictions')
+
+    brokers = set()
+    for fqsn in fqsns:
+        brokername, key, suffix = unpack_fqsn(fqsn)
+        brokers.add(brokername)
+
+    async def main():
+        async with (
+            open_test_pikerd(),
+            open_feed(
+                fqsns,
+                loglevel='info',
+
+                # TODO: ensure throttle rate is applied
+                # limit to at least display's FPS
+                # avoiding needless Qt-in-guest-mode context switches
+                # tick_throttle=_quote_throttle_rate,
+
+            ) as feed
+        ):
+            # verify shm buffers exist
+            for fqin in fqsns:
+                flume = feed.flumes[fqin]
+                ohlcv: ShmArray = flume.rt_shm
+                hist_ohlcv: ShmArray = flume.hist_shm
+
+            async with feed.open_multi_stream(brokers) as stream:
+
+                # pull the first startup quotes, one for each fqsn, and
+                # ensure they match each flume's startup quote value.
+                fqsns_copy = fqsns.copy()
+                with trio.fail_after(0.5):
+                    for _ in range(1):
+                        first_quotes = await stream.receive()
+                        for fqsn, quote in first_quotes.items():
+
+                            # XXX: TODO: WTF apparently this error will get
+                            # supressed and only show up in the teardown
+                            # excgroup if we don't have the fix from
+                            # <tractorbugurl>
+                            # assert 0
+
+                            fqsns_copy.remove(fqsn)
+                            flume = feed.flumes[fqsn]
+                            assert quote['last'] == flume.first_quote['last']
+
+                cntr = Counter()
+                with trio.fail_after(6):
+                    async for quotes in stream:
+                        for fqsn, quote in quotes.items():
+                            cntr[fqsn] += 1
+
+                            # await tractor.breakpoint()
+                            flume = feed.flumes[fqsn]
+                            ohlcv: ShmArray = flume.rt_shm
+                            hist_ohlcv: ShmArray = flume.hist_shm
+
+                            # print quote msg, rt and history
+                            # buffer values on console.
+                            rt_row = ohlcv.array[-1]
+                            hist_row = hist_ohlcv.array[-1]
+                            # last = quote['last']
+
+                            # assert last == rt_row['close']
+                            # assert last == hist_row['close']
+                            pprint(
+                                f'{fqsn}: {quote}\n'
+                                f'rt_ohlc: {rt_row}\n'
+                                f'hist_ohlc: {hist_row}\n'
+                            )
+
+                            if cntr.total() >= max_quotes:
+                                break
+
+                assert set(cntr.keys()) == fqsns
+
+    trio.run(main)
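For reference, the fqsns in the new test use the `<key>.<broker>` format which `unpack_fqsn()` splits into `(brokername, key, suffix)`, matching the destructuring above. A toy split showing just the shape; the real parser in `piker.data._source` handles more cases, so this is only an approximation:

    # toy approximation of the fqsn split used in the test above
    def unpack_fqsn_toy(fqsn: str) -> tuple[str, str, str]:
        key, _, broker = fqsn.rpartition('.')
        return broker, key, ''


    assert unpack_fqsn_toy('btcusdt.binance') == ('binance', 'btcusdt', '')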
tests/test_questrade.py

@@ -8,7 +8,6 @@ from trio.testing import trio_test
 from piker.brokers import questrade as qt
 import pytest
 import tractor
-from tractor.testing import tractor_test

 import piker
 from piker.brokers import get_brokermod
@@ -23,6 +22,12 @@ pytestmark = pytest.mark.skipif(
     reason="questrade tests can only be run locally with an API key",
 )

+# TODO: this module was removed from tractor into it's
+# tests/conftest.py, we need to rewrite the below tests
+# to use the `open_pikerd_runtime()` to make these work again
+# (if we're not just gonna junk em).
+# from tractor.testing import tractor_test
+

 # stock quote
 _ex_quotes = {
@@ -106,7 +111,7 @@ def match_packet(symbols, quotes, feed_type='stock'):
     assert not quotes


-@tractor_test
+# @tractor_test
 async def test_concurrent_tokens_refresh(us_symbols, loglevel):
     """Verify that concurrent requests from mulitple tasks work alongside
     random token refreshing which simulates an access token expiry + refresh
@@ -337,7 +342,7 @@ async def stream_stocks(feed, symbols):
         'options_and_options',
     ],
 )
-@tractor_test
+# @tractor_test
 async def test_quote_streaming(tmx_symbols, loglevel, stream_what):
     """Set up option streaming using the broker daemon.
     """