Compare commits

700 commits in `310_plus...update_qt_`
Author | SHA1 | Date |
---|---|---|
Tyler Goodlet | d2e21f45b2 | |
Tyler Goodlet | f31bd42a19 | |
Tyler Goodlet | 0136f502d1 | |
goodboy | dcdfd2577a | |
goodboy | 6733dc57af | |
Tyler Goodlet | 05c4b6afb9 | |
Tyler Goodlet | 4b22325ffc | |
Tyler Goodlet | 9d16299f60 | |
Tyler Goodlet | ab1f15506d | |
Tyler Goodlet | 0db5451e47 | |
goodboy | 61218f30f5 | |
Tyler Goodlet | fcfc0f31f0 | |
Tyler Goodlet | 69074f4fa5 | |
Tyler Goodlet | fe4fb37b58 | |
Tyler Goodlet | 7cfd431a2b | |
Tyler Goodlet | 61e20a86cc | |
Tyler Goodlet | d9b73e1d08 | |
goodboy | 4833d56ecb | |
Tyler Goodlet | 090d1ba524 | |
Tyler Goodlet | afc45a8e16 | |
Tyler Goodlet | 844626f6dc | |
Tyler Goodlet | 470079665f | |
Tyler Goodlet | 0cd87d9e54 | |
Tyler Goodlet | 09711750bf | |
Tyler Goodlet | 71ca4c8e1f | |
Tyler Goodlet | 9811dcf5f3 | |
Tyler Goodlet | da659cf607 | |
Tyler Goodlet | 37e0ec7b7d | |
Tyler Goodlet | 045b76bab5 | |
Tyler Goodlet | c8c641a038 | |
Tyler Goodlet | 6a1bb13feb | |
Tyler Goodlet | 75591dd7e9 | |
Tyler Goodlet | d792fed099 | |
Tyler Goodlet | d66fb49077 | |
Tyler Goodlet | 78c7c8524c | |
Tyler Goodlet | a746258f99 | |
Tyler Goodlet | 5adb234a24 | |
Tyler Goodlet | 2778ee1401 | |
Tyler Goodlet | e0ca5d5200 | |
Tyler Goodlet | b3d1b1aa63 | |
Tyler Goodlet | 5ec1a72a3d | |
Tyler Goodlet | a342f7d2d4 | |
Tyler Goodlet | 2c76cee928 | |
Tyler Goodlet | b5f2ff854c | |
Tyler Goodlet | 3efb0b5884 | |
Tyler Goodlet | 009bbe456e | |
Tyler Goodlet | daf7b3f4a5 | |
Tyler Goodlet | b0a6dd46e4 | |
Tyler Goodlet | 1c5141f4c6 | |
Tyler Goodlet | 4cdd2271b0 | |
Tyler Goodlet | 89095d4e9f | |
Tyler Goodlet | 04c0d77595 | |
Tyler Goodlet | d1b07c625f | |
Tyler Goodlet | a5bb33b0ff | |
Tyler Goodlet | 8e1ceca43d | |
Tyler Goodlet | c85e7790de | |
Tyler Goodlet | 2399c618b6 | |
Tyler Goodlet | 7ec88f8cac | |
Tyler Goodlet | eacd44dd65 | |
Tyler Goodlet | e5e70a6011 | |
Tyler Goodlet | 7da5c2b238 | |
Tyler Goodlet | 1ee49df31d | |
Tyler Goodlet | f2df32a673 | |
Tyler Goodlet | 125e31dbf3 | |
Tyler Goodlet | 715e693564 | |
Tyler Goodlet | 43717c92d9 | |
Tyler Goodlet | f370685c62 | |
Tyler Goodlet | 4300470786 | |
Tyler Goodlet | b89fd9652c | |
Tyler Goodlet | 51f4afbd88 | |
Tyler Goodlet | 7ef8111381 | |
Tyler Goodlet | 35b097469b | |
Tyler Goodlet | 94290c7d8b | |
Tyler Goodlet | 73379d3627 | |
Tyler Goodlet | 23835f2c08 | |
Tyler Goodlet | d2aee00a56 | |
Tyler Goodlet | cf6e44cb9c | |
Tyler Goodlet | a146ad9e69 | |
Tyler Goodlet | 70ad1a1860 | |
Tyler Goodlet | f3ef73ef41 | |
Tyler Goodlet | a9832dc0cb | |
Tyler Goodlet | 9be245e955 | |
Tyler Goodlet | 800773e585 | |
goodboy | 8d1eb81f16 | |
Tyler Goodlet | 963e5bdd62 | |
Tyler Goodlet | 55de9abc41 | |
Tyler Goodlet | 593db0ed0d | |
Tyler Goodlet | 06622105cd | |
Tyler Goodlet | 008ae47e14 | |
Tyler Goodlet | 81585d9e6e | |
Tyler Goodlet | f6b7057b0d | |
Tyler Goodlet | 76f920a16b | |
Tyler Goodlet | f232d6d4ee | |
Tyler Goodlet | b7e1443618 | |
Tyler Goodlet | 5d021ffb85 | |
Tyler Goodlet | 28fd795280 | |
Tyler Goodlet | c944db5f02 | |
Tyler Goodlet | 967e28b7ac | |
Tyler Goodlet | 2a158aea2c | |
Tyler Goodlet | 88870fdda7 | |
Tyler Goodlet | 326f153a47 | |
Tyler Goodlet | f5cd63ad35 | |
Tyler Goodlet | 1e96ca32df | |
Tyler Goodlet | c088963cf2 | |
Tyler Goodlet | 79fcbcc281 | |
Tyler Goodlet | ddbba76095 | |
Tyler Goodlet | 0a959c1c74 | |
Tyler Goodlet | e348968113 | |
Tyler Goodlet | 7bbe86d6fb | |
Tyler Goodlet | 7b9db86753 | |
Tyler Goodlet | 20a396270e | |
Tyler Goodlet | 81516c5204 | |
Tyler Goodlet | d6fb6fe3ae | |
Tyler Goodlet | 8476d8d056 | |
Tyler Goodlet | 36868bb86e | |
Tyler Goodlet | 29b6b3e54f | |
Tyler Goodlet | 8a01c9e42b | |
Tyler Goodlet | 2c4daf08e0 | |
Tyler Goodlet | 7daab6329d | |
Tyler Goodlet | bb6452b969 | |
Tyler Goodlet | 25bfe6f035 | |
Tyler Goodlet | 32b36aa042 | |
Tyler Goodlet | e7de5404d3 | |
Tyler Goodlet | 18dc8b08e4 | |
Tyler Goodlet | 5bf3cb8e4b | |
Tyler Goodlet | c7d5db5f90 | |
Tyler Goodlet | 1bf1965a8b | |
Tyler Goodlet | 051a8729b6 | |
Tyler Goodlet | 8e85ed92c8 | |
Tyler Goodlet | 2a9042b1b1 | |
Tyler Goodlet | 344a634cb6 | |
Tyler Goodlet | 508de6182a | |
Tyler Goodlet | 40000345a1 | |
goodboy | 220d38b4a9 | |
Esmeralda Gallardo | 888438ca25 | |
goodboy | d84bcf77c0 | |
Guillermo Rodriguez | 0474d66531 | |
algorandpa | f218b804b4 | |
Guillermo Rodriguez | 7b14f498a8 | |
Esmeralda Gallardo | 18e4352faf | |
Esmeralda Gallardo | a6e921548b | |
Esmeralda Gallardo | 3f5dec82ed | |
Esmeralda Gallardo | db0b59abaa | |
algorandpa | f5bcd1d91c | |
algorandpa | db11c3c0f8 | |
Tyler Goodlet | df6071ae9e | |
goodboy | cc1694760c | |
goodboy | 4d8b22dd8f | |
Tyler Goodlet | fd296a557e | |
Tyler Goodlet | 0de2f863bd | |
Tyler Goodlet | de93da202b | |
Tyler Goodlet | 5c459f21be | |
goodboy | 5915cf3acf | |
algorandpa | 997bf31bd4 | |
algorandpa | f3427bb13b | |
algorandpa | 6fa266e3e0 | |
Guillermo Rodriguez | 019a6432fb | |
goodboy | 209e1085ae | |
Tyler Goodlet | 0ef75e6aa6 | |
Tyler Goodlet | 243d0329f6 | |
Tyler Goodlet | a0ce9ecc0d | |
Tyler Goodlet | af9c30c3f5 | |
Zoltan | ebbfa47baf | |
Tyler Goodlet | 02fbc0a0ed | |
goodboy | 4729e4c6bc | |
goodboy | a44b8e3e22 | |
goodboy | 8a89303cb3 | |
Tyler Goodlet | e547b307f6 | |
Tyler Goodlet | 72ec9b1e10 | |
Tyler Goodlet | 40c70ae6d8 | |
Tyler Goodlet | d3fefdeaff | |
Tyler Goodlet | 8be005212f | |
Tyler Goodlet | 5a2795e76b | |
Tyler Goodlet | a987f0ab81 | |
Tyler Goodlet | d99b40317d | |
Tyler Goodlet | 9ae519f6fa | |
Tyler Goodlet | 8f3fe8e542 | |
Tyler Goodlet | 490d85aba5 | |
goodboy | ba2e1e04cd | |
Tyler Goodlet | 5d4929db9c | |
Tyler Goodlet | c41400ae18 | |
Tyler Goodlet | e71bd2cb1e | |
Tyler Goodlet | be24473fb4 | |
Tyler Goodlet | b524ea5c22 | |
Tyler Goodlet | d46945cb09 | |
Tyler Goodlet | 1d4fc6f327 | |
Tyler Goodlet | 5976acbe76 | |
goodboy | 11ecf9cb09 | |
goodboy | 2dac531729 | |
Tyler Goodlet | 1fadf58ab7 | |
Tyler Goodlet | ceca0d9fb7 | |
Tyler Goodlet | df16726211 | |
Tyler Goodlet | fb4f1732b6 | |
Tyler Goodlet | d5b357b69a | |
Tyler Goodlet | 610fb5f7c6 | |
Tyler Goodlet | 2b231ba631 | |
Tyler Goodlet | 286228c290 | |
Tyler Goodlet | a1a24da7b6 | |
Tyler Goodlet | 553d0557b6 | |
Tyler Goodlet | 2f7b272d8c | |
Tyler Goodlet | dc1edeecda | |
Tyler Goodlet | 4ca7817735 | |
Tyler Goodlet | 5b63585398 | |
Tyler Goodlet | 0000d9a314 | |
Tyler Goodlet | f7ec66362e | |
Tyler Goodlet | b7ef0596b9 | |
Tyler Goodlet | 143e86a80c | |
Tyler Goodlet | 956c7d3435 | |
Tyler Goodlet | 330d16262e | |
Tyler Goodlet | c7f57b940c | |
Tyler Goodlet | 27bd3c07af | |
Tyler Goodlet | 55dc27a197 | |
Tyler Goodlet | a11f20fac2 | |
Tyler Goodlet | daebb78755 | |
Tyler Goodlet | 90a395a069 | |
Tyler Goodlet | 23d0353934 | |
Tyler Goodlet | ede67ed184 | |
Tyler Goodlet | 811d21e111 | |
Tyler Goodlet | 54567d33da | |
Tyler Goodlet | 61ca5f7e19 | |
Tyler Goodlet | 7396624be0 | |
Tyler Goodlet | 25b90afbdb | |
Tyler Goodlet | 72dfeb2b4e | |
Tyler Goodlet | 6b34c9e866 | |
Tyler Goodlet | e7ec01b8e6 | |
Tyler Goodlet | fce7055c62 | |
Tyler Goodlet | bf7d5e9a71 | |
Tyler Goodlet | 2a866dde65 | |
Tyler Goodlet | 220981e718 | |
Tyler Goodlet | 8537a4091b | |
Tyler Goodlet | 71a11a23bd | |
Tyler Goodlet | fa368b1263 | |
Tyler Goodlet | e6dd1458f8 | |
Tyler Goodlet | 9486d993ce | |
Tyler Goodlet | 30994dac10 | |
Tyler Goodlet | 8a61211c8c | |
Tyler Goodlet | c43f7eb656 | |
goodboy | d05caa4b02 | |
Tyler Goodlet | 63e9af002d | |
goodboy | 5144299f4f | |
Tyler Goodlet | c437f9370a | |
Tyler Goodlet | 94f81587ab | |
Tyler Goodlet | 2bc25e3593 | |
Tyler Goodlet | 1d9ab7b0de | |
Tyler Goodlet | 4c96a4878e | |
Tyler Goodlet | 8cd56cb6d3 | |
Tyler Goodlet | c246dcef6f | |
Tyler Goodlet | 26d6e10ad7 | |
Tyler Goodlet | 3924c66bd0 | |
Tyler Goodlet | 2fbfe583dd | |
Tyler Goodlet | 525f805cdb | |
Tyler Goodlet | b65c02336d | |
Tyler Goodlet | d3abfce540 | |
Tyler Goodlet | 49433ea87d | |
goodboy | 31b0d8cee8 | |
Tyler Goodlet | 35871d0213 | |
Tyler Goodlet | 4877af9bc3 | |
Tyler Goodlet | 909e068121 | |
Tyler Goodlet | cf835b97ca | |
Tyler Goodlet | 30bce42c0b | |
Tyler Goodlet | 48ff4859e6 | |
Tyler Goodlet | 887583d27f | |
Tyler Goodlet | 45b97bf6c3 | |
Tyler Goodlet | 91397b85a4 | |
Tyler Goodlet | 47f81b31af | |
goodboy | 30c452cfd0 | |
Tyler Goodlet | fda1c5b554 | |
goodboy | d6c9834a9a | |
Tyler Goodlet | 41b0c11aaa | |
Tyler Goodlet | cc67d23eee | |
Tyler Goodlet | 4818af1445 | |
Tyler Goodlet | 2cf1742999 | |
Tyler Goodlet | 25ac6e6665 | |
Tyler Goodlet | 90754f979b | |
Tyler Goodlet | c0d490ed63 | |
Tyler Goodlet | 7c6d12d982 | |
Tyler Goodlet | fd8c05e024 | |
Tyler Goodlet | 5d65c86c84 | |
Tyler Goodlet | cf11e8d7d8 | |
Tyler Goodlet | ed868f6246 | |
goodboy | 5d371ad80e | |
Tyler Goodlet | 6897aed6b6 | |
Tyler Goodlet | a61a11f86b | |
Tyler Goodlet | 286f620f8e | |
Tyler Goodlet | b7e60b9653 | |
Tyler Goodlet | df42e7acc4 | |
Tyler Goodlet | e492e9ca0c | |
Tyler Goodlet | 44c6f6dfda | |
Tyler Goodlet | ad2100fe3f | |
Tyler Goodlet | ae64ac79a6 | |
Tyler Goodlet | 20663dfa1c | |
Tyler Goodlet | 70f2241d22 | |
Tyler Goodlet | b3fcc25e21 | |
Tyler Goodlet | 4f15ce346b | |
Tyler Goodlet | 445849337f | |
Tyler Goodlet | 3fd7107e08 | |
Tyler Goodlet | 73a02d54b7 | |
Tyler Goodlet | b734af6dd0 | |
Tyler Goodlet | f7c0ee930a | |
Tyler Goodlet | ead426abc4 | |
Tyler Goodlet | bcd6bbb7ca | |
Tyler Goodlet | 80929d080f | |
Tyler Goodlet | eed47b3733 | |
Tyler Goodlet | d5f0c59b57 | |
Tyler Goodlet | d11dc787a1 | |
Tyler Goodlet | 1e81feee46 | |
Tyler Goodlet | 40a9761943 | |
Tyler Goodlet | 256bcf36d3 | |
Tyler Goodlet | 9944277096 | |
Tyler Goodlet | f9dc5637fa | |
Tyler Goodlet | addedc20f1 | |
Tyler Goodlet | 1fa6e8d9ba | |
Tyler Goodlet | 2a06dc997f | |
Tyler Goodlet | 6b93eedcda | |
Tyler Goodlet | a786df65de | |
Tyler Goodlet | 8f2823d5f0 | |
Tyler Goodlet | 58fe220fde | |
Tyler Goodlet | 161448c31a | |
Tyler Goodlet | 1c685189d1 | |
Tyler Goodlet | ceac3f2ee4 | |
Tyler Goodlet | a07367fae2 | |
Tyler Goodlet | 006190d227 | |
Tyler Goodlet | 412197019e | |
Tyler Goodlet | 271e378ce3 | |
Tyler Goodlet | 8e07fda88f | |
Tyler Goodlet | a4935b8fa8 | |
Tyler Goodlet | 2b76baeb10 | |
Tyler Goodlet | 2dfa8976a0 | |
Tyler Goodlet | d3402f715b | |
Tyler Goodlet | f070f9a984 | |
Tyler Goodlet | 416270ee6c | |
Tyler Goodlet | 14bee778ec | |
Tyler Goodlet | 10c1944de5 | |
Tyler Goodlet | 7958d8ad4f | |
Tyler Goodlet | 50c5dc255c | |
Tyler Goodlet | 31735f26d3 | |
Tyler Goodlet | 2ef6460853 | |
Tyler Goodlet | 5e98a30537 | |
Tyler Goodlet | dd03ef42ac | |
Tyler Goodlet | 59884d251e | |
Tyler Goodlet | e06e257a81 | |
Tyler Goodlet | 6e574835c8 | |
Tyler Goodlet | 49ccfdd673 | |
Tyler Goodlet | 3a434f312b | |
Tyler Goodlet | bb4dc448b3 | |
Tyler Goodlet | 9846396df2 | |
Tyler Goodlet | f0d417ce42 | |
Tyler Goodlet | 55fc4114b4 | |
Tyler Goodlet | 97b074365b | |
Tyler Goodlet | f79c3617d6 | |
Tyler Goodlet | 861fe791eb | |
Tyler Goodlet | 60052ff73a | |
Tyler Goodlet | 4d2708cd42 | |
Tyler Goodlet | d1cc52dff5 | |
Tyler Goodlet | 4fa901dbcb | |
goodboy | f2c488c1e0 | |
Tyler Goodlet | 4a9c16d298 | |
Tyler Goodlet | b9d5b904f4 | |
Tyler Goodlet | 0aef762d9a | |
goodboy | c724117c1a | |
Tyler Goodlet | cc3bb85c66 | |
goodboy | 20817313b1 | |
Tyler Goodlet | 23d0b8a7ac | |
goodboy | 087a34f061 | |
Tyler Goodlet | 653f5c824b | |
Tyler Goodlet | f9217570ab | |
Tyler Goodlet | 7f224f0342 | |
Tyler Goodlet | 75a5f3795a | |
Tyler Goodlet | de9f215c83 | |
Tyler Goodlet | 848e345364 | |
Tyler Goodlet | 38b190e598 | |
Tyler Goodlet | 3a9bc8058f | |
Guillermo Rodriguez | 739a231afc | |
Tyler Goodlet | 7dfa4c3cde | |
Tyler Goodlet | 7b653fe4f4 | |
goodboy | 77a687bced | |
Tyler Goodlet | d5c1cdd91d | |
Tyler Goodlet | 46d3fe88ca | |
Tyler Goodlet | 5c8c5d8fbf | |
goodboy | 71412310c4 | |
Guillermo Rodriguez | 0c323fdc0b | |
Tyler Goodlet | 02f53d0c13 | |
Tyler Goodlet | 8792c97de6 | |
Tyler Goodlet | 980815d075 | |
Tyler Goodlet | 4cedfedc21 | |
Tyler Goodlet | fe3d0c6fdd | |
Tyler Goodlet | 9200e8da57 | |
Tyler Goodlet | 430d065da6 | |
Tyler Goodlet | ecd93cb05a | |
Guillermo Rodriguez | 4facd161a9 | |
goodboy | c5447fda06 | |
Guillermo Rodriguez | 0447612b34 | |
goodboy | b5499b8225 | |
Guillermo Rodriguez | 00aabddfe8 | |
Guillermo Rodriguez | 43fb720877 | |
Guillermo Rodriguez | 9626dbd7ac | |
Guillermo Rodriguez | f286c79a03 | |
Guillermo Rodriguez | accb0eee6c | |
Guillermo Rodriguez | e97dd1cbdb | |
Guillermo Rodriguez | 34fb497eb4 | |
Guillermo Rodriguez | 6669ba6590 | |
Guillermo Rodriguez | cb8099bb8c | |
Guillermo Rodriguez | 80a1a58bfc | |
Guillermo Rodriguez | d60f222bb7 | |
Guillermo Rodriguez | 2c2e43d8ac | |
Guillermo Rodriguez | 212b3d620d | |
Guillermo Rodriguez | 92090b01b8 | |
Guillermo Rodriguez | 9073fbc317 | |
Guillermo Rodriguez | f55f56a29f | |
Guillermo Rodriguez | 28e025d02e | |
Guillermo Rodriguez | e558e5837e | |
Guillermo Rodriguez | a0b415095a | |
Guillermo Rodriguez | 6df181c233 | |
Guillermo Rodriguez | 7acc4e3208 | |
Guillermo Rodriguez | 10ea242143 | |
Tyler Goodlet | eda6ecd529 | |
goodboy | cf5b0bf9c6 | |
Tyler Goodlet | b9dba48306 | |
Tyler Goodlet | 4d2e23b5ce | |
Tyler Goodlet | 973bf87e67 | |
Tyler Goodlet | 5861839783 | |
Tyler Goodlet | 06845e5504 | |
Tyler Goodlet | 43bdd4d022 | |
Tyler Goodlet | bafd2cb44f | |
Tyler Goodlet | be8fd32e7d | |
Tyler Goodlet | ee8c00684b | |
Tyler Goodlet | 7379dc03af | |
Tyler Goodlet | a602c47d47 | |
Tyler Goodlet | 317610e00a | |
Tyler Goodlet | c4af706d51 | |
Tyler Goodlet | 665bb183f7 | |
Tyler Goodlet | f6ba95a6c7 | |
Tyler Goodlet | e2cd8c4aef | |
Tyler Goodlet | c8bff81220 | |
Tyler Goodlet | 2aec1c5f1d | |
Tyler Goodlet | bec32956a8 | |
Tyler Goodlet | 91fdc7c5c7 | |
Tyler Goodlet | b59ed74bc1 | |
Tyler Goodlet | 16012f6f02 | |
Tyler Goodlet | 2b61672723 | |
Tyler Goodlet | 176b230a46 | |
Tyler Goodlet | 7fa9dbf869 | |
Tyler Goodlet | 87ed9abefa | |
Tyler Goodlet | 2548aae73d | |
Tyler Goodlet | 1cfa04927d | |
Tyler Goodlet | e34ea94f9f | |
Tyler Goodlet | 1510383738 | |
Tyler Goodlet | 016b669d63 | |
Tyler Goodlet | 682a0191ef | |
Tyler Goodlet | 9e36dbe47f | |
goodboy | 8bef67642e | |
Tyler Goodlet | 52febac6ae | |
Tyler Goodlet | f202699c25 | |
Tyler Goodlet | 0fb07670d2 | |
Tyler Goodlet | 73d2e7716f | |
Tyler Goodlet | 999ae5a1c6 | |
Tyler Goodlet | 23ba0e5e69 | |
Tyler Goodlet | 941a2196b3 | |
Tyler Goodlet | 0cf4e07b84 | |
Tyler Goodlet | 7bec989eed | |
Tyler Goodlet | 6856ca207f | |
Guillermo Rodriguez | 2e5616850c | |
Tyler Goodlet | a83bd9c608 | |
goodboy | 9651ca84bf | |
Tyler Goodlet | 109b35f6eb | |
Tyler Goodlet | e28c1748fc | |
Tyler Goodlet | 72889b4d1f | |
Tyler Goodlet | ae001c3dd7 | |
Tyler Goodlet | 2309e7ab05 | |
Tyler Goodlet | 46c51b55f7 | |
goodboy | a9185e7d6f | |
Tyler Goodlet | 3a0987e0be | |
Tyler Goodlet | d280a592b1 | |
goodboy | ef5829a6b7 | |
Tyler Goodlet | 30bcfdcc83 | |
Tyler Goodlet | 1a291939c3 | |
Tyler Goodlet | 69e501764a | |
goodboy | 7f3f7f0372 | |
Tyler Goodlet | 1cbf45b4c4 | |
Tyler Goodlet | 227a80469e | |
Tyler Goodlet | dc8072c6db | |
Tyler Goodlet | 808dbb12e6 | |
Tyler Goodlet | 44e21b1de9 | |
Tyler Goodlet | b3058b8c78 | |
Tyler Goodlet | db564d7977 | |
Tyler Goodlet | e6a3e8b65a | |
Tyler Goodlet | d43ba47ebe | |
Tyler Goodlet | 168c9863cb | |
Tyler Goodlet | 0fb31586fd | |
Tyler Goodlet | 8b609f531b | |
Tyler Goodlet | d502274eb9 | |
Tyler Goodlet | b1419c850d | |
Tyler Goodlet | aa7f24b6db | |
Tyler Goodlet | 319e68c855 | |
Tyler Goodlet | 64f920d7e5 | |
Tyler Goodlet | 3b79743c7b | |
Tyler Goodlet | 54008a1976 | |
Tyler Goodlet | b96b7a8b9c | |
Tyler Goodlet | 0fca1b3e1a | |
Tyler Goodlet | 2386270cad | |
Tyler Goodlet | 5b135fad61 | |
Tyler Goodlet | abb6854e74 | |
Tyler Goodlet | 22f9b2552c | |
Tyler Goodlet | 57f2478dc7 | |
Tyler Goodlet | 5dc9a61ec4 | |
Tyler Goodlet | b0d3d9bb01 | |
Tyler Goodlet | caecbaa231 | |
Tyler Goodlet | a20a8d95d5 | |
Tyler Goodlet | ba93f96c71 | |
Tyler Goodlet | 804e9afdde | |
Tyler Goodlet | 89bcaed15e | |
Tyler Goodlet | bb2f8e4304 | |
Tyler Goodlet | 8ab8268edc | |
Tyler Goodlet | bbcc55b24c | |
Tyler Goodlet | 9fa9c27e4d | |
Tyler Goodlet | d9b4c4a413 | |
Tyler Goodlet | 84cab1327d | |
Tyler Goodlet | df4cec930b | |
Tyler Goodlet | ab08dc582d | |
Tyler Goodlet | f79d9865a0 | |
Tyler Goodlet | 00378c330c | |
goodboy | 180b97b180 | |
Tyler Goodlet | f0b3a4d5c0 | |
goodboy | e2e66324cc | |
Tyler Goodlet | d950c78b81 | |
Tyler Goodlet | 7dbcbfdcd5 | |
Tyler Goodlet | 279c899de5 | |
Tyler Goodlet | db5aacdb9c | |
Tyler Goodlet | c7b84ab500 | |
Tyler Goodlet | 9967adb371 | |
Tyler Goodlet | 30ff793a22 | |
Tyler Goodlet | 666587991a | |
goodboy | 01005e40a8 | |
goodboy | d81e629c29 | |
Tyler Goodlet | 2766fad719 | |
Tyler Goodlet | ae71168216 | |
Tyler Goodlet | a0c238daa7 | |
Tyler Goodlet | 7cbdc6a246 | |
Tyler Goodlet | 2ff8be71aa | |
Tyler Goodlet | ddffaa952d | |
Tyler Goodlet | 5520e9ef21 | |
Tyler Goodlet | 958e542f7d | |
goodboy | 927bbc7258 | |
Tyler Goodlet | 45bef0cea9 | |
goodboy | a3d46f713e | |
Tyler Goodlet | 5684120c11 | |
Tyler Goodlet | ddb195ed2c | |
Tyler Goodlet | 6747831677 | |
Tyler Goodlet | 9326379b04 | |
Tyler Goodlet | 09d9a7ea2b | |
Tyler Goodlet | 45871d5846 | |
goodboy | bf7a49c19b | |
goodboy | 0a7fce087c | |
Tyler Goodlet | d3130ca04c | |
Tyler Goodlet | e30a3c5b54 | |
Tyler Goodlet | 2393965e83 | |
Tyler Goodlet | fb39da19f4 | |
Tyler Goodlet | a27431c34f | |
Tyler Goodlet | 070b9f3dc1 | |
goodboy | f2dba44169 | |
Tyler Goodlet | 0ef5da0881 | |
Tyler Goodlet | 0580b204a3 | |
Tyler Goodlet | 6ce699ae1f | |
Tyler Goodlet | 3aa72abacf | |
Tyler Goodlet | 04004525c1 | |
Tyler Goodlet | a7f0adf1cf | |
Tyler Goodlet | cef511092d | |
Tyler Goodlet | 4e5df973a9 | |
Tyler Goodlet | 6a1a62d8c0 | |
Tyler Goodlet | e0491cf2e7 | |
Tyler Goodlet | 90bc9b9730 | |
goodboy | f449672c68 | |
Tyler Goodlet | fd22f45178 | |
goodboy | 37f634a2ed | |
Tyler Goodlet | dfee9dd97e | |
Tyler Goodlet | 2a99f7a4d7 | |
Tyler Goodlet | b44e2d9ed9 | |
Tyler Goodlet | 795d4d76f4 | |
Tyler Goodlet | c26acb1fa8 | |
Tyler Goodlet | 11b6699a54 | |
Tyler Goodlet | f9bdd643cf | |
Tyler Goodlet | 2baea21c7d | |
Tyler Goodlet | bea0111753 | |
Tyler Goodlet | c870665be0 | |
Tyler Goodlet | 4ff1090284 | |
Tyler Goodlet | f22461a844 | |
Tyler Goodlet | 458c7211ee | |
Tyler Goodlet | 5cc4b19a7c | |
goodboy | f5236f658b | |
goodboy | a360b66cc0 | |
Tyler Goodlet | 4bcb791161 | |
Tyler Goodlet | 4c7c78c815 | |
Tyler Goodlet | 019867b413 | |
Tyler Goodlet | f356fb0a68 | |
goodboy | 756249ff70 | |
goodboy | 419ebebe72 | |
goodboy | a229996ebe | |
Tyler Goodlet | af01e89612 | |
Tyler Goodlet | 609034c634 | |
Tyler Goodlet | 95dd0e6bd6 | |
goodboy | 479ad1bb15 | |
Tyler Goodlet | d506235a8b | |
Tyler Goodlet | 7846446a44 | |
Tyler Goodlet | 214f864dcf | |
Tyler Goodlet | 4c0f2099aa | |
Tyler Goodlet | aea7bec2c3 | |
Tyler Goodlet | 47777e4192 | |
Tyler Goodlet | f6888057c3 | |
Tyler Goodlet | f65f56ec75 | |
Tyler Goodlet | 5d39b04552 | |
Tyler Goodlet | 735fbc6259 | |
Tyler Goodlet | fcd7e0f3f3 | |
Tyler Goodlet | 9106d13dfe | |
Tyler Goodlet | d3caad6e11 | |
Tyler Goodlet | f87a2a810a | |
Tyler Goodlet | 208e2e9e97 | |
Tyler Goodlet | 90cc6eb317 | |
Tyler Goodlet | b118becc84 | |
Tyler Goodlet | 7442d68ecf | |
Tyler Goodlet | 076c167d6e | |
Tyler Goodlet | 64d8cd448f | |
Tyler Goodlet | ec6a28a8b1 | |
Tyler Goodlet | cc15d02488 | |
goodboy | d5bc43e8dd | |
Tyler Goodlet | 287a2c8396 | |
Tyler Goodlet | 453ebdfe30 | |
Tyler Goodlet | 2b1fb90e03 | |
Tyler Goodlet | 695ba5288d | |
Tyler Goodlet | d6c32bba86 | |
Tyler Goodlet | fa89207583 | |
Tyler Goodlet | 557562e25c | |
Tyler Goodlet | c6efa2641b | |
Tyler Goodlet | 8a7e391b4e | |
Tyler Goodlet | aec48a1dd5 | |
Tyler Goodlet | 87f301500d | |
Tyler Goodlet | 566a54ffb6 | |
Tyler Goodlet | f9c4b3cc96 | |
Tyler Goodlet | a12e6800ff | |
Tyler Goodlet | cc68501c7a | |
Tyler Goodlet | 7ebf8a8dc0 | |
Tyler Goodlet | 4475823e48 | |
Tyler Goodlet | 3713288b48 | |
Tyler Goodlet | 4fdfb81876 | |
Tyler Goodlet | f32b4d37cb | |
Tyler Goodlet | 2063b9d8bb | |
Tyler Goodlet | fe14605034 | |
Tyler Goodlet | 68b32208de | |
Tyler Goodlet | f1fe369bbf | |
Tyler Goodlet | 16b2937d23 | |
Tyler Goodlet | bfad676b7c | |
Tyler Goodlet | c617a06905 | |
Tyler Goodlet | ff74f4302a | |
Tyler Goodlet | 21153a0e1e | |
Tyler Goodlet | b6f344f34a | |
Tyler Goodlet | ecdc747ced | |
Tyler Goodlet | 5147cd7be0 | |
Tyler Goodlet | 3dcb72d429 | |
Tyler Goodlet | fbee33b00d | |
Tyler Goodlet | 3991d8f911 | |
Tyler Goodlet | 7b2e8f1ba5 | |
Tyler Goodlet | cbcbb2b243 | |
Tyler Goodlet | cd3bfb1ea4 | |
Tyler Goodlet | 82b718d5a3 | |
Tyler Goodlet | 05a1a4e3d8 | |
Tyler Goodlet | 412138a75b | |
Tyler Goodlet | c1b63f4757 | |
Tyler Goodlet | 5d774bef90 | |
Tyler Goodlet | de77c7d209 | |
Tyler Goodlet | ce1eb11b59 | |
Tyler Goodlet | b629ce177d | |
Tyler Goodlet | 73fa320917 | |
Tyler Goodlet | dd05ed1371 | |
Tyler Goodlet | 2a641ab8b4 | |
Tyler Goodlet | f8f7ca350c | |
Tyler Goodlet | 88b4ccc768 | |
Tyler Goodlet | eb2bad5138 | |
Tyler Goodlet | f768576060 | |
Tyler Goodlet | add0e92335 | |
Tyler Goodlet | 1eb7e109e6 | |
Tyler Goodlet | 725909a94c | |
Tyler Goodlet | 050aa7594c | |
Tyler Goodlet | 450009ff9c | |
goodboy | b2d5892010 | |
goodboy | 5a3b465ac0 | |
Tyler Goodlet | be7afdaa89 | |
Tyler Goodlet | 1c561207f5 | |
Tyler Goodlet | ed2c962bb9 | |
Tyler Goodlet | 147ceca016 | |
Tyler Goodlet | 03a7940f83 | |
Tyler Goodlet | dd2a9f74f1 | |
Tyler Goodlet | 49c720af3c | |
Tyler Goodlet | c620517543 | |
Tyler Goodlet | a425c29ef1 | |
Tyler Goodlet | 783914c7fe | |
Tyler Goodlet | 920a394539 | |
Tyler Goodlet | e977597cd0 | |
Tyler Goodlet | 7a33ba64f1 | |
Tyler Goodlet | 191b94b67c | |
Tyler Goodlet | 4ad7b073c3 | |
Tyler Goodlet | d92ff9c7a0 |
```diff
@@ -3,9 +3,8 @@ name: CI
 on:
   # Triggers the workflow on push or pull request events but only for the master branch
-  push:
-    branches: [ master ]
   pull_request:
+  push:
     branches: [ master ]
 
   # Allows you to run this workflow manually from the Actions tab
@@ -14,6 +13,27 @@ on:
 jobs:
 
+  # test that we can generate a software distribution and install it
+  # thus avoid missing file issues after packaging.
+  sdist-linux:
+    name: 'sdist'
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Setup python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.10'
+
+      - name: Build sdist
+        run: python setup.py sdist --formats=zip
+
+      - name: Install sdist from .zips
+        run: python -m pip install dist/*.zip
+
   testing:
     name: 'install + test-suite'
     runs-on: ubuntu-latest
```
```diff
@@ -50,3 +50,8 @@ prefer_data_account = [
 paper = "XX0000000"
 margin = "X0000000"
 ira = "X0000000"
+
+
+[deribit]
+key_id = 'XXXXXXXX'
+key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'
```
```diff
@@ -3,11 +3,12 @@
 version: "3.5"
 
 services:
-  ib-gateway:
+  ib_gw_paper:
     # other image tags available:
     # https://github.com/waytrade/ib-gateway-docker#supported-tags
-    image: waytrade/ib-gateway:981.3j
-    restart: always
+    # image: waytrade/ib-gateway:981.3j
+    image: waytrade/ib-gateway:1012.2i
+    restart: 'no'  # restart on boot whenev there's a crash or user clicsk
     network_mode: 'host'
 
     volumes:
@@ -39,14 +40,12 @@ services:
     # this compose file which looks something like:
     # TWS_USERID='myuser'
     # TWS_PASSWORD='guest'
-    # TRADING_MODE=paper (or live)
-    # VNC_SERVER_PASSWORD='diggity'
 
     environment:
       TWS_USERID: ${TWS_USERID}
       TWS_PASSWORD: ${TWS_PASSWORD}
-      TRADING_MODE: ${TRADING_MODE:-paper}
-      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}
+      TRADING_MODE: 'paper'
+      VNC_SERVER_PASSWORD: 'doggy'
+      VNC_SERVER_PORT: '3003'
 
     # ports:
     #   - target: 4002
@@ -62,3 +61,40 @@ services:
     #   - "127.0.0.1:4001:4001"
     #   - "127.0.0.1:4002:4002"
     #   - "127.0.0.1:5900:5900"
+
+  # ib_gw_live:
+  #   image: waytrade/ib-gateway:1012.2i
+  #   restart: no
+  #   network_mode: 'host'
+
+  #   volumes:
+  #     - type: bind
+  #       source: ./jts_live.ini
+  #       target: /root/jts/jts.ini
+  #       # don't let ibc clobber this file for
+  #       # the main reason of not having a stupid
+  #       # timezone set..
+  #       read_only: true
+
+  #     # force our own ibc config
+  #     - type: bind
+  #       source: ./ibc.ini
+  #       target: /root/ibc/config.ini
+
+  #     # force our noop script - socat isn't needed in host mode.
+  #     - type: bind
+  #       source: ./fork_ports_delayed.sh
+  #       target: /root/scripts/fork_ports_delayed.sh
+
+  #     # force our noop script - socat isn't needed in host mode.
+  #     - type: bind
+  #       source: ./run_x11_vnc.sh
+  #       target: /root/scripts/run_x11_vnc.sh
+  #       read_only: true
+
+  #   # NOTE: to fill these out, define an `.env` file in the same dir as
+  #   # this compose file which looks something like:
+  #   environment:
+  #     TRADING_MODE: 'live'
+  #     VNC_SERVER_PASSWORD: 'doggy'
+  #     VNC_SERVER_PORT: '3004'
```
```diff
@@ -188,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes
 #
 # The default value is 60.
 
-LoginDialogDisplayTimeout = 60
+LoginDialogDisplayTimeout=20
 
 
 
@@ -292,7 +292,7 @@ ExistingSessionDetectedAction=primary
 # be set dynamically at run-time: most users will never need it,
 # so don't use it unless you know you need it.
 
-OverrideTwsApiPort=4002
+; OverrideTwsApiPort=4002
 
 
 # Read-only Login
```
```diff
@@ -0,0 +1,33 @@
+[IBGateway]
+ApiOnly=true
+LocalServerPort=4001
+# NOTE: must be set if using IBC's "reject" mode
+TrustedIPs=127.0.0.1
+; RemoteHostOrderRouting=ndc1.ibllc.com
+; WriteDebug=true
+; RemotePortOrderRouting=4001
+; useRemoteSettings=false
+; tradingMode=p
+; Steps=8
+; colorPalletName=dark
+
+# window geo, this may be useful for sending `xdotool` commands?
+; MainWindow.Width=1986
+; screenHeight=3960
+
+
+[Logon]
+Locale=en
+# most markets are oriented around this zone
+# so might as well hard code it.
+TimeZone=America/New_York
+UseSSL=true
+displayedproxymsg=1
+os_titlebar=true
+s3store=true
+useRemoteSettings=false
+
+[Communication]
+ctciAutoEncrypt=true
+Region=usr
+; Peer=cdc1.ibllc.com:4001
```
```diff
@@ -1,16 +1,35 @@
 #!/bin/sh
+# start vnc server and listen for connections
+# on port specced in `$VNC_SERVER_PORT`
 
-# start VNC server
 x11vnc \
-    -ncache_cr \
-    -listen localhost \
+    -listen 127.0.0.1 \
+    -allow 127.0.0.1 \
+    -rfbport "${VNC_SERVER_PORT}" \
     -display :1 \
     -forever \
     -shared \
-    -logappend /var/log/x11vnc.log \
     -bg \
+    -nowf \
+    -noxdamage \
+    -noxfixes \
+    -no6 \
     -noipv6 \
-    -autoport 3003 \
-    # can't use this because of ``asyncvnc`` issue:
+    # -nowcr \
+    # TODO: can't use this because of ``asyncvnc`` issue:
     # https://github.com/barneygale/asyncvnc/issues/1
     # -passwd 'ibcansmbz'
+
+# XXX: optional graphics caching flags that seem to rekt the overlay
+# of the 2 gw windows? When running a single gateway
+# this seems to maybe optimize some memory usage?
+# -ncache_cr \
+# -ncache \
+
+# NOTE: this will prevent logs from going to the console.
+# -logappend /var/log/x11vnc.log \
+
+# where to start allocating ports
+# -autoport "${VNC_SERVER_PORT}" \
```
```diff
@@ -18,3 +18,10 @@
 piker: trading gear for hackers.
 
 """
+from ._daemon import open_piker_runtime
+from .data.feed import open_feed
+
+__all__ = [
+    'open_piker_runtime',
+    'open_feed',
+]
```
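These two re-exports make the package's main entrypoints importable directly from `piker`. A minimal, hypothetical usage sketch (the fqsn string and the `feed.stream` attribute are assumptions for illustration, not shown in this diff):

```python
import trio
from piker import open_piker_runtime, open_feed


async def main() -> None:
    # start (or sync with) the piker actor runtime, then open a feed
    async with (
        open_piker_runtime('feed_reader'),
        open_feed(['btcusdt.binance']) as feed,  # hypothetical fqsn
    ):
        async for quote in feed.stream:  # assumed attribute
            print(quote)
            break


trio.run(main)
```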
piker/_daemon.py (531 lines changed)
```diff
@@ -18,16 +18,27 @@
 Structured, daemon tree service management.
 
 """
-from typing import Optional, Union, Callable, Any
-from contextlib import asynccontextmanager as acm
+from __future__ import annotations
+import os
+from typing import (
+    Optional,
+    Callable,
+    Any,
+    ClassVar,
+)
+from contextlib import (
+    asynccontextmanager as acm,
+)
 from collections import defaultdict
 
-from pydantic import BaseModel
+import tractor
 import trio
 from trio_typing import TaskStatus
-import tractor
 
-from .log import get_logger, get_console_log
+from .log import (
+    get_logger,
+    get_console_log,
+)
 from .brokers import get_brokermod
```
```diff
@@ -35,28 +46,118 @@ log = get_logger(__name__)
 
 _root_dname = 'pikerd'
 
-_registry_addr = ('127.0.0.1', 6116)
-_tractor_kwargs: dict[str, Any] = {
-    # use a different registry addr then tractor's default
-    'arbiter_addr': _registry_addr
-}
+_default_registry_host: str = '127.0.0.1'
+_default_registry_port: int = 6116
+_default_reg_addr: tuple[str, int] = (
+    _default_registry_host,
+    _default_registry_port,
+)
 
+# NOTE: this value is set as an actor-global once the first endpoint
+# who is capable, spawns a `pikerd` service tree.
+_registry: Registry | None = None
+
+
+class Registry:
+    addr: None | tuple[str, int] = None
+
+    # TODO: table of uids to sockaddrs
+    peers: dict[
+        tuple[str, str],
+        tuple[str, int],
+    ] = {}
+
+
+_tractor_kwargs: dict[str, Any] = {}
+
+
+@acm
+async def open_registry(
+    addr: None | tuple[str, int] = None,
+    ensure_exists: bool = True,
+
+) -> tuple[str, int]:
+
+    global _tractor_kwargs
+    actor = tractor.current_actor()
+    uid = actor.uid
+    if (
+        Registry.addr is not None
+        and addr
+    ):
+        raise RuntimeError(
+            f'`{uid}` registry addr already bound @ {_registry.sockaddr}'
+        )
+
+    was_set: bool = False
+
+    if (
+        not tractor.is_root_process()
+        and Registry.addr is None
+    ):
+        Registry.addr = actor._arb_addr
+
+    if (
+        ensure_exists
+        and Registry.addr is None
+    ):
+        raise RuntimeError(
+            f"`{uid}` registry should already exist bug doesn't?"
+        )
+
+    if (
+        Registry.addr is None
+    ):
+        was_set = True
+        Registry.addr = addr or _default_reg_addr
+
+    _tractor_kwargs['arbiter_addr'] = Registry.addr
+
+    try:
+        yield Registry.addr
+    finally:
+        # XXX: always clear the global addr if we set it so that the
+        # next (set of) calls will apply whatever new one is passed
+        # in.
+        if was_set:
+            Registry.addr = None
+
+
+def get_tractor_runtime_kwargs() -> dict[str, Any]:
+    '''
+    Deliver ``tractor`` related runtime variables in a `dict`.
+
+    '''
+    return _tractor_kwargs
+
 
 _root_modules = [
     __name__,
     'piker.clearing._ems',
     'piker.clearing._client',
+    'piker.data._sampling',
 ]
 
 
-class Services(BaseModel):
+# TODO: factor this into a ``tractor.highlevel`` extension
+# pack for the library.
+class Services:
 
     actor_n: tractor._supervise.ActorNursery
     service_n: trio.Nursery
     debug_mode: bool  # tractor sub-actor debug mode flag
-    service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}
-
-    class Config:
-        arbitrary_types_allowed = True
+    service_tasks: dict[
+        str,
+        tuple[
+            trio.CancelScope,
+            tractor.Portal,
+            trio.Event,
+        ]
+    ] = {}
+    locks = defaultdict(trio.Lock)
 
+    @classmethod
     async def start_service_task(
         self,
         name: str,
```
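The new `open_registry()` acm replaces the hard-coded `_registry_addr` module constant. A short illustrative sketch of the bind-or-read-back pattern it enables (mirrors the calls made further down in this diff):

```python
from piker._daemon import open_registry


async def show_registry() -> None:
    # bind-or-read the actor-global registry sockaddr; with
    # `ensure_exists=False` a default is bound when none is set yet
    async with open_registry(ensure_exists=False) as reg_addr:
        host, port = reg_addr
        print(f'registry bound @ {host}:{port}')
```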
```diff
@@ -75,7 +176,12 @@ class Services(BaseModel):
         '''
         async def open_context_in_task(
             task_status: TaskStatus[
-                trio.CancelScope] = trio.TASK_STATUS_IGNORED,
+                tuple[
+                    trio.CancelScope,
+                    trio.Event,
+                    Any,
+                ]
+            ] = trio.TASK_STATUS_IGNORED,
 
         ) -> Any:
```
```diff
@@ -87,143 +193,173 @@ class Services(BaseModel):
             ) as (ctx, first):
 
                 # unblock once the remote context has started
-                task_status.started((cs, first))
+                complete = trio.Event()
+                task_status.started((cs, complete, first))
                 log.info(
                     f'`pikerd` service {name} started with value {first}'
                 )
                 try:
                     # wait on any context's return value
+                    # and any final portal result from the
+                    # sub-actor.
                     ctx_res = await ctx.result()
-                except tractor.ContextCancelled:
-                    return await self.cancel_service(name)
-                else:
-                    # wait on any error from the sub-actor
-                    # NOTE: this will block indefinitely until
-                    # cancelled either by error from the target
-                    # context function or by being cancelled here by
-                    # the surrounding cancel scope
+
+                    # NOTE: blocks indefinitely until cancelled
+                    # either by error from the target context
+                    # function or by being cancelled here by the
+                    # surrounding cancel scope.
                     return (await portal.result(), ctx_res)
 
-        cs, first = await self.service_n.start(open_context_in_task)
+                finally:
+                    await portal.cancel_actor()
+                    complete.set()
+                    self.service_tasks.pop(name)
+
+        cs, complete, first = await self.service_n.start(open_context_in_task)
 
         # store the cancel scope and portal for later cancellation or
         # retstart if needed.
-        self.service_tasks[name] = (cs, portal)
+        self.service_tasks[name] = (cs, portal, complete)
 
         return cs, first
 
-    # TODO: per service cancellation by scope, we aren't using this
-    # anywhere right?
+    @classmethod
     async def cancel_service(
         self,
         name: str,
 
     ) -> Any:
+        '''
+        Cancel the service task and actor for the given ``name``.
+
+        '''
         log.info(f'Cancelling `pikerd` service {name}')
-        cs, portal = self.service_tasks[name]
-        # XXX: not entirely sure why this is required,
-        # and should probably be better fine tuned in
-        # ``tractor``?
+        cs, portal, complete = self.service_tasks[name]
         cs.cancel()
-        return await portal.cancel_actor()
-
-
-_services: Optional[Services] = None
-
-
-@acm
-async def open_pikerd(
-    start_method: str = 'trio',
-    loglevel: Optional[str] = None,
-
-    # XXX: you should pretty much never want debug mode
-    # for data daemons when running in production.
-    debug_mode: bool = False,
-
-) -> Optional[tractor._portal.Portal]:
-    '''
-    Start a root piker daemon who's lifetime extends indefinitely
-    until cancelled.
-
-    A root actor nursery is created which can be used to create and keep
-    alive underling services (see below).
-
-    '''
-    global _services
-    assert _services is None
-
-    # XXX: this may open a root actor as well
-    async with (
-        tractor.open_root_actor(
-
-            # passed through to ``open_root_actor``
-            arbiter_addr=_registry_addr,
-            name=_root_dname,
-            loglevel=loglevel,
-            debug_mode=debug_mode,
-            start_method=start_method,
-
-            # TODO: eventually we should be able to avoid
-            # having the root have more then permissions to
-            # spawn other specialized daemons I think?
-            enable_modules=_root_modules,
-        ) as _,
-
-        tractor.open_nursery() as actor_nursery,
-    ):
-        async with trio.open_nursery() as service_nursery:
-
-            # # setup service mngr singleton instance
-            # async with AsyncExitStack() as stack:
-
-            # assign globally for future daemon/task creation
-            _services = Services(
-                actor_n=actor_nursery,
-                service_n=service_nursery,
-                debug_mode=debug_mode,
-            )
-
-            yield _services
+        await complete.wait()
+        assert name not in self.service_tasks, \
+            f'Serice task for {name} not terminated?'
 
 
 @acm
 async def open_piker_runtime(
     name: str,
     enable_modules: list[str] = [],
-    start_method: str = 'trio',
     loglevel: Optional[str] = None,
 
+    # XXX NOTE XXX: you should pretty much never want debug mode
+    # for data daemons when running in production.
+    debug_mode: bool = False,
+
+    registry_addr: None | tuple[str, int] = None,
+
+    # TODO: once we have `rsyscall` support we will read a config
+    # and spawn the service tree distributed per that.
+    start_method: str = 'trio',
+
+    tractor_kwargs: dict = {},
+
+) -> tuple[
+    tractor.Actor,
+    tuple[str, int],
+]:
+    '''
+    Start a piker actor who's runtime will automatically sync with
+    existing piker actors on the local link based on configuration.
+
+    Can be called from a subactor or any program that needs to start
+    a root actor.
+
+    '''
+    try:
+        # check for existing runtime
+        actor = tractor.current_actor().uid
+
+    except tractor._exceptions.NoRuntime:
+
+        registry_addr = registry_addr or _default_reg_addr
+
+        async with (
+            tractor.open_root_actor(
+
+                # passed through to ``open_root_actor``
+                arbiter_addr=registry_addr,
+                name=name,
+                loglevel=loglevel,
+                debug_mode=debug_mode,
+                start_method=start_method,
+
+                # TODO: eventually we should be able to avoid
+                # having the root have more then permissions to
+                # spawn other specialized daemons I think?
+                enable_modules=enable_modules,
+
+                **tractor_kwargs,
+            ) as _,
+
+            open_registry(registry_addr, ensure_exists=False) as addr,
+        ):
+            yield (
+                tractor.current_actor(),
+                addr,
+            )
+    else:
+        async with open_registry(registry_addr) as addr:
+            yield (
+                actor,
+                addr,
+            )
+
+
+@acm
+async def open_pikerd(
+    loglevel: str | None = None,
 
     # XXX: you should pretty much never want debug mode
     # for data daemons when running in production.
     debug_mode: bool = False,
+    registry_addr: None | tuple[str, int] = None,
 
-) -> Optional[tractor._portal.Portal]:
+) -> Services:
     '''
-    Start a piker actor who's runtime will automatically
-    sync with existing piker actors in local network
-    based on configuration.
+    Start a root piker daemon who's lifetime extends indefinitely until
+    cancelled.
+
+    A root actor nursery is created which can be used to create and keep
+    alive underling services (see below).
 
     '''
-    global _services
-    assert _services is None
-
-    # XXX: this may open a root actor as well
     async with (
-        tractor.open_root_actor(
-
-            # passed through to ``open_root_actor``
-            arbiter_addr=_registry_addr,
-            name=name,
-            loglevel=loglevel,
-            debug_mode=debug_mode,
-            start_method=start_method,
+        open_piker_runtime(
 
+            name=_root_dname,
             # TODO: eventually we should be able to avoid
             # having the root have more then permissions to
             # spawn other specialized daemons I think?
             enable_modules=_root_modules,
-        ) as _,
+
+            loglevel=loglevel,
+            debug_mode=debug_mode,
+            registry_addr=registry_addr,
+
+        ) as (root_actor, reg_addr),
+        tractor.open_nursery() as actor_nursery,
+        trio.open_nursery() as service_nursery,
     ):
-        yield tractor.current_actor()
+        assert root_actor.accept_addr == reg_addr
+
+        # assign globally for future daemon/task creation
+        Services.actor_n = actor_nursery
+        Services.service_n = service_nursery
+        Services.debug_mode = debug_mode
+        try:
+            yield Services
+        finally:
+            # TODO: is this more clever/efficient?
+            # if 'samplerd' in Services.service_tasks:
+            #     await Services.cancel_service('samplerd')
+            service_nursery.cancel_scope.cancel()
 
 
 @acm
```
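Since `Services` is now classvar-style (no instance, no `pydantic.BaseModel`), callers invoke the start/cancel API directly on the type. A condensed, hypothetical sketch of the pattern (the service name and endpoint below are made up; the real call sites are `spawn_brokerd()` and `spawn_emsd()` further down):

```python
import tractor
import trio

from piker._daemon import Services


@tractor.context
async def _setup_persistent_my_service(
    ctx: tractor.Context,
) -> None:
    # hypothetical long-lived endpoint: signal readiness then park
    await ctx.started(True)
    await trio.sleep_forever()


async def start_then_stop(portal: tractor.Portal) -> None:
    # schedule the endpoint on the sub-actor behind `portal` as a
    # supervised service task under the root `pikerd` tree
    cs, first = await Services.start_service_task(
        'my_service',  # hypothetical service name
        portal,
        _setup_persistent_my_service,
    )

    # later: cancels the scope, waits on the task's completion event
    # and tears down the sub-actor
    await Services.cancel_service('my_service')
```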
```diff
@@ -232,61 +368,89 @@ async def maybe_open_runtime(
     **kwargs,
 
 ) -> None:
-    """
+    '''
     Start the ``tractor`` runtime (a root actor) if none exists.
 
-    """
-    settings = _tractor_kwargs
-    settings.update(kwargs)
+    '''
+    name = kwargs.pop('name')
 
     if not tractor.current_actor(err_on_no_runtime=False):
-        async with tractor.open_root_actor(
+        async with open_piker_runtime(
+            name,
             loglevel=loglevel,
-            **settings,
-        ):
-            yield
+            **kwargs,
+        ) as (_, addr):
+            yield addr,
     else:
-        yield
+        async with open_registry() as addr:
+            yield addr
 
 
 @acm
 async def maybe_open_pikerd(
     loglevel: Optional[str] = None,
+    registry_addr: None | tuple = None,
+
     **kwargs,
 
-) -> Union[tractor._portal.Portal, Services]:
-    """If no ``pikerd`` daemon-root-actor can be found start it and
+) -> tractor._portal.Portal | ClassVar[Services]:
+    '''
+    If no ``pikerd`` daemon-root-actor can be found start it and
     yield up (we should probably figure out returning a portal to self
     though).
 
-    """
+    '''
     if loglevel:
         get_console_log(loglevel)
 
     # subtle, we must have the runtime up here or portal lookup will fail
-    async with maybe_open_runtime(loglevel, **kwargs):
-
-        async with tractor.find_actor(_root_dname) as portal:
-            # assert portal is not None
-            if portal is not None:
-                yield portal
-                return
+    query_name = kwargs.pop('name', f'piker_query_{os.getpid()}')
+
+    # TODO: if we need to make the query part faster we could not init
+    # an actor runtime and instead just hit the socket?
+    # from tractor._ipc import _connect_chan, Channel
+    # async with _connect_chan(host, port) as chan:
+    #     async with open_portal(chan) as arb_portal:
+    #         yield arb_portal
+
+    async with (
+        open_piker_runtime(
+            name=query_name,
+            registry_addr=registry_addr,
+            loglevel=loglevel,
+            **kwargs,
+        ) as _,
+
+        tractor.find_actor(
+            _root_dname,
+            arbiter_sockaddr=registry_addr,
+        ) as portal
+    ):
+        # connect to any existing daemon presuming
+        # its registry socket was selected.
+        if (
+            portal is not None
+        ):
+            yield portal
+            return
 
     # presume pikerd role since no daemon could be found at
     # configured address
     async with open_pikerd(
-
         loglevel=loglevel,
         debug_mode=kwargs.get('debug_mode', False),
+        registry_addr=registry_addr,
 
-    ) as _:
+    ) as service_manager:
         # in the case where we're starting up the
         # tractor-piker runtime stack in **this** process
         # we return no portal to self.
-        yield None
+        assert service_manager
+        yield service_manager
 
 
-# brokerd enabled modules
+# `brokerd` enabled modules
+# NOTE: keeping this list as small as possible is part of our caps-sec
+# model and should be treated with utmost care!
 _data_mods = [
     'piker.brokers.core',
     'piker.brokers.data',
```
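`maybe_open_pikerd()` thus yields either a `tractor.Portal` to a discovered daemon or the `Services` class when this process takes on the root role. A hedged consumer sketch:

```python
import tractor

from piker._daemon import maybe_open_pikerd


async def ensure_pikerd() -> None:
    async with maybe_open_pikerd(loglevel='info') as pikerd:
        if isinstance(pikerd, tractor.Portal):
            # an existing `pikerd` was found via the registry
            ...
        else:
            # we became the root: `pikerd` is the `Services` manager
            ...
```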
```diff
@@ -296,37 +460,35 @@ _data_mods = [
 ]
 
 
-class Brokerd:
-    locks = defaultdict(trio.Lock)
-
-
 @acm
 async def find_service(
     service_name: str,
-) -> Optional[tractor.Portal]:
+) -> tractor.Portal | None:
 
-    log.info(f'Scanning for service `{service_name}`')
-    # attach to existing daemon by name if possible
-    async with tractor.find_actor(
-        service_name,
-        arbiter_sockaddr=_registry_addr,
-    ) as maybe_portal:
-        yield maybe_portal
+    async with open_registry() as reg_addr:
+        log.info(f'Scanning for service `{service_name}`')
+        # attach to existing daemon by name if possible
+        async with tractor.find_actor(
+            service_name,
+            arbiter_sockaddr=reg_addr,
+        ) as maybe_portal:
+            yield maybe_portal
 
 
 async def check_for_service(
     service_name: str,
 
-) -> bool:
+) -> None | tuple[str, int]:
     '''
     Service daemon "liveness" predicate.
 
     '''
-    async with tractor.query_actor(
-        service_name,
-        arbiter_sockaddr=_registry_addr,
-    ) as sockaddr:
-        return sockaddr
+    async with open_registry(ensure_exists=False) as reg_addr:
+        async with tractor.query_actor(
+            service_name,
+            arbiter_sockaddr=reg_addr,
+        ) as sockaddr:
+            return sockaddr
 
 
 @acm
```
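Both lookups now resolve the registry address through `open_registry()` rather than the removed module constant, so a liveness probe is simply (sketch; the service name is an illustrative assumption):

```python
from piker._daemon import check_for_service


async def brokerd_alive() -> bool:
    # returns the registered sockaddr, or `None` when not found
    sockaddr = await check_for_service('brokerd.ib')  # hypothetical name
    return sockaddr is not None
```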
```diff
@@ -336,6 +498,8 @@ async def maybe_spawn_daemon(
     service_task_target: Callable,
     spawn_args: dict[str, Any],
     loglevel: Optional[str] = None,
+
+    singleton: bool = False,
     **kwargs,
 
 ) -> tractor.Portal:
```
```diff
@@ -356,7 +520,7 @@ async def maybe_spawn_daemon(
 
     # serialize access to this section to avoid
     # 2 or more tasks racing to create a daemon
-    lock = Brokerd.locks[service_name]
+    lock = Services.locks[service_name]
     await lock.acquire()
 
     async with find_service(service_name) as portal:
```
```diff
@@ -367,6 +531,9 @@ async def maybe_spawn_daemon(
 
         log.warning(f"Couldn't find any existing {service_name}")
 
+        # TODO: really shouldn't the actor spawning be part of the service
+        # starting method `Services.start_service()` ?
+
         # ask root ``pikerd`` daemon to spawn the daemon we need if
         # pikerd is not live we now become the root of the
         # process tree
```
```diff
@@ -377,15 +544,16 @@ async def maybe_spawn_daemon(
 
         ) as pikerd_portal:
 
+            # we are the root and thus are `pikerd`
+            # so spawn the target service directly by calling
+            # the provided target routine.
+            # XXX: this assumes that the target is well formed and will
+            # do the right things to setup both a sub-actor **and** call
+            # the ``_Services`` api from above to start the top level
+            # service task for that actor.
+            started: bool
             if pikerd_portal is None:
-                # we are the root and thus are `pikerd`
-                # so spawn the target service directly by calling
-                # the provided target routine.
-                # XXX: this assumes that the target is well formed and will
-                # do the right things to setup both a sub-actor **and** call
-                # the ``_Services`` api from above to start the top level
-                # service task for that actor.
-                await service_task_target(**spawn_args)
+                started = await service_task_target(**spawn_args)
 
             else:
                 # tell the remote `pikerd` to start the target,
```
```diff
@@ -394,11 +562,14 @@ async def maybe_spawn_daemon(
                 # non-blocking and the target task will persist running
                 # on `pikerd` after the client requesting it's start
                 # disconnects.
-                await pikerd_portal.run(
+                started = await pikerd_portal.run(
                     service_task_target,
                     **spawn_args,
                 )
 
+            if started:
+                log.info(f'Service {service_name} started!')
+
         async with tractor.wait_for_actor(service_name) as portal:
             lock.release()
             yield portal
```
```diff
@@ -421,9 +592,6 @@ async def spawn_brokerd(
     extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
     tractor_kwargs.update(extra_tractor_kwargs)
 
-    global _services
-    assert _services
-
     # ask `pikerd` to spawn a new sub-actor and manage it under its
     # actor nursery
     modpath = brokermod.__name__
```
```diff
@@ -436,18 +604,18 @@ async def spawn_brokerd(
         subpath = f'{modpath}.{submodname}'
         broker_enable.append(subpath)
 
-    portal = await _services.actor_n.start_actor(
+    portal = await Services.actor_n.start_actor(
         dname,
         enable_modules=_data_mods + broker_enable,
         loglevel=loglevel,
-        debug_mode=_services.debug_mode,
+        debug_mode=Services.debug_mode,
         **tractor_kwargs
     )
 
     # non-blocking setup of brokerd service nursery
     from .data import _setup_persistent_brokerd
 
-    await _services.start_service_task(
+    await Services.start_service_task(
         dname,
         portal,
         _setup_persistent_brokerd,
```
```diff
@@ -493,24 +661,21 @@ async def spawn_emsd(
     """
     log.info('Spawning emsd')
 
-    global _services
-    assert _services
-
-    portal = await _services.actor_n.start_actor(
+    portal = await Services.actor_n.start_actor(
         'emsd',
         enable_modules=[
             'piker.clearing._ems',
             'piker.clearing._client',
         ],
         loglevel=loglevel,
-        debug_mode=_services.debug_mode,  # set by pikerd flag
+        debug_mode=Services.debug_mode,  # set by pikerd flag
         **extra_tractor_kwargs
     )
 
     # non-blocking setup of clearing service
     from .clearing._ems import _setup_persistent_emsd
 
-    await _services.start_service_task(
+    await Services.start_service_task(
         'emsd',
         portal,
         _setup_persistent_emsd,
```
@@ -537,25 +702,3 @@ async def maybe_open_emsd(
     ) as portal:
         yield portal
-
-
-# TODO: ideally we can start the tsdb "on demand" but it's
-# probably going to require "rootless" docker, at least if we don't
-# want to expect the user to start ``pikerd`` with root perms all the
-# time.
-# async def maybe_open_marketstored(
-#     loglevel: Optional[str] = None,
-#     **kwargs,
-
-# ) -> tractor._portal.Portal:  # noqa
-
-#     async with maybe_spawn_daemon(
-
-#         'marketstored',
-#         service_task_target=spawn_emsd,
-#         spawn_args={'loglevel': loglevel},
-#         loglevel=loglevel,
-#         **kwargs,
-
-#     ) as portal:
-#         yield portal
@@ -18,7 +18,10 @@
 Profiling wrappers for internal libs.

 """
+import os
+import sys
 import time
+from time import perf_counter
 from functools import wraps

 # NOTE: you can pass a flag to enable this:
@@ -44,3 +47,184 @@ def timeit(fn):
         return res

     return wrapper
+
+
+# Modified version of ``pyqtgraph.debug.Profiler`` that
+# core seems hesitant to land in:
+# https://github.com/pyqtgraph/pyqtgraph/pull/2281
+class Profiler(object):
+    '''
+    Simple profiler allowing measurement of multiple time intervals.
+
+    By default, profilers are disabled. To enable profiling, set the
+    environment variable `PYQTGRAPHPROFILE` to a comma-separated list of
+    fully-qualified names of profiled functions.
+
+    Calling a profiler registers a message (defaulting to an increasing
+    counter) that contains the time elapsed since the last call. When the
+    profiler is about to be garbage-collected, the messages are passed to the
+    outer profiler if one is running, or printed to stdout otherwise.
+
+    If `delayed` is set to False, messages are immediately printed instead.
+
+    Example:
+        def function(...):
+            profiler = Profiler()
+            ... do stuff ...
+            profiler('did stuff')
+            ... do other stuff ...
+            profiler('did other stuff')
+            # profiler is garbage-collected and flushed at function end
+
+    If this function is a method of class C, setting `PYQTGRAPHPROFILE` to
+    "C.function" (without the module name) will enable this profiler.
+
+    For regular functions, use the qualified name of the function, stripping
+    only the initial "pyqtgraph." prefix from the module.
+    '''
+
+    _profilers = os.environ.get("PYQTGRAPHPROFILE", None)
+    _profilers = _profilers.split(",") if _profilers is not None else []
+
+    _depth = 0
+
+    # NOTE: without this defined at the class level
+    # you won't see appropriately "nested" sub-profiler
+    # instance calls.
+    _msgs = []
+
+    # set this flag to disable all or individual profilers at runtime
+    disable = False
+
+    class DisabledProfiler(object):
+        def __init__(self, *args, **kwds):
+            pass
+
+        def __call__(self, *args):
+            pass
+
+        def finish(self):
+            pass
+
+        def mark(self, msg=None):
+            pass
+
+    _disabledProfiler = DisabledProfiler()
+
+    def __new__(
+        cls,
+        msg=None,
+        disabled='env',
+        delayed=True,
+        ms_threshold: float = 0.0,
+    ):
+        """Optionally create a new profiler based on caller's qualname.
+
+        ``ms_threshold`` can be set to a value in ms for which, if the
+        total measured time of the lifetime of this profiler is **less
+        than** this value, then no profiling messages will be printed.
+        Setting ``delayed=False`` disables this feature since messages
+        are emitted immediately.
+
+        """
+        if (
+            disabled is True
+            or (
+                disabled == 'env'
+                and len(cls._profilers) == 0
+            )
+        ):
+            return cls._disabledProfiler
+
+        # determine the qualified name of the caller function
+        caller_frame = sys._getframe(1)
+        try:
+            caller_object_type = type(caller_frame.f_locals["self"])
+
+        except KeyError:  # we are in a regular function
+            qualifier = caller_frame.f_globals["__name__"].split(".", 1)[-1]
+
+        else:  # we are in a method
+            qualifier = caller_object_type.__name__
+        func_qualname = qualifier + "." + caller_frame.f_code.co_name
+
+        if disabled == 'env' and func_qualname not in cls._profilers:
+            # don't do anything
+            return cls._disabledProfiler
+
+        # create an actual profiling object
+        cls._depth += 1
+        obj = super(Profiler, cls).__new__(cls)
+        obj._name = msg or func_qualname
+        obj._delayed = delayed
+        obj._markCount = 0
+        obj._finished = False
+        obj._firstTime = obj._lastTime = perf_counter()
+        obj._mt = ms_threshold
+        obj._newMsg("> Entering " + obj._name)
+        return obj
+
+    def __call__(self, msg=None):
+        """Register or print a new message with timing information.
+        """
+        if self.disable:
+            return
+        if msg is None:
+            msg = str(self._markCount)
+
+        self._markCount += 1
+        newTime = perf_counter()
+        ms = (newTime - self._lastTime) * 1000
+        self._newMsg("  %s: %0.4f ms", msg, ms)
+        self._lastTime = newTime
+
+    def mark(self, msg=None):
+        self(msg)
+
+    def _newMsg(self, msg, *args):
+        msg = "  " * (self._depth - 1) + msg
+        if self._delayed:
+            self._msgs.append((msg, args))
+        else:
+            print(msg % args)
+
+    def __del__(self):
+        self.finish()
+
+    def finish(self, msg=None):
+        """Add a final message; flush the message list if no parent profiler.
+        """
+        if self._finished or self.disable:
+            return
+
+        self._finished = True
+        if msg is not None:
+            self(msg)
+
+        tot_ms = (perf_counter() - self._firstTime) * 1000
+        self._newMsg(
+            "< Exiting %s, total time: %0.4f ms",
+            self._name,
+            tot_ms,
+        )
+
+        if tot_ms < self._mt:
+            # print(f'{tot_ms} < {self._mt}, clearing')
+            # NOTE: this list **must** be an instance var to avoid
+            # deleting common messages during GC I think?
+            self._msgs.clear()
+        # else:
+        #     print(f'{tot_ms} > {self._mt}, not clearing')
+
+        # XXX: why is this needed?
+        # don't we **want to show** nested profiler messages?
+        if self._msgs:  # and self._depth < 1:
+
+            # if self._msgs:
+            print("\n".join([m[0] % m[1] for m in self._msgs]))
+
+            # clear all entries
+            self._msgs.clear()
+            # type(self)._msgs = []
+
+        type(self)._depth -= 1
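
A quick usage sketch: profilers stay disabled unless ``PYQTGRAPHPROFILE``
names the caller's qualname, and since ``_profilers`` is read at
class-definition time the env var must be set before this module is imported.
The qualname, module path and threshold below are assumptions for
illustration:

    import os
    os.environ['PYQTGRAPHPROFILE'] = 'Renderer.render'  # set *before* import

    from piker._profile import Profiler  # assumed module path for this file

    class Renderer:
        def render(self):
            profiler = Profiler(ms_threshold=6.0)  # mute runs under 6ms total
            # ... draw the curve ...
            profiler('drew curve')
            # ... update the axis ...
            profiler.finish('updated axis')  # flush, or defer to an outer profiler
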
@@ -26,10 +26,21 @@ asks.init('trio')

 __brokers__ = [
     'binance',
-    'questrade',
-    'robinhood',
     'ib',
     'kraken',
+
+    # broken but used to work
+    # 'questrade',
+    # 'robinhood',
+
+    # TODO: we should get on these stat!
+    # alpaca
+    # wstrade
+    # iex
+
+    # deribit
+    # kucoin
+    # bitso
 ]
@@ -33,15 +33,23 @@ import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np
 import tractor
-from pydantic.dataclasses import dataclass
-from pydantic import BaseModel
 import wsproto

 from .._cacheables import open_cached_client
-from ._util import resproc, SymbolNotFound
-from ..log import get_logger, get_console_log
-from ..data import ShmArray
-from ..data._web_bs import open_autorecon_ws, NoBsWs
+from ._util import (
+    resproc,
+    SymbolNotFound,
+    DataUnavailable,
+)
+from ..log import (
+    get_logger,
+    get_console_log,
+)
+from ..data.types import Struct
+from ..data._web_bs import (
+    open_autorecon_ws,
+    NoBsWs,
+)

 log = get_logger(__name__)
@@ -79,12 +87,14 @@ _show_wap_in_history = False


 # https://binance-docs.github.io/apidocs/spot/en/#exchange-information
-class Pair(BaseModel):
+class Pair(Struct, frozen=True):
     symbol: str
     status: str

     baseAsset: str
     baseAssetPrecision: int
+    cancelReplaceAllowed: bool
+    allowTrailingStop: bool
     quoteAsset: str
     quotePrecision: int
     quoteAssetPrecision: int
@@ -100,18 +110,21 @@ class Pair(BaseModel):
     isSpotTradingAllowed: bool
     isMarginTradingAllowed: bool

+    defaultSelfTradePreventionMode: str
+    allowedSelfTradePreventionModes: list[str]
+
     filters: list[dict[str, Union[str, int, float]]]
     permissions: list[str]


-@dataclass
-class OHLC:
-    """Description of the flattened OHLC quote format.
+class OHLC(Struct):
+    '''
+    Description of the flattened OHLC quote format.

     For schema details see:
     https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

-    """
+    '''
     time: int

     open: float
@@ -134,7 +147,9 @@ class OHLC:


 # convert datetime obj timestamp to unixtime in milliseconds
-def binance_timestamp(when):
+def binance_timestamp(
+    when: datetime
+) -> int:
     return int((when.timestamp() * 1000) + (when.microsecond / 1000))
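
A worked check of the conversion (illustrative datetime):

    import pendulum

    when = pendulum.datetime(2022, 1, 1, 12, 30, tz='UTC')
    # 1641040200 seconds since epoch, zero microseconds:
    assert binance_timestamp(when) == 1641040200000  # milliseconds
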
@@ -173,7 +188,7 @@ class Client:
         params = {}

         if sym is not None:
-            sym = sym.upper()
+            sym = sym.lower()
             params = {'symbol': sym}

         resp = await self._api(
@@ -230,7 +245,7 @@ class Client:
     ) -> dict:

         if end_dt is None:
-            end_dt = pendulum.now('UTC')
+            end_dt = pendulum.now('UTC').add(minutes=1)

         if start_dt is None:
             start_dt = end_dt.start_of(
@@ -260,6 +275,7 @@ class Client:
         for i, bar in enumerate(bars):

             bar = OHLC(*bar)
+            bar.typecast()

             row = []
             for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
|
||||||
|
|
||||||
|
|
||||||
# validation type
|
# validation type
|
||||||
class AggTrade(BaseModel):
|
class AggTrade(Struct):
|
||||||
e: str # Event type
|
e: str # Event type
|
||||||
E: int # Event time
|
E: int # Event time
|
||||||
s: str # Symbol
|
s: str # Symbol
|
||||||
|
@@ -341,7 +357,9 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

     elif msg.get('e') == 'aggTrade':

-        # validate
+        # NOTE: this is purely for a definition, ``msgspec.Struct``
+        # does not runtime-validate until you decode/encode.
+        # see: https://jcristharif.com/msgspec/structs.html#type-validation
         msg = AggTrade(**msg)

         # TODO: type out and require this quote format
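
To make the NOTE above concrete, a self-contained sketch of the ``msgspec``
behaviour (toy struct, not the real ``AggTrade`` schema):

    import msgspec

    class Tick(msgspec.Struct):
        p: float
        q: float

    # direct construction does *no* runtime type checking:
    t = Tick(p='not a float', q=1.0)  # happily accepted

    # validation only happens when round-tripping through a codec:
    msgspec.json.decode(b'{"p": "not a float", "q": 1.0}', type=Tick)  # raises
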
@@ -352,8 +370,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
             'brokerd_ts': time.time(),
             'ticks': [{
                 'type': 'trade',
-                'price': msg.p,
-                'size': msg.q,
+                'price': float(msg.p),
+                'size': float(msg.q),
                 'broker_ts': msg.T,
             }],
         }
@@ -384,41 +402,39 @@ async def open_history_client(
     async with open_cached_client('binance') as client:

         async def get_ohlc(
-            end_dt: Optional[datetime] = None,
-            start_dt: Optional[datetime] = None,
+            timeframe: float,
+            end_dt: datetime | None = None,
+            start_dt: datetime | None = None,

         ) -> tuple[
             np.ndarray,
             datetime,  # start
             datetime,  # end
         ]:
+            if timeframe != 60:
+                raise DataUnavailable('Only 1m bars are supported')
+
             array = await client.bars(
                 symbol,
                 start_dt=start_dt,
                 end_dt=end_dt,
             )
-            start_dt = pendulum.from_timestamp(array[0]['time'])
-            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            times = array['time']
+            if (
+                end_dt is None
+            ):
+                inow = round(time.time())
+                if (inow - times[-1]) > 60:
+                    await tractor.breakpoint()
+
+            start_dt = pendulum.from_timestamp(times[0])
+            end_dt = pendulum.from_timestamp(times[-1])
+
             return array, start_dt, end_dt

         yield get_ohlc, {'erlangs': 3, 'rate': 3}


-async def backfill_bars(
-    sym: str,
-    shm: ShmArray,  # type: ignore # noqa
-    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
-) -> None:
-    """Fill historical bars into shared mem / storage afap.
-    """
-    with trio.CancelScope() as cs:
-        async with open_cached_client('binance') as client:
-            bars = await client.bars(symbol=sym)
-            shm.push(bars)
-            task_status.started(cs)
-
-
 async def stream_quotes(

     send_chan: trio.abc.SendChannel,
@@ -448,12 +464,20 @@ async def stream_quotes(
         d = cache[sym.upper()]
         syminfo = Pair(**d)  # validation

-        si = sym_infos[sym] = syminfo.dict()
+        si = sym_infos[sym] = syminfo.to_dict()
+        filters = {}
+        for entry in syminfo.filters:
+            ftype = entry['filterType']
+            filters[ftype] = entry

         # XXX: after manually inspecting the response format we
         # just directly pick out the info we need
-        si['price_tick_size'] = float(syminfo.filters[0]['tickSize'])
-        si['lot_tick_size'] = float(syminfo.filters[2]['stepSize'])
+        si['price_tick_size'] = float(
+            filters['PRICE_FILTER']['tickSize']
+        )
+        si['lot_tick_size'] = float(
+            filters['LOT_SIZE']['stepSize']
+        )
         si['asset_type'] = 'crypto'

         symbol = symbols[0]
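
The motivation for the re-keying above: binance's ``exchangeInfo`` response
carries ``filters`` as a *list* of dicts keyed by ``filterType``, so indexing
by position (the old ``filters[0]``/``filters[2]``) breaks whenever the venue
reorders or inserts entries. Illustrative payload shape (values invented):

    filters_list = [
        {'filterType': 'PRICE_FILTER', 'tickSize': '0.01000000'},  # + more keys
        {'filterType': 'LOT_SIZE', 'stepSize': '0.00001000'},      # + more keys
        # entry order and count can vary per market
    ]
    filters = {entry['filterType']: entry for entry in filters_list}
    assert float(filters['PRICE_FILTER']['tickSize']) == 0.01
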
@@ -495,14 +519,15 @@ async def stream_quotes(
         subs.append("{sym}@bookTicker")

         # unsub from all pairs on teardown
-        await ws.send_msg({
-            "method": "UNSUBSCRIBE",
-            "params": subs,
-            "id": uid,
-        })
+        if ws.connected():
+            await ws.send_msg({
+                "method": "UNSUBSCRIBE",
+                "params": subs,
+                "id": uid,
+            })

         # XXX: do we need to ack the unsub?
         # await ws.recv_msg()

     async with open_autorecon_ws(
         'wss://stream.binance.com/ws',
@@ -39,6 +39,148 @@ _config_dir = click.get_app_dir('piker')
 _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')


+OK = '\033[92m'
+WARNING = '\033[93m'
+FAIL = '\033[91m'
+ENDC = '\033[0m'
+
+
+def print_ok(s: str, **kwargs):
+    print(OK + s + ENDC, **kwargs)
+
+
+def print_error(s: str, **kwargs):
+    print(FAIL + s + ENDC, **kwargs)
+
+
+def get_method(client, meth_name: str):
+    print(f'checking client for method \'{meth_name}\'...', end='', flush=True)
+    method = getattr(client, meth_name, None)
+    assert method
+    print_ok('found!')
+    return method
+
+
+async def run_method(client, meth_name: str, **kwargs):
+    method = get_method(client, meth_name)
+    print('running...', end='', flush=True)
+    result = await method(**kwargs)
+    print_ok(f'done! result: {type(result)}')
+    return result
+
+
+async def run_test(broker_name: str):
+    brokermod = get_brokermod(broker_name)
+    total = 0
+    passed = 0
+    failed = 0
+
+    print('getting client...', end='', flush=True)
+    if not hasattr(brokermod, 'get_client'):
+        print_error('fail! no \'get_client\' context manager found.')
+        return
+
+    async with brokermod.get_client(is_brokercheck=True) as client:
+        print_ok('done! inside client context.')
+
+        # check for methods present on brokermod
+        method_list = [
+            'backfill_bars',
+            'get_client',
+            'trades_dialogue',
+            'open_history_client',
+            'open_symbol_search',
+            'stream_quotes',
+        ]
+
+        for method in method_list:
+            print(
+                f'checking brokermod for method \'{method}\'...',
+                end='', flush=True)
+            if not hasattr(brokermod, method):
+                print_error(f'fail! method \'{method}\' not found.')
+                failed += 1
+            else:
+                print_ok('done!')
+                passed += 1
+
+            total += 1
+
+        # check for methods present on brokermod.Client;
+        # for private methods only check they are present
+        method_list = [
+            'get_balances',
+            'get_assets',
+            'get_trades',
+            'get_xfers',
+            'submit_limit',
+            'submit_cancel',
+            'search_symbols',
+        ]
+
+        for method_name in method_list:
+            try:
+                get_method(client, method_name)
+                passed += 1
+
+            except AssertionError:
+                print_error(f'fail! method \'{method_name}\' not found.')
+                failed += 1
+
+            total += 1
+
+        # check public methods on brokermod.Client *and* their results
+        syms = await run_method(client, 'symbol_info')
+        total += 1
+
+        if len(syms) == 0:
+            raise BaseException('Empty Symbol list?')
+
+        passed += 1
+        first_sym = tuple(syms.keys())[0]
+
+        method_list = [
+            ('cache_symbols', {}),
+            ('search_symbols', {'pattern': first_sym[:-1]}),
+            ('bars', {'symbol': first_sym})
+        ]
+
+        for method_name, method_kwargs in method_list:
+            try:
+                await run_method(client, method_name, **method_kwargs)
+                passed += 1
+
+            except AssertionError:
+                print_error(f'fail! method \'{method_name}\' not found.')
+                failed += 1
+
+            total += 1
+
+        print(f'total: {total}, passed: {passed}, failed: {failed}')
+
+
+@cli.command()
+@click.argument('broker', nargs=1, required=True)
+@click.pass_obj
+def brokercheck(config, broker):
+    '''
+    Test broker apis for completeness.
+
+    '''
+    async def bcheck_main():
+        async with maybe_spawn_brokerd(broker) as portal:
+            await portal.run(run_test, broker)
+            await portal.cancel_actor()
+
+    # XXX: the spawned-daemon path defined above is currently unused;
+    # the test suite runs directly in-process instead.
+    trio.run(run_test, broker)
+
+
 @cli.command()
 @click.option('--keys', '-k', multiple=True,
               help='Return results only for these keys')
@@ -193,6 +335,8 @@ def contracts(ctx, loglevel, broker, symbol, ids):
     brokermod = get_brokermod(broker)
     get_console_log(loglevel)

+
+
     contracts = trio.run(partial(core.contracts, brokermod, symbol))
     if not ids:
         # just print out expiry dates which can be used with
@@ -0,0 +1,70 @@
``deribit`` backend
-------------------
pretty good liquidity crypto derivatives venue; uses a custom json-rpc over
ws protocol for client methods, then `cryptofeed` for data streams.

status
******
- supports option charts
- no order support yet


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [deribit]
    key_id = 'XXXXXXXX'
    key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'

To obtain an api id and secret you need to create an account, which can be a
real market account over at:

- deribit.com (requires KYC for deposit address)

Or a testnet account over at:

- test.deribit.com

For testnet, once the account is created here is how you deposit fake crypto
to try it out:

1) Go to Wallet:

.. figure:: assets/0_wallet.png
    :align: center
    :target: assets/0_wallet.png
    :alt: wallet page

2) Then click on the ellipsis menu and select deposit

.. figure:: assets/1_wallet_select_deposit.png
    :align: center
    :target: assets/1_wallet_select_deposit.png
    :alt: wallet deposit page

3) This will take you to the deposit address page

.. figure:: assets/2_gen_deposit_addr.png
    :align: center
    :target: assets/2_gen_deposit_addr.png
    :alt: generate deposit address page

4) After clicking generate you should see the address; copy it and go to the
   `coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send
   fake coins to that address.

.. figure:: assets/3_deposit_address.png
    :align: center
    :target: assets/3_deposit_address.png
    :alt: generated address

5) Back in the deposit address page you should see the deposit in your history

.. figure:: assets/4_wallet_deposit_history.png
    :align: center
    :target: assets/4_wallet_deposit_history.png
    :alt: wallet deposit history
@@ -0,0 +1,65 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
    backfill_bars
)
# from .broker import (
#     trades_dialogue,
#     norm_trade_records,
# )

__all__ = [
    'get_client',
    # 'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    # 'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    # 'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True
@@ -0,0 +1,672 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
# (GNU AGPLv3 license header as in ``__init__.py`` above)

'''
Deribit backend.

'''
import json
import time
import asyncio

from contextlib import asynccontextmanager as acm, AsyncExitStack
from functools import partial
from datetime import datetime
from typing import Any, Optional, Iterable, Callable

import pendulum
import asks
import trio
from trio_typing import Nursery, TaskStatus
from fuzzywuzzy import process as fuzzy
import numpy as np

from piker.data.types import Struct
from piker.data._web_bs import (
    NoBsWs,
    open_autorecon_ws,
    open_jsonrpc_session
)

from .._util import resproc

from piker import config
from piker.log import get_logger

from tractor.trionics import (
    broadcast_receiver,
    BroadcastReceiver,
    maybe_open_context
)
from tractor import to_asyncio

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT,
    L1_BOOK, TRADES,
    OPTION, CALL, PUT,
    # NOTE: needed by the fills/order-status relay further below
    FILLS, ORDER_INFO,
)
from cryptofeed.symbols import Symbol

log = get_logger(__name__)


_spawn_kwargs = {
    'infect_asyncio': True,
}


_url = 'https://www.deribit.com'
_ws_url = 'wss://www.deribit.com/ws/api/v2'
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'


# Broker specific ohlc schema (rest)
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),  # will be zeroed by sampler if not filled
]
class JSONRPCResult(Struct):
    # NOTE: ``msgspec`` requires non-defaulted fields to precede
    # defaulted ones.
    id: int
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
    jsonrpc: str = '2.0'
    result: Optional[dict] = None
    error: Optional[dict] = None


class JSONRPCChannel(Struct):
    method: str
    params: dict
    jsonrpc: str = '2.0'


class KLinesResult(Struct):
    close: list[float]
    cost: list[float]
    high: list[float]
    low: list[float]
    open: list[float]
    status: str
    ticks: list[int]
    volume: list[float]


class Trade(Struct):
    trade_seq: int
    trade_id: str
    timestamp: int
    tick_direction: int
    price: float
    mark_price: float
    iv: float
    instrument_name: str
    index_price: float
    direction: str
    amount: float
    combo_trade_id: Optional[int] = 0
    combo_id: Optional[str] = ''


class LastTradesResult(Struct):
    trades: list[Trade]
    has_more: bool
# convert datetime obj timestamp to unixtime in milliseconds
def deribit_timestamp(when):
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))


def str_to_cb_sym(name: str) -> Symbol:
    base, strike_price, expiry_date, option_type = name.split('-')

    quote = base

    if option_type == 'put':
        option_type = PUT
    elif option_type == 'call':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date,
        expiry_normalize=False)


def piker_sym_to_cb_sym(name: str) -> Symbol:
    base, expiry_date, strike_price, option_type = tuple(
        name.upper().split('-'))

    quote = base

    if option_type == 'P':
        option_type = PUT
    elif option_type == 'C':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date.upper())


def cb_sym_to_deribit_inst(sym: Symbol):
    # cryptofeed normalized
    cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z']

    # deribit specific
    months = [
        'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
        'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC',
    ]

    exp = sym.expiry_date

    # YYMDD
    # 01234
    year, month, day = (
        exp[:2], months[cb_norm.index(exp[2:3])], exp[3:])

    otype = 'C' if sym.option_type == CALL else 'P'

    return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'
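
A worked example of the slicing above (hypothetical contract): cryptofeed
normalizes expiries to a ``YYMDD`` code using futures month letters, so
``'F'`` maps to January:

    exp = '22F11'  # year '22', month code 'F', day '11'
    cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z']
    months = ['JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
              'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC']
    year, month, day = exp[:2], months[cb_norm.index(exp[2:3])], exp[3:]
    assert f'BTC-{day}{month}{year}-50000-C' == 'BTC-11JAN22-50000-C'
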
def get_config() -> dict[str, Any]:

    conf, path = config.load()

    section = conf.get('deribit')

    # TODO: document why we send this; basically it sets the logging
    # params for cryptofeed.
    conf['log'] = {}
    conf['log']['disabled'] = True

    if section is None:
        log.warning(f'No config section found for deribit in {path}')

    return conf
class Client:

    def __init__(self, json_rpc: Callable) -> None:
        self._pairs: dict[str, Any] = None

        config = get_config().get('deribit', {})

        if ('key_id' in config) and ('key_secret' in config):
            self._key_id = config['key_id']
            self._key_secret = config['key_secret']

        else:
            self._key_id = None
            self._key_secret = None

        self.json_rpc = json_rpc

    @property
    def currencies(self):
        return ['btc', 'eth', 'sol', 'usd']

    async def get_balances(self, kind: str = 'option') -> dict[str, float]:
        """Return the set of positions for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_positions', params={
                    'currency': currency.upper(),
                    'kind': kind})

            balances[currency] = resp.result

        return balances

    async def get_assets(self) -> dict[str, float]:
        """Return the set of asset balances for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_account_summary', params={
                    'currency': currency.upper()})

            balances[currency] = resp.result['balance']

        return balances

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float
    ) -> dict:
        """Place an order
        """
        params = {
            'instrument_name': symbol.upper(),
            'amount': size,
            'type': 'limit',
            'price': price,
        }
        resp = await self.json_rpc(
            f'private/{action}', params)

        return resp.result

    async def submit_cancel(self, oid: str):
        """Send cancel request for order id
        """
        resp = await self.json_rpc(
            'private/cancel', {'order_id': oid})
        return resp.result

    async def symbol_info(
        self,
        instrument: Optional[str] = None,
        currency: str = 'btc',  # BTC, ETH, SOL, USDC
        kind: str = 'option',
        expired: bool = False
    ) -> dict[str, Any]:
        """Get symbol info for the exchange.

        """
        if self._pairs:
            return self._pairs

        # will retrieve all symbols by default
        params = {
            'currency': currency.upper(),
            'kind': kind,
            'expired': str(expired).lower()
        }

        resp = await self.json_rpc('public/get_instruments', params)
        results = resp.result

        instruments = {
            item['instrument_name'].lower(): item
            for item in results
        }

        if instrument is not None:
            return instruments[instrument]
        else:
            return instruments

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = 30,
    ) -> dict[str, Any]:
        data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=35,
            limit=limit
        )
        # repack in dict form
        return {item[0]['instrument_name'].lower(): item[0]
                for item in matches}

    async def bars(
        self,
        symbol: str,
        start_dt: Optional[datetime] = None,
        end_dt: Optional[datetime] = None,
        limit: int = 1000,
        as_np: bool = True,
    ) -> dict:
        instrument = symbol

        if end_dt is None:
            end_dt = pendulum.now('UTC')

        if start_dt is None:
            start_dt = end_dt.start_of(
                'minute').subtract(minutes=limit)

        start_time = deribit_timestamp(start_dt)
        end_time = deribit_timestamp(end_dt)

        # https://docs.deribit.com/#public-get_tradingview_chart_data
        resp = await self.json_rpc(
            'public/get_tradingview_chart_data',
            params={
                'instrument_name': instrument.upper(),
                'start_timestamp': start_time,
                'end_timestamp': end_time,
                'resolution': '1'
            })

        # the endpoint returns parallel per-field arrays; zip them
        # into per-bar rows matching ``_ohlc_dtype``.
        result = KLinesResult(**resp.result)
        new_bars = []
        for i in range(len(result.close)):

            row = [
                (start_time + (i * (60 * 1000))) / 1000.0,  # time
                result.open[i],
                result.high[i],
                result.low[i],
                result.close[i],
                result.volume[i],
                0
            ]

            new_bars.append((i,) + tuple(row))

        # return the plain row list in the non-numpy case
        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else new_bars
        return array

    async def last_trades(
        self,
        instrument: str,
        count: int = 10
    ):
        resp = await self.json_rpc(
            'public/get_last_trades_by_instrument',
            params={
                'instrument_name': instrument,
                'count': count
            })

        return LastTradesResult(**resp.result)
@acm
async def get_client(
    is_brokercheck: bool = False
) -> Client:

    async with (
        trio.open_nursery() as n,
        open_jsonrpc_session(
            _testnet_ws_url, dtype=JSONRPCResult) as json_rpc
    ):
        client = Client(json_rpc)

        _refresh_token: Optional[str] = None
        _access_token: Optional[str] = None

        async def _auth_loop(
            task_status: TaskStatus = trio.TASK_STATUS_IGNORED
        ):
            """Background task that acquires a first access token and then
            refreshes the access token while the nursery isn't cancelled.

            https://docs.deribit.com/?python#authentication-2
            """
            renew_time = 10
            access_scope = 'trade:read_write'
            _expiry_time = time.time()
            got_access = False
            nonlocal _refresh_token
            nonlocal _access_token

            while True:
                if time.time() - _expiry_time < renew_time:
                    # if we are close to token expiry time

                    if _refresh_token is not None:
                        # if we already have a refresh token we don't
                        # need to send the secret
                        params = {
                            'grant_type': 'refresh_token',
                            'refresh_token': _refresh_token,
                            'scope': access_scope
                        }

                    else:
                        # we don't have a refresh token, send the secret
                        # to initialize
                        params = {
                            'grant_type': 'client_credentials',
                            'client_id': client._key_id,
                            'client_secret': client._key_secret,
                            'scope': access_scope
                        }

                    resp = await json_rpc('public/auth', params)
                    result = resp.result

                    _expiry_time = time.time() + result['expires_in']
                    _refresh_token = result['refresh_token']

                    if 'access_token' in result:
                        _access_token = result['access_token']

                    if not got_access:
                        # the first time this loop runs we must indicate
                        # the task is started: we have auth
                        got_access = True
                        task_status.started()

                else:
                    await trio.sleep(renew_time / 2)

        # if we have client creds launch auth loop
        if client._key_id is not None:
            await n.start(_auth_loop)

        await client.cache_symbols()
        yield client
        n.cancel_scope.cancel()
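
A minimal usage sketch of the client above (testnet, since ``get_client()``
is hardwired to ``_testnet_ws_url``); public endpoints need no creds, so an
empty ``[deribit]`` config section is fine here:

    import trio

    async def main():
        async with get_client() as client:
            instruments = await client.symbol_info(currency='btc')
            print(f'{len(instruments)} btc option instruments')

    trio.run(main)
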
@acm
async def open_feed_handler():
    fh = FeedHandler(config=get_config())
    yield fh
    await to_asyncio.run_task(fh.stop_async)


@acm
async def maybe_open_feed_handler() -> trio.abc.ReceiveStream:
    async with maybe_open_context(
        acm_func=open_feed_handler,
        key='feedhandler',
    ) as (cache_hit, fh):
        yield fh


async def aio_price_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    async def _trade(data: dict, receipt_timestamp):
        to_trio.send_nowait(('trade', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'last': data,
            'broker_ts': time.time(),
            'data': data.to_dict(),
            'receipt': receipt_timestamp
        }))

    async def _l1(data: dict, receipt_timestamp):
        to_trio.send_nowait(('l1', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'ticks': [
                {'type': 'bid',
                 'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'bsize',
                 'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'ask',
                 'price': float(data.ask_price), 'size': float(data.ask_size)},
                {'type': 'asize',
                 'price': float(data.ask_price), 'size': float(data.ask_size)}
            ]
        }))

    fh.add_feed(
        DERIBIT,
        channels=[TRADES, L1_BOOK],
        symbols=[piker_sym_to_cb_sym(instrument)],
        callbacks={
            TRADES: _trade,
            L1_BOOK: _l1
        })

    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    await asyncio.sleep(float('inf'))


@acm
async def open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    async with maybe_open_feed_handler() as fh:
        async with to_asyncio.open_channel_from(
            partial(
                aio_price_feed_relay,
                fh,
                instrument
            )
        ) as (first, chan):
            yield chan


@acm
async def maybe_open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:

    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_price_feed,
        kwargs={
            'instrument': instrument
        },
        key=f'{instrument}-price',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed


async def aio_order_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    async def _fill(data: dict, receipt_timestamp):
        breakpoint()

    async def _order_info(data: dict, receipt_timestamp):
        breakpoint()

    fh.add_feed(
        DERIBIT,
        channels=[FILLS, ORDER_INFO],
        symbols=[instrument.upper()],
        callbacks={
            FILLS: _fill,
            ORDER_INFO: _order_info,
        })

    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    await asyncio.sleep(float('inf'))


@acm
async def open_order_feed(
    instrument: list[str]
) -> trio.abc.ReceiveStream:
    async with maybe_open_feed_handler() as fh:
        async with to_asyncio.open_channel_from(
            partial(
                aio_order_feed_relay,
                fh,
                instrument
            )
        ) as (first, chan):
            yield chan


@acm
async def maybe_open_order_feed(
    instrument: str
) -> trio.abc.ReceiveStream:

    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_order_feed,
        kwargs={
            # NOTE: the feed handler is acquired inside
            # ``open_order_feed()`` itself.
            'instrument': instrument,
        },
        key=f'{instrument}-order',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed
Binary files not shown: 5 new image assets (the wallet/deposit screenshots
referenced by the deribit README above) of 169 KiB, 106 KiB, 59 KiB, 70 KiB
and 132 KiB.
@@ -0,0 +1,185 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
# (GNU AGPLv3 license header as in ``__init__.py`` above)

'''
Deribit backend.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import Any, Optional, Callable
import time

import trio
from trio_typing import TaskStatus
import pendulum
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor

from piker._cacheables import open_cached_client
from piker.log import get_logger, get_console_log
from piker.data import ShmArray
from piker.brokers._util import (
    BrokerError,
    DataUnavailable,
)

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
)
from cryptofeed.symbols import Symbol

from .api import (
    Client, Trade,
    get_config,
    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
    maybe_open_price_feed
)

_spawn_kwargs = {
    'infect_asyncio': True,
}


log = get_logger(__name__)


@acm
async def open_history_client(
    instrument: str,
) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('deribit') as client:

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            array = await client.bars(
                instrument,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            if len(array) == 0:
                raise DataUnavailable

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])

            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    sym = symbols[0]

    async with (
        open_cached_client('deribit') as client,
        send_chan as send_chan
    ):

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            sym: {
                'symbol_info': {
                    'asset_type': 'option',
                    'price_tick_size': 0.0005
                },
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        nsym = piker_sym_to_cb_sym(sym)

        async with maybe_open_price_feed(sym) as stream:

            cache = await client.cache_symbols()

            last_trades = (await client.last_trades(
                cb_sym_to_deribit_inst(nsym), count=1)).trades

            if len(last_trades) == 0:
                last_trade = None
                async for typ, quote in stream:
                    if typ == 'trade':
                        last_trade = Trade(**(quote['data']))
                        break

            else:
                last_trade = Trade(**(last_trades[0]))

            first_quote = {
                'symbol': sym,
                'last': last_trade.price,
                'brokerd_ts': last_trade.timestamp,
                'ticks': [{
                    'type': 'trade',
                    'price': last_trade.price,
                    'size': last_trade.amount,
                    'broker_ts': last_trade.timestamp
                }]
            }
            task_status.started((init_msgs, first_quote))

            feed_is_live.set()

            async for typ, quote in stream:
                topic = quote['symbol']
                await send_chan.send({topic: quote})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> Client:
    async with open_cached_client('deribit') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started()

        async with ctx.open_stream() as stream:

            async for pattern in stream:
                # repack in dict form
                await stream.send(
                    await client.search_symbols(pattern))
@@ -0,0 +1,134 @@
``ib`` backend
--------------
more or less the "everything broker" for traditional and international
markets. they are the "go to" provider for automatic retail trading
and we interface to their APIs using the `ib_insync` project.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.

currently there is not yet full support for:
- options charting and trading
- paxos based crypto rt feeds and trading


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [ib]
    hosts = [
        "127.0.0.1",
    ]
    # TODO: when we eventually spawn gateways in our
    # container, we can just dynamically allocate these
    # using IBC.
    ports = [
        4002,
        4003,
        4006,
        4001,
        7497,
    ]

    # XXX: for a paper account the flex web query service
    # is not supported so you have to manually download
    # an XML report and put it in a location that can be
    # accessed by the ``brokerd.ib`` backend code for parsing.
    flex_token = '1111111111111111'
    flex_trades_query_id = '6969696'  # live accounts only?

    # 3rd party web-api token
    # (XXX: not sure if this works yet)
    trade_log_token = '111111111111111'

    # when clients are being scanned this determines
    # which clients are preferred to be used for data feeds
    # based on account names which are detected as active
    # on each client.
    prefer_data_account = [
        # this has to be first in order to make data work with dual paper + live
        'main',
        'algopaper',
    ]

    [ib.accounts]
    main = 'U69696969'
    algopaper = 'DU9696969'


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <piker_conf_dir>/ledgers/trades_ib_algopaper.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    ["0000e1a7.630f5e5a.01.01"]
    secType = "FUT"
    conId = 515416577
    symbol = "MNQ"
    lastTradeDateOrContractMonth = "20221216"
    strike = 0.0
    right = ""
    multiplier = "2"
    exchange = "GLOBEX"
    primaryExchange = ""
    currency = "USD"
    localSymbol = "MNQZ2"
    tradingClass = "MNQ"
    includeExpired = false
    secIdType = ""
    secId = ""
    comboLegsDescrip = ""
    comboLegs = []
    execId = "0000e1a7.630f5e5a.01.01"
    time = 1661972086.0
    acctNumber = "DU69696969"
    side = "BOT"
    shares = 1.0
    price = 12372.75
    permId = 441472655
    clientId = 6116
    orderId = 985
    liquidation = 0
    cumQty = 1.0
    avgPrice = 12372.75
    orderRef = ""
|
||||||
|
evRule = ""
|
||||||
|
evMultiplier = 0.0
|
||||||
|
modelCode = ""
|
||||||
|
lastLiquidity = 1
|
||||||
|
broker_time = 1661972086.0
|
||||||
|
name = "ib"
|
||||||
|
commission = 0.57
|
||||||
|
realizedPNL = 243.41
|
||||||
|
yield_ = 0.0
|
||||||
|
yieldRedemptionDate = 0
|
||||||
|
listingExchange = "GLOBEX"
|
||||||
|
date = "2022-08-31T18:54:46+00:00"


your ``pps.toml`` file will have position entries like,

.. code:: toml

    [ib.algopaper."mnq.globex.20221216"]
    size = -1.0
    ppu = 12423.630576923071
    bsuid = 515416577
    expiry = "2022-12-16T00:00:00+00:00"
    clears = [
      { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
    ]

@@ -20,15 +20,10 @@ Interactive Brokers API backend.
 Sub-modules within break into the core functionalities:

 - ``broker.py`` part for orders / trading endpoints
-- ``data.py`` for real-time data feed endpoints
+- ``feed.py`` for real-time data feed endpoints
-- ``client.py`` for the core API machinery which is ``trio``-ized
+- ``api.py`` for the core API machinery which is ``trio``-ized
   wrapping around ``ib_insync``.
-
-- ``report.py`` for the hackery to build manual pp calcs
-  to avoid ib's absolute bullshit FIFO style position
-  tracking..
 """
 from .api import (
     get_client,

@@ -38,7 +33,10 @@ from .feed import (
     open_symbol_search,
     stream_quotes,
 )
-from .broker import trades_dialogue
+from .broker import (
+    trades_dialogue,
+    norm_trade_records,
+)

 __all__ = [
     'get_client',
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large
File diff suppressed because it is too large

@@ -0,0 +1,64 @@

``kraken`` backend
------------------
though they don't have the most liquidity of all the cexes they sure are
accommodating to those of us who appreciate a little ``xmr``.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [kraken]
    accounts.spot = 'spot'
    key_descr = "spot"
    api_key = "69696969696969696696969696969696969696969696969696969696"
    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    [TFJBKK-SMBZS-VJ4UWS]
    ordertxid = "SMBZSA-7CNQU-3HWLNJ"
    postxid = "SMBZSE-M7IF5-CFI7LT"
    pair = "XXMRZEUR"
    time = 1655691993.4133966
    type = "buy"
    ordertype = "limit"
    price = "103.97000000"
    cost = "499.99999977"
    fee = "0.80000000"
    vol = "4.80907954"
    margin = "0.00000000"
    misc = ""


your ``pps.toml`` file will have position entries like,

.. code:: toml

    [kraken.spot."xmreur.kraken"]
    size = 4.80907954
    ppu = 103.97000000
    bsuid = "XXMRZEUR"
    clears = [
      { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
    ]
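
The ``clears`` table carries the per-fill detail from which the summary
``size``/``ppu`` fields are derived; a toy consistency check (naive
size-weighted mean, ignoring the fee handling piker's real ppu calc
applies):

.. code:: python

    import tomllib  # stdlib as of python 3.11

    with open('pps.toml', 'rb') as f:
        pps = tomllib.load(f)

    pp = pps['kraken']['spot']['xmreur.kraken']
    clears = pp['clears']

    size = sum(c['size'] for c in clears)
    assert abs(size - pp['size']) < 1e-9

    # naive size-weighted entry price; treat as approximate since the
    # real calc also folds in fees and flips-through-zero
    ppu = sum(c['price'] * c['size'] for c in clears) / size
    print(ppu, pp['ppu'])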

@@ -0,0 +1,61 @@

# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Kraken backend.

Sub-modules within break into the core functionalities:

- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
  wrapping around the ``asks`` HTTP client.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import (
    trades_dialogue,
    norm_trade_records,
)


__all__ = [
    'get_client',
    'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]

@@ -0,0 +1,536 @@

# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Kraken web API wrapping.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
import itertools
from typing import (
    Any,
    Optional,
    Union,
)
import time

from bidict import bidict
import pendulum
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import urllib.parse
import hashlib
import hmac
import base64
import trio

from piker import config
from piker.brokers._util import (
    resproc,
    SymbolNotFound,
    BrokerError,
    DataThrottle,
)
from piker.pp import Transaction
from . import log

# <uri>/<version>/
_url = 'https://api.kraken.com/0'


# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('count', int),
    ('bar_wap', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
    'tick_decimals': 'pair_decimals',
}


def get_config() -> dict[str, Any]:

    conf, path = config.load()
    section = conf.get('kraken')

    if section is None:
        log.warning(f'No config section found for kraken in {path}')
        return {}

    return section


def get_kraken_signature(
    urlpath: str,
    data: dict[str, Any],
    secret: str
) -> str:
    postdata = urllib.parse.urlencode(data)
    encoded = (str(data['nonce']) + postdata).encode()
    message = urlpath.encode() + hashlib.sha256(encoded).digest()

    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
    sigdigest = base64.b64encode(mac.digest())
    return sigdigest.decode()
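

# signing sketch (illustrative only; ``api_key``/``secret`` would come
# from the ``[kraken]`` section of ``brokers.toml``):
#
# >>> data = {'asset': 'XXBT', 'nonce': str(int(1000*time.time()))}
# >>> sig = get_kraken_signature('/0/private/WithdrawStatus', data, secret)
# >>> headers = {'API-Key': api_key, 'API-Sign': sig}
#
# which mirrors the header layout built by ``Client._private()`` below.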


class InvalidKey(ValueError):
    '''
    EAPI:Invalid key
    This error is returned when the API key used for the call is
    either expired or disabled, please review the API key in your
    Settings -> API tab of account management or generate a new one
    and update your application.

    '''


class Client:

    # global symbol normalization table
    _ntable: dict[str, str] = {}
    _atable: bidict[str, str] = bidict()

    def __init__(
        self,
        config: dict[str, str],
        name: str = '',
        api_key: str = '',
        secret: str = ''
    ) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
        self.conf: dict[str, str] = config
        self._pairs: list[str] = []
        self._name = name
        self._api_key = api_key
        self._secret = secret

    @property
    def pairs(self) -> dict[str, Any]:
        if self._pairs is None:
            raise RuntimeError(
                "Make sure to run `cache_symbols()` on startup!"
            )
            # retrieve and cache all symbols

        return self._pairs

    async def _public(
        self,
        method: str,
        data: dict,
    ) -> dict[str, Any]:
        resp = await self._sesh.post(
            path=f'/public/{method}',
            json=data,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def _private(
        self,
        method: str,
        data: dict,
        uri_path: str
    ) -> dict[str, Any]:
        headers = {
            'Content-Type':
                'application/x-www-form-urlencoded',
            'API-Key':
                self._api_key,
            'API-Sign':
                get_kraken_signature(uri_path, data, self._secret)
        }
        resp = await self._sesh.post(
            path=f'/private/{method}',
            data=data,
            headers=headers,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def endpoint(
        self,
        method: str,
        data: dict[str, Any]

    ) -> dict[str, Any]:
        uri_path = f'/0/private/{method}'
        data['nonce'] = str(int(1000*time.time()))
        return await self._private(method, data, uri_path)

    async def get_balances(
        self,
    ) -> dict[str, float]:
        '''
        Return the set of asset balances for this account
        by symbol.

        '''
        resp = await self.endpoint(
            'Balance',
            {},
        )
        by_bsuid = resp['result']
        return {
            self._atable[sym].lower(): float(bal)
            for sym, bal in by_bsuid.items()
        }

    async def get_assets(self) -> dict[str, dict]:
        resp = await self._public('Assets', {})
        return resp['result']

    async def cache_assets(self) -> None:
        assets = self.assets = await self.get_assets()
        for bsuid, info in assets.items():
            self._atable[bsuid] = info['altname']

    async def get_trades(
        self,
        fetch_limit: int = 10,

    ) -> dict[str, Any]:
        '''
        Get the trades (aka cleared orders) history from the rest endpoint:
        https://docs.kraken.com/rest/#operation/getTradeHistory

        '''
        ofs = 0
        trades_by_id: dict[str, Any] = {}

        for i in itertools.count():
            if i >= fetch_limit:
                break

            # increment 'ofs' pagination offset
            ofs = i*50

            resp = await self.endpoint(
                'TradesHistory',
                {'ofs': ofs},
            )
            by_id = resp['result']['trades']
            trades_by_id.update(by_id)

            # we can get up to 50 results per query
            if (
                len(by_id) < 50
            ):
                err = resp.get('error')
                if err:
                    raise BrokerError(err)

                # we received fewer than the max 50 results so there
                # can't be any more history; grab the total count and
                # catch the end of the trades.
                count = resp['result']['count']
                break

        # sanity check on update
        assert count == len(trades_by_id.values())
        return trades_by_id


    async def get_xfers(
        self,
        asset: str,
        src_asset: str = '',

    ) -> dict[str, Transaction]:
        '''
        Get asset balance transfer transactions.

        Currently only withdrawals are supported.

        '''
        xfers: list[dict] = (await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
        ))['result']

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
        #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
        #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
        #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
        #     'amount': '0.00300726', 'fee': '0.00001000', 'time':
        #     1658347714, 'status': 'Success'}]}

        trans: dict[str, Transaction] = {}
        for entry in xfers:
            # look up the normalized name
            asset = self._atable[entry['asset']].lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commissions cost necessarily..)
            cost = float(entry['fee'])

            tran = Transaction(
                fqsn=asset + '.kraken',
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
                bsuid=f'{asset}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +
                    cost
                ),
                # since this will be treated as a "sell" it
                # shouldn't be needed to compute the be price.
                price='NaN',

                # XXX: see note above
                cost=0,
            )
            trans[tran.tid] = tran

        return trans

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float,
        reqid: str = None,
        validate: bool = False  # set True to test the call without a real submission

    ) -> dict:
        '''
        Place an order and return integer request id provided by client.

        '''
        # Build common data dict for common keys from both endpoints
        data = {
            "pair": symbol,
            "price": str(price),
            "validate": validate
        }
        if reqid is None:
            # Build order data for kraken api
            data |= {
                "ordertype": "limit",
                "type": action,
                "volume": str(size),
            }
            return await self.endpoint('AddOrder', data)

        else:
            # Edit order data for kraken api
            data["txid"] = reqid
            return await self.endpoint('EditOrder', data)

    async def submit_cancel(
        self,
        reqid: str,
    ) -> dict:
        '''
        Send cancel request for order id ``reqid``.

        '''
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})
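
    # order-entry usage sketch (values illustrative):
    #
    # >>> resp = await client.submit_limit(
    # ...     symbol='XXMRZEUR',
    # ...     price=103.97,
    # ...     action='buy',
    # ...     size=4.8,
    # ...     validate=True,  # dry-run, no live submission
    # ... )
    #
    # passing an existing kraken ``txid`` as ``reqid`` instead routes the
    # request through ``EditOrder`` to modify the live order in place.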

    async def symbol_info(
        self,
        pair: Optional[str] = None,

    ) -> dict[str, dict[str, str]]:

        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return data
        else:
            return pairs

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

            ntable = {}
            for restapikey, info in self._pairs.items():
                ntable[restapikey] = ntable[info['wsname']] = info['altname']

            self._ntable.update(ntable)

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,
    ) -> dict[str, Any]:
        if self._pairs is not None:
            data = self._pairs
        else:
            data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0]['altname']: item[0] for item in matches}

    async def bars(
        self,
        symbol: str = 'XBTUSD',

        # UTC 2017-07-02 12:53:20
        since: Optional[Union[int, datetime]] = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if since is None:
            since = pendulum.now('UTC').start_of('minute').subtract(
                minutes=count).timestamp()

        elif isinstance(since, int):
            since = pendulum.from_timestamp(since).timestamp()

        else:  # presumably a pendulum datetime
            since = since.timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, int(since)))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            return array
        except KeyError:
            errmsg = json['error'][0]

            if 'not found' in errmsg:
                raise SymbolNotFound(errmsg + f': {symbol}')

            elif 'Too many requests' in errmsg:
                raise DataThrottle(f'{symbol}')

            else:
                raise BrokerError(errmsg)

    @classmethod
    def normalize_symbol(
        cls,
        ticker: str
    ) -> str:
        '''
        Normalize symbol names to a 3x3 pair from the global
        definition map which we build out from the data retrieved from
        the 'AssetPairs' endpoint, see methods above.

        '''
        ticker = cls._ntable[ticker]
        return ticker.lower()


@acm
async def get_client() -> Client:

    conf = get_config()
    if conf:
        client = Client(
            conf,
            name=conf['key_descr'],
            api_key=conf['api_key'],
            secret=conf['secret']
        )
    else:
        client = Client({})

    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as nurse:
        nurse.start_soon(client.cache_assets)
        await client.cache_symbols()

    yield client
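
# typical consumer sketch (illustrative):
#
# >>> async with get_client() as client:
# ...     bars = await client.bars('XBTUSD')
# ...     print(bars['close'][-5:])  # structured-array field access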
File diff suppressed because it is too large

@@ -0,0 +1,500 @@

# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Real-time and historical data feed endpoints.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import (
    Any,
    Optional,
    Callable,
)
import time

from async_generator import aclosing
from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
import tractor
import trio

from piker._cacheables import open_cached_client
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
from piker.log import get_console_log
from piker.data import ShmArray
from piker.data.types import Struct
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (
    Client,
)


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    cost_decimals: int
    costmin: float
    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
    tick_size: float  # min price step size
    status: str

    short_position_limit: float
    long_position_limit: float


class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
    https://docs.kraken.com/websockets/#message-ohlc

    '''
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1 (name-interval)
    pair: str  # fx pair
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
    high: float  # High price within interval
    low: float  # Low price within interval
    close: float  # Close price of interval
    vwap: float  # Volume weighted average price within interval
    volume: float  # Accumulated volume **within interval**
    count: int  # Number of trades within interval
    # (sampled) generated tick data
    ticks: list[Any] = []


async def stream_messages(
    ws: NoBsWs,
):
    '''
    Message stream parser and heartbeat handler.

    Deliver ws subscription messages as well as handle heartbeat logic
    through a single async generator.

    '''
    too_slow_count = last_hb = 0

    while True:

        with trio.move_on_after(5) as cs:
            msg = await ws.recv_msg()

        # trigger reconnection if heartbeat is laggy
        if cs.cancelled_caught:

            too_slow_count += 1

            if too_slow_count > 20:
                log.warning(
                    "Heartbeat is too slow, resetting ws connection")

                await ws._connect()
                too_slow_count = 0
                continue

        match msg:
            case {'event': 'heartbeat'}:
                now = time.time()
                delay = now - last_hb
                last_hb = now

                # XXX: why tf is this not printing without --tl flag?
                log.debug(f"Heartbeat after {delay}")
                # print(f"Heartbeat after {delay}")

                continue

            case _:
                # passthrough sub msgs
                yield msg


async def process_data_feed_msgs(
    ws: NoBsWs,
):
    '''
    Parse and pack data feed messages.

    '''
    async for msg in stream_messages(ws):
        match msg:
            case {
                'errorMessage': errmsg
            }:
                raise BrokerError(errmsg)

            case {
                'event': 'subscriptionStatus',
            } as sub:
                log.info(
                    'WS subscription is active:\n'
                    f'{sub}'
                )
                continue

            case [
                chan_id,
                *payload_array,
                chan_name,
                pair
            ]:
                if 'ohlc' in chan_name:
                    ohlc = OHLC(
                        chan_id,
                        chan_name,
                        pair,
                        *payload_array[0]
                    )
                    ohlc.typecast()
                    yield 'ohlc', ohlc

                elif 'spread' in chan_name:

                    bid, ask, ts, bsize, asize = map(
                        float, payload_array[0])

                    # TODO: really makes you think IB has a horrible API...
                    quote = {
                        'symbol': pair.replace('/', ''),
                        'ticks': [
                            {'type': 'bid', 'price': bid, 'size': bsize},
                            {'type': 'bsize', 'price': bid, 'size': bsize},

                            {'type': 'ask', 'price': ask, 'size': asize},
                            {'type': 'asize', 'price': ask, 'size': asize},
                        ],
                    }
                    yield 'l1', quote

                # elif 'book' in msg[-2]:
                #     chan_id, *payload_array, chan_name, pair = msg
                #     print(msg)

            case _:
                print(f'UNHANDLED MSG: {msg}')
                # yield msg


def normalize(
    ohlc: OHLC,

) -> dict:
    quote = ohlc.to_dict()
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercases symbols.
    topic = quote['pair'].replace('/', '').lower()

    # print(quote)
    return topic, quote
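
# normalization sketch: an incoming ``OHLC`` for e.g. 'XMR/EUR' becomes
#
# >>> topic, quote = normalize(ohlc)
# >>> topic                # 'xmreur': slash-stripped, lowercased
# >>> quote['last']        # alias of the interval close price
# >>> quote['brokerd_ts']  # local receive time vs. 'broker_ts'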


@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more than the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            timeframe: float,
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            nonlocal queries
            if (
                queries > 0
                or timeframe != 60
            ):
                raise DataUnavailable(
                    'Only a single query for 1m bars supported')

            count = 0
            while count <= 3:
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    count += 1
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 1, 'rate': 1}


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}
    sym_infos = {}

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()
            sym_info = await client.symbol_info(sym)
            try:
                si = Pair(**sym_info)  # validation
            except TypeError:
                fields_diff = set(sym_info) - set(Pair.__struct_fields__)
                raise TypeError(
                    f'Missing msg fields {fields_diff}'
                )
            syminfo = si.to_dict()
            syminfo['price_tick_size'] = 1 / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1 / 10**si.lot_decimals
            syminfo['asset_type'] = 'crypto'
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: NoBsWs):

            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'ohlc',
                    'interval': 1,
                },
            }

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'spread',
                    # 'depth': 10}
                },
            }

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            if ws.connected():
                await ws.send_msg({
                    'pair': list(ws_pairs.values()),
                    'event': 'unsubscribe',
                    'subscription': ['ohlc', 'spread'],
                })

                # XXX: do we need to ack the unsub?
                # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with (
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
            ) as ws,
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs, quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                await send_chan.send({topic: quote})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send(
                    {item[0]['altname']: item[0]
                     for item in matches}
                )

@@ -18,3 +18,9 @@
 Market machinery for order executions, book, management.

 """
+from ._client import open_ems
+
+
+__all__ = [
+    'open_ems',
+]
@@ -22,54 +22,10 @@ from enum import Enum
 from typing import Optional

 from bidict import bidict
-from pydantic import BaseModel, validator

 from ..data._source import Symbol
-from ._messages import BrokerdPosition, Status
+from ..data.types import Struct
+from ..pp import Position
-
-
-class Position(BaseModel):
-    '''
-    Basic pp (personal position) model with attached fills history.
-
-    This type should be IPC wire ready?
-
-    '''
-    symbol: Symbol
-
-    # last size and avg entry price
-    size: float
-    avg_price: float  # TODO: contextual pricing
-
-    # ordered record of known constituent trade messages
-    fills: list[Status] = []
-
-    def update_from_msg(
-        self,
-        msg: BrokerdPosition,
-
-    ) -> None:
-
-        # XXX: better place to do this?
-        symbol = self.symbol
-
-        lot_size_digits = symbol.lot_size_digits
-        avg_price, size = (
-            round(msg['avg_price'], ndigits=symbol.tick_size_digits),
-            round(msg['size'], ndigits=lot_size_digits),
-        )
-
-        self.avg_price = avg_price
-        self.size = size
-
-    @property
-    def dsize(self) -> float:
-        '''
-        The "dollar" size of the pp, normally in trading (fiat) unit
-        terms.
-
-        '''
-        return self.avg_price * self.size


 _size_units = bidict({

@@ -84,34 +40,9 @@ SizeUnit = Enum(
 )


-@dataclass
-class Allocator(BaseModel):
+class Allocator(Struct):
-
-    class Config:
-        validate_assignment = True
-        copy_on_model_validation = False
-        arbitrary_types_allowed = True
-
-        # required to get the account validator lookup working?
-        extra = 'allow'
-        underscore_attrs_are_private = False

     symbol: Symbol
-    account: Optional[str] = 'paper'
-    # TODO: for enums this clearly doesn't fucking work, you can't set
-    # a default at startup by passing in a `dict` but yet you can set
-    # that value through assignment..for wtv cucked reason.. honestly, pure
-    # unintuitive garbage.
-    size_unit: str = 'currency'
-    _size_units: dict[str, Optional[str]] = _size_units
-
-    @validator('size_unit', pre=True)
-    def maybe_lookup_key(cls, v):
-        # apply the corresponding enum key for the text "description" value
-        if v not in _size_units:
-            return _size_units.inverse[v]
-
-        assert v in _size_units
-        return v
-
 # TODO: if we ever want ot support non-uniform entry-slot-proportion
 # "sizes"

@@ -120,6 +51,28 @@ class Allocator(BaseModel):
     units_limit: float
     currency_limit: float
     slots: int
+    account: Optional[str] = 'paper'
+
+    _size_units: bidict[str, Optional[str]] = _size_units
+
+    # TODO: for enums this clearly doesn't fucking work, you can't set
+    # a default at startup by passing in a `dict` but yet you can set
+    # that value through assignment..for wtv cucked reason.. honestly, pure
+    # unintuitive garbage.
+    _size_unit: str = 'currency'
+
+    @property
+    def size_unit(self) -> str:
+        return self._size_unit
+
+    @size_unit.setter
+    def size_unit(self, v: str) -> Optional[str]:
+        if v not in _size_units:
+            v = _size_units.inverse[v]
+
+        assert v in _size_units
+        self._size_unit = v
+        return v
+
     def step_sizes(
         self,
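
The old ``pydantic`` ``@validator`` becomes a plain property/setter; a
toy sketch of the normalization it performs, assuming ``_size_units``
maps short keys to display labels (the label strings below are
assumptions, the real ones are defined just above these hunks):

.. code:: python

    from bidict import bidict

    _size_units = bidict({
        'currency': '$ size',  # label strings here are assumptions
        'units': '# units',
    })

    v = '$ size'
    if v not in _size_units:        # it's a label, not a key..
        v = _size_units.inverse[v]  # ..so map it back to its key

    assert v == 'currency'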
@@ -140,10 +93,13 @@ class Allocator(BaseModel):
         else:
             return self.units_limit

+    def limit_info(self) -> tuple[str, float]:
+        return self.size_unit, self.limit()
+
     def next_order_info(
         self,

-        # we only need a startup size for exit calcs, we can the
+        # we only need a startup size for exit calcs, we can then
         # determine how large slots should be if the initial pp size was
         # larger then the current live one, and the live one is smaller
         # then the initial config settings.

@@ -173,7 +129,7 @@ class Allocator(BaseModel):
             l_sub_pp = self.units_limit - abs_live_size

         elif size_unit == 'currency':
-            live_cost_basis = abs_live_size * live_pp.avg_price
+            live_cost_basis = abs_live_size * live_pp.ppu
             slot_size = currency_per_slot / price
             l_sub_pp = (self.currency_limit - live_cost_basis) / price

@@ -184,12 +140,14 @@ class Allocator(BaseModel):

         # an entry (adding-to or starting a pp)
         if (
-            action == 'buy' and live_size > 0 or
-            action == 'sell' and live_size < 0 or
             live_size == 0
+            or (action == 'buy' and live_size > 0)
+            or action == 'sell' and live_size < 0
         ):
-            order_size = min(slot_size, l_sub_pp)
+            order_size = min(
+                slot_size,
+                max(l_sub_pp, 0),
+            )

         # an exit (removing-from or going to net-zero pp)
         else:

@@ -205,7 +163,7 @@ class Allocator(BaseModel):
         if size_unit == 'currency':
             # compute the "projected" limit's worth of units at the
             # current pp (weighted) price:
-            slot_size = currency_per_slot / live_pp.avg_price
+            slot_size = currency_per_slot / live_pp.ppu

         else:
             slot_size = u_per_slot

@@ -244,7 +202,12 @@ class Allocator(BaseModel):
         if order_size < slot_size:
             # compute a fractional slots size to display
             slots_used = self.slots_used(
-                Position(symbol=sym, size=order_size, avg_price=price)
+                Position(
+                    symbol=sym,
+                    size=order_size,
+                    ppu=price,
+                    bsuid=sym,
+                )
             )

         return {

@@ -271,8 +234,8 @@ class Allocator(BaseModel):
         abs_pp_size = abs(pp.size)

         if self.size_unit == 'currency':
-            # live_currency_size = size or (abs_pp_size * pp.avg_price)
-            live_currency_size = abs_pp_size * pp.avg_price
+            # live_currency_size = size or (abs_pp_size * pp.ppu)
+            live_currency_size = abs_pp_size * pp.ppu
             prop = live_currency_size / self.currency_limit

         else:

@@ -284,14 +247,6 @@ class Allocator(BaseModel):
         return round(prop * self.slots)


-_derivs = (
-    'future',
-    'continuous_future',
-    'option',
-    'futures_option',
-)


 def mk_allocator(

     symbol: Symbol,

@@ -300,7 +255,7 @@ def mk_allocator(
     # default allocation settings
     defaults: dict[str, float] = {
         'account': None,  # select paper by default
-        'size_unit': 'currency',
+        # 'size_unit': 'currency',
         'units_limit': 400,
         'currency_limit': 5e3,
         'slots': 4,

@@ -318,42 +273,9 @@ def mk_allocator(
         'currency_limit': 6e3,
         'slots': 6,
     }

     defaults.update(user_def)

-    alloc = Allocator(
+    return Allocator(
         symbol=symbol,
         **defaults,
     )
-
-    asset_type = symbol.type_key
-
-    # specific configs by asset class / type
-
-    if asset_type in _derivs:
-        # since it's harder to know how currency "applies" in this case
-        # given leverage properties
-        alloc.size_unit = '# units'
-
-        # set units limit to slots size thus making make the next
-        # entry step 1.0
-        alloc.units_limit = alloc.slots
-
-    # if the current position is already greater then the limit
-    # settings, increase the limit to the current position
-    if alloc.size_unit == 'currency':
-        startup_size = startup_pp.size * startup_pp.avg_price
-
-        if startup_size > alloc.currency_limit:
-            alloc.currency_limit = round(startup_size, ndigits=2)
-
-    else:
-        startup_size = abs(startup_pp.size)
-
-        if startup_size > alloc.units_limit:
-            alloc.units_limit = startup_size
-
-    if asset_type in _derivs:
-        alloc.slots = alloc.units_limit
-
-    return alloc
@ -18,26 +18,32 @@
|
||||||
Orders and execution client API.
|
Orders and execution client API.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
from contextlib import asynccontextmanager as acm
|
from contextlib import asynccontextmanager as acm
|
||||||
from typing import Dict
|
|
||||||
from pprint import pformat
|
from pprint import pformat
|
||||||
from dataclasses import dataclass, field
|
from typing import TYPE_CHECKING
|
||||||
|
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
from tractor.trionics import broadcast_receiver
|
from tractor.trionics import broadcast_receiver
|
||||||
|
|
||||||
from ..log import get_logger
|
from ..log import get_logger
|
||||||
from ._ems import _emsd_main
|
from ..data.types import Struct
|
||||||
from .._daemon import maybe_open_emsd
|
from .._daemon import maybe_open_emsd
|
||||||
from ._messages import Order, Cancel
|
from ._messages import Order, Cancel
|
||||||
|
from ..brokers import get_brokermod
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ._messages import (
|
||||||
|
BrokerdPosition,
|
||||||
|
Status,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
@dataclass
|
class OrderBook(Struct):
|
||||||
class OrderBook:
|
|
||||||
'''EMS-client-side order book ctl and tracking.
|
'''EMS-client-side order book ctl and tracking.
|
||||||
|
|
||||||
A style similar to "model-view" is used here where this api is
|
A style similar to "model-view" is used here where this api is
|
||||||
|
@@ -52,20 +58,18 @@ class OrderBook:
     # mem channels used to relay order requests to the EMS daemon
     _to_ems: trio.abc.SendChannel
     _from_order_book: trio.abc.ReceiveChannel
-    _sent_orders: Dict[str, Order] = field(default_factory=dict)
-    _ready_to_receive: trio.Event = trio.Event()
+    _sent_orders: dict[str, Order] = {}

     def send(
         self,
-        msg: Order,
+        msg: Order | dict,

     ) -> dict:
         self._sent_orders[msg.oid] = msg
-        self._to_ems.send_nowait(msg.dict())
+        self._to_ems.send_nowait(msg)
         return msg

-    def update(
+    def send_update(
         self,

         uuid: str,
@@ -73,9 +77,8 @@ class OrderBook:

     ) -> dict:
         cmd = self._sent_orders[uuid]
-        msg = cmd.dict()
-        msg.update(data)
-        self._sent_orders[uuid] = Order(**msg)
+        msg = cmd.copy(update=data)
+        self._sent_orders[uuid] = msg
         self._to_ems.send_nowait(msg)
         return cmd

@@ -83,12 +86,18 @@ class OrderBook:
         """Cancel an order (or alert) in the EMS.

         """
-        cmd = self._sent_orders[uuid]
+        cmd = self._sent_orders.get(uuid)
+        if not cmd:
+            log.error(
+                f'Unknown order {uuid}!?\n'
+                f'Maybe there is a stale entry or line?\n'
+                f'You should report this as a bug!'
+            )
         msg = Cancel(
             oid=uuid,
             symbol=cmd.symbol,
         )
-        self._to_ems.send_nowait(msg.dict())
+        self._to_ems.send_nowait(msg)


 _orders: OrderBook = None
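Taken together, `send()`, `send_update()` and `cancel()` above form the whole client-side dialog surface. A minimal usage sketch; the fqsn, prices, and the kwargs-style field overrides to `send_update()` are assumptions inferred from the method bodies, not a documented API:

```python
import uuid

from piker.clearing._client import get_orders
from piker.clearing._messages import Order

book = get_orders()
oid = str(uuid.uuid4())

# stage a new dark-triggered buy
book.send(Order(
    action='buy',
    exec_mode='dark',
    oid=oid,
    symbol='xbtusd.kraken',  # hypothetical fqsn
    account='paper',
    price=20_000.0,
    size=0.1,
))

# edit the live dialog in place; the `.copy(update=data)` body above
# implies field overrides are passed as kwargs
book.send_update(uuid=oid, price=19_500.0)

# and tear the dialog down
book.cancel(uuid=oid)
```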
@@ -149,21 +158,35 @@ async def relay_order_cmds_from_sync_code(
     book = get_orders()
     async with book._from_order_book.subscribe() as orders_stream:
         async for cmd in orders_stream:
-            if cmd['symbol'] == symbol_key:
-                log.info(f'Send order cmd:\n{pformat(cmd)}')
+            sym = cmd.symbol
+            msg = pformat(cmd)
+            if sym == symbol_key:
+                log.info(f'Send order cmd:\n{msg}')
                 # send msg over IPC / wire
                 await to_ems_stream.send(cmd)
+            else:
+                log.warning(
+                    f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
+                    f'\n{msg}'
+                )


 @acm
 async def open_ems(
     fqsn: str,
+    mode: str = 'live',

-) -> (
+) -> tuple[
     OrderBook,
     tractor.MsgStream,
-    dict,
-):
+    dict[
+        # brokername, acctid
+        tuple[str, str],
+        list[BrokerdPosition],
+    ],
+    list[str],
+    dict[str, Status],
+]:
     '''
     Spawn an EMS daemon and begin sending orders and receiving
     alerts.
@@ -206,18 +229,35 @@ async def open_ems(

     async with maybe_open_emsd(broker) as portal:

+        mod = get_brokermod(broker)
+        if (
+            not getattr(mod, 'trades_dialogue', None)
+            or mode == 'paper'
+        ):
+            mode = 'paper'
+
+        from ._ems import _emsd_main
         async with (
             # connect to emsd
             portal.open_context(

                 _emsd_main,
                 fqsn=fqsn,
+                exec_mode=mode,

-            ) as (ctx, (positions, accounts)),
+            ) as (
+                ctx,
+                (
+                    positions,
+                    accounts,
+                    dialogs,
+                )
+            ),

             # open 2-way trade command stream
             ctx.open_stream() as trades_stream,
         ):
+            # start sync code order msg delivery task
             async with trio.open_nursery() as n:
                 n.start_soon(
                     relay_order_cmds_from_sync_code,
@@ -225,4 +265,10 @@ async def open_ems(
                     trades_stream
                 )

-                yield book, trades_stream, positions, accounts
+                yield (
+                    book,
+                    trades_stream,
+                    positions,
+                    accounts,
+                    dialogs,
+                )
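For callers the widened return signature means one extra unpack slot. A hedged sketch of consuming the new api (fqsn and mode values are illustrative only):

```python
from piker.clearing._client import open_ems

async def consume_ems() -> None:
    async with open_ems('xbtusd.kraken', mode='paper') as (
        book,           # OrderBook: request side
        trades_stream,  # tractor.MsgStream of EMS status updates
        positions,      # keyed by (brokername, acctid) per the hint above
        accounts,       # list[str]
        dialogs,        # dict[str, Status]: snapshot of open order flows
    ):
        async for msg in trades_stream:
            print(msg)
```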
File diff suppressed because it is too large
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -15,108 +15,160 @@
 # along with this program. If not, see <https://www.gnu.org/licenses/>.

 """
-Clearing system messagingn types and protocols.
+Clearing sub-system message and protocols.

 """
-from typing import Optional, Union
-
-# TODO: try out just encoding/send direction for now?
-# import msgspec
-from pydantic import BaseModel
+# from collections import (
+#     ChainMap,
+#     deque,
+# )
+from typing import (
+    Optional,
+    Literal,
+)

 from ..data._source import Symbol
+from ..data.types import Struct


+# TODO: a composite for tracking msg flow on 2-legged
+# dialogs.
+# class Dialog(ChainMap):
+#     '''
+#     Msg collection abstraction to easily track the state changes of
+#     a msg flow in one high level, query-able and immutable construct.
+
+#     The main use case is to query data from a (long-running)
+#     msg-transaction-sequence
+
+
+#     '''
+#     def update(
+#         self,
+#         msg,
+#     ) -> None:
+#         self.maps.insert(0, msg.to_dict())
+
+#     def flatten(self) -> dict:
+#         return dict(self)
+
+
+# TODO: ``msgspec`` stuff worth paying attention to:
+# - schema evolution:
+#   https://jcristharif.com/msgspec/usage.html#schema-evolution
+# - for eg. ``BrokerdStatus``, instead just have separate messages?
+# - use literals for a common msg determined by diff keys?
+#   - https://jcristharif.com/msgspec/usage.html#literal
+
+# --------------
 # Client -> emsd
+# --------------

-class Cancel(BaseModel):
-    '''Cancel msg for removing a dark (ems triggered) or
-    broker-submitted (live) trigger/order.
-
-    '''
-    action: str = 'cancel'
-    oid: str  # uuid4
-    symbol: str
-
-
-class Order(BaseModel):
-
-    action: str  # {'buy', 'sell', 'alert'}
-    # internal ``emdsd`` unique "order id"
-    oid: str  # uuid4
-    symbol: Union[str, Symbol]
-    account: str  # should we set a default as '' ?
-
-    price: float
-    size: float
-    brokers: list[str]
-
-    # Assigned once initial ack is received
-    # ack_time_ns: Optional[int] = None
+class Order(Struct):

+    # TODO: ideally we can combine these 2 fields into
+    # 1 and just use the size polarity to determine a buy/sell.
+    # i would like to see this become more like
+    # https://jcristharif.com/msgspec/usage.html#literal
+    # action: Literal[
+    #     'live',
+    #     'dark',
+    #     'alert',
+    # ]
+
+    action: Literal[
+        'buy',
+        'sell',
+        'alert',
+    ]
     # determines whether the create execution
     # will be submitted to the ems or directly to
     # the backend broker
-    exec_mode: str  # {'dark', 'live', 'paper'}
+    exec_mode: Literal[
+        'dark',
+        'live',
+        # 'paper', no right?
+    ]

-    class Config:
-        # just for pre-loading a ``Symbol`` when used
-        # in the order mode staging process
-        arbitrary_types_allowed = True
-        # don't copy this model instance when used in
-        # a recursive model
-        copy_on_model_validation = False
+    # internal ``emdsd`` unique "order id"
+    oid: str  # uuid4
+    symbol: str | Symbol
+    account: str  # should we set a default as '' ?

+    price: float
+    size: float  # -ve is "sell", +ve is "buy"

+    brokers: Optional[list[str]] = []


+class Cancel(Struct):
+    '''
+    Cancel msg for removing a dark (ems triggered) or
+    broker-submitted (live) trigger/order.
+
+    '''
+    oid: str  # uuid4
+    symbol: str
+    action: str = 'cancel'


+# --------------
 # Client <- emsd
+# --------------
 # update msgs from ems which relay state change info
 # from the active clearing engine.

-class Status(BaseModel):
+class Status(Struct):

+    time_ns: int
+    oid: str  # uuid4 ems-order dialog id
+
+    resp: Literal[
+        'pending',  # acked by broker but not yet open
+        'open',
+        'dark_open',  # dark/algo triggered order is open in ems clearing loop
+        'triggered',  # above triggered order sent to brokerd, or an alert closed
+        'closed',  # fully cleared all size/units
+        'fill',  # partial execution
+        'canceled',
+        'error',
+    ]

     name: str = 'status'
-    oid: str  # uuid4
-    time_ns: int
-
-    # {
-    # 'dark_submitted',
-    # 'dark_cancelled',
-    # 'dark_triggered',
-
-    # 'broker_submitted',
-    # 'broker_cancelled',
-    # 'broker_executed',
-    # 'broker_filled',
-    # 'broker_errored',
-
-    # 'alert_submitted',
-    # 'alert_triggered',
-
-    # }
-    resp: str  # "response", see above
-
-    # symbol: str
-
-    # trigger info
-    trigger_price: Optional[float] = None
-    # price: float
-
-    # broker: Optional[str] = None

     # this maps normally to the ``BrokerdOrder.reqid`` below, an id
     # normally allocated internally by the backend broker routing system
-    broker_reqid: Optional[Union[int, str]] = None
+    reqid: Optional[int | str] = None

-    # for relaying backend msg data "through" the ems layer
+    # the (last) source order/request msg if provided
+    # (eg. the Order/Cancel which causes this msg) and
+    # acts as a back-reference to the corresponding
+    # request message which was the source of this msg.
+    req: Order | None = None
+
+    # XXX: better design/name here?
+    # flag that can be set to indicate a message for an order
+    # event that wasn't originated by piker's emsd (eg. some external
+    # trading system which does it's own order control but that you
+    # might want to "track" using piker UIs/systems).
+    src: Optional[str] = None
+
+    # set when a cancel request msg was set for this order flow dialog
+    # but the brokerd dialog isn't yet in a cancelled state.
+    cancel_called: bool = False
+
+    # for relaying a boxed brokerd-dialog-side msg data "through" the
+    # ems layer to clients.
     brokerd_msg: dict = {}


+# ---------------
 # emsd -> brokerd
+# ---------------
 # requests *sent* from ems to respective backend broker daemon

-class BrokerdCancel(BaseModel):
+class BrokerdCancel(Struct):

-    action: str = 'cancel'
     oid: str  # piker emsd order id
     time_ns: int

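The payoff of the `Literal` fields is decode-time validation of the `action`/`exec_mode`/`resp` values. A sketch of building the new request msgs (values illustrative, and assuming piker's `Struct` is the thin `msgspec.Struct` wrapper implied by the `..data.types` import above):

```python
from piker.clearing._messages import Cancel, Order

order = Order(
    action='sell',      # must be 'buy' | 'sell' | 'alert'
    exec_mode='live',    # must be 'dark' | 'live'
    oid='3d2c...',       # uuid4 in practice
    symbol='eurusd.ib',  # hypothetical fqsn
    account='paper',
    price=1.0512,
    size=-10_000,        # -ve is "sell", +ve is "buy"
)
# a cancel back-references the same dialog id
cancel = Cancel(oid=order.oid, symbol=order.symbol)
```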
@@ -127,34 +179,39 @@ class BrokerdCancel(BaseModel):
     # for setting a unique order id then this value will be relayed back
     # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
     # field
-    reqid: Optional[Union[int, str]] = None
+    reqid: Optional[int | str] = None
+    action: str = 'cancel'


-class BrokerdOrder(BaseModel):
+class BrokerdOrder(Struct):

-    action: str  # {buy, sell}
     oid: str
     account: str
     time_ns: int

+    symbol: str  # fqsn
+    price: float
+    size: float
+
+    # TODO: if we instead rely on a +ve/-ve size to determine
+    # the action we more or less don't need this field right?
+    action: str = ''  # {buy, sell}
+
     # "broker request id": broker specific/internal order id if this is
     # None, creates a new order otherwise if the id is valid the backend
     # api must modify the existing matching order. If the broker allows
     # for setting a unique order id then this value will be relayed back
     # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
     # field
-    reqid: Optional[Union[int, str]] = None
-
-    symbol: str  # symbol.<providername> ?
-    price: float
-    size: float
+    reqid: Optional[int | str] = None


+# ---------------
 # emsd <- brokerd
+# ---------------
 # requests *received* to ems from broker backend

-class BrokerdOrderAck(BaseModel):
+class BrokerdOrderAck(Struct):
     '''
     Immediate reponse to a brokerd order request providing the broker
     specific unique order id so that the EMS can associate this
@@ -162,42 +219,35 @@ class BrokerdOrderAck(BaseModel):
     ``.oid`` (which is a uuid4).

     '''
-    name: str = 'ack'

     # defined and provided by backend
-    reqid: Union[int, str]
+    reqid: int | str

     # emsd id originally sent in matching request msg
     oid: str
     account: str = ''
+    name: str = 'ack'


-class BrokerdStatus(BaseModel):
+class BrokerdStatus(Struct):

-    name: str = 'status'
-    reqid: Union[int, str]
+    reqid: int | str
     time_ns: int
+    status: Literal[
+        'open',
+        'canceled',
+        'fill',
+        'pending',
+        'error',
+    ]

-    # XXX: should be best effort set for every update
-    account: str = ''
-
-    # {
-    # 'submitted',
-    # 'cancelled',
-    # 'filled',
-    # }
-    status: str
+    account: str
+    name: str = 'status'

     filled: float = 0.0
     reason: str = ''
     remaining: float = 0.0

-    # XXX: better design/name here?
-    # flag that can be set to indicate a message for an order
-    # event that wasn't originated by piker's emsd (eg. some external
-    # trading system which does it's own order control but that you
-    # might want to "track" using piker UIs/systems).
-    external: bool = False
+    # external: bool = False

     # XXX: not required schema as of yet
     broker_details: dict = {
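A structural note on all the field shuffling in these hunks: `msgspec.Struct` (like stdlib dataclasses) requires fields without defaults to precede defaulted ones, which is why eg. `name: str = 'ack'` sinks to the bottom of each class in the pydantic-to-msgspec port. A minimal repro of the rule:

```python
import msgspec

class Ack(msgspec.Struct):
    reqid: int | str   # required fields first..
    oid: str
    account: str = ''  # ..defaulted fields last
    name: str = 'ack'

msg = Ack(reqid=42, oid='deadbeef')
print(msgspec.json.encode(msg))
# b'{"reqid":42,"oid":"deadbeef","account":"","name":"ack"}'
```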
@@ -205,59 +255,57 @@ class BrokerdStatus(BaseModel):
     }


-class BrokerdFill(BaseModel):
+class BrokerdFill(Struct):
     '''
     A single message indicating a "fill-details" event from the broker
     if avaiable.

     '''
-    name: str = 'fill'
-    reqid: Union[int, str]
-    time_ns: int
-
-    # order exeuction related
-    action: str
-    size: float
-    price: float
-
-    broker_details: dict = {}  # meta-data (eg. commisions etc.)
-
     # brokerd timestamp required for order mode arrow placement on x-axis

     # TODO: maybe int if we force ns?
     # we need to normalize this somehow since backends will use their
     # own format and likely across many disparate epoch clocks...
     broker_time: float
+    reqid: int | str
+    time_ns: int
+
+    # order exeuction related
+    size: float
+    price: float
+
+    name: str = 'fill'
+    action: Optional[str] = None
+    broker_details: dict = {}  # meta-data (eg. commisions etc.)


-class BrokerdError(BaseModel):
+class BrokerdError(Struct):
     '''
     Optional error type that can be relayed to emsd for error handling.

     This is still a TODO thing since we're not sure how to employ it yet.

     '''
-    name: str = 'error'
     oid: str
+    symbol: str
+    reason: str

     # if no brokerd order request was actually submitted (eg. we errored
     # at the ``pikerd`` layer) then there will be ``reqid`` allocated.
-    reqid: Optional[Union[int, str]] = None
+    reqid: Optional[int | str] = None

-    symbol: str
-    reason: str
+    name: str = 'error'
     broker_details: dict = {}


-class BrokerdPosition(BaseModel):
+class BrokerdPosition(Struct):
     '''Position update event from brokerd.

     '''
-    name: str = 'position'
-
     broker: str
     account: str
     symbol: str
-    currency: str
     size: float
     avg_price: float
+    currency: str = ''
+    name: str = 'position'
@@ -18,54 +18,71 @@
 Fake trading for forward testing.

 """
+from collections import defaultdict
 from contextlib import asynccontextmanager
 from datetime import datetime
 from operator import itemgetter
+import itertools
 import time
-from typing import Tuple, Optional, Callable
+from typing import (
+    Any,
+    Optional,
+    Callable,
+)
 import uuid

 from bidict import bidict
+import pendulum
 import trio
 import tractor
-from dataclasses import dataclass

 from .. import data
+from ..data._source import Symbol
+from ..data.types import Struct
+from ..pp import (
+    Position,
+    Transaction,
+)
 from ..data._normalize import iterticks
 from ..data._source import unpack_fqsn
 from ..log import get_logger
 from ._messages import (
-    BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,
-    BrokerdFill, BrokerdPosition, BrokerdError
+    BrokerdCancel,
+    BrokerdOrder,
+    BrokerdOrderAck,
+    BrokerdStatus,
+    BrokerdFill,
+    BrokerdPosition,
+    BrokerdError,
 )


 log = get_logger(__name__)


-@dataclass
-class PaperBoi:
-    """
-    Emulates a broker order client providing the same API and
-    delivering an order-event response stream but with methods for
+class PaperBoi(Struct):
+    '''
+    Emulates a broker order client providing approximately the same API
+    and delivering an order-event response stream but with methods for
     triggering desired events based on forward testing engine
-    requirements.
+    requirements (eg open, closed, fill msgs).

-    """
+    '''
     broker: str

     ems_trades_stream: tractor.MsgStream

     # map of paper "live" orders which be used
     # to simulate fills based on paper engine settings
-    _buys: bidict
-    _sells: bidict
+    _buys: defaultdict[str, bidict]
+    _sells: defaultdict[str, bidict]
     _reqids: bidict
-    _positions: dict[str, BrokerdPosition]
+    _positions: dict[str, Position]
+    _trade_ledger: dict[str, Any]

     # init edge case L1 spread
-    last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size
-    last_bid: Tuple[float, float] = (0, 0)
+    last_ask: tuple[float, float] = (float('inf'), 0)  # price, size
+    last_bid: tuple[float, float] = (0, 0)

     async def submit_limit(
         self,
@@ -75,27 +92,24 @@ class PaperBoi:
         action: str,
         size: float,
         reqid: Optional[str],

     ) -> int:
-        """Place an order and return integer request id provided by client.
-        """
+        '''
+        Place an order and return integer request id provided by client.
+
+        '''
-        is_modify: bool = False
-        if reqid is None:
-            reqid = str(uuid.uuid4())
-
-        else:
-            # order is already existing, this is a modify
-            (oid, symbol, action, old_price) = self._reqids[reqid]
-            assert old_price != price
-            is_modify = True
-
-        # register order internally
-        self._reqids[reqid] = (oid, symbol, action, price)
-
         if action == 'alert':
             # bypass all fill simulation
             return reqid

+        entry = self._reqids.get(reqid)
+        if entry:
+            # order is already existing, this is a modify
+            (oid, symbol, action, old_price) = entry
+        else:
+            # register order internally
+            self._reqids[reqid] = (oid, symbol, action, price)
+
         # TODO: net latency model
         # we checkpoint here quickly particulalry
         # for dark orders since we want the dark_executed
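The new modify-vs-create branch keys purely off `_reqids` membership instead of the old `is_modify` flag dance. In miniature (a plain `dict` is used here for brevity where the class holds a `bidict`):

```python
_reqids: dict[str, tuple] = {}

def upsert_request(
    reqid: str,
    oid: str,
    symbol: str,
    action: str,
    price: float,
) -> None:
    entry = _reqids.get(reqid)
    if entry:
        # known request id -> treat as a price modify of an
        # existing order dialog
        (oid, symbol, action, old_price) = entry
    else:
        # first sighting -> register the order internally
        _reqids[reqid] = (oid, symbol, action, price)
```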
@@ -107,15 +121,18 @@ class PaperBoi:
             size = -size

         msg = BrokerdStatus(
-            status='submitted',
+            status='open',
+            # account=f'paper_{self.broker}',
+            account='paper',
             reqid=reqid,
-            broker=self.broker,
             time_ns=time.time_ns(),
             filled=0.0,
             reason='paper_trigger',
             remaining=size,

+            broker_details={'name': 'paperboi'},
         )
-        await self.ems_trades_stream.send(msg.dict())
+        await self.ems_trades_stream.send(msg)

         # if we're already a clearing price simulate an immediate fill
         if (
@@ -123,28 +140,28 @@ class PaperBoi:
         ) or (
             action == 'sell' and (clear_price := self.last_bid[0]) >= price
         ):
-            await self.fake_fill(symbol, clear_price, size, action, reqid, oid)
+            await self.fake_fill(
+                symbol,
+                clear_price,
+                size,
+                action,
+                reqid,
+                oid,
+            )

+        # register this submissions as a paper live order
         else:
-            # register this submissions as a paper live order
-
-            # submit order to book simulation fill loop
+            # set the simulated order in the respective table for lookup
+            # and trigger by the simulated clearing task normally
+            # running ``simulate_fills()``.
             if action == 'buy':
                 orders = self._buys

             elif action == 'sell':
                 orders = self._sells

-            # set the simulated order in the respective table for lookup
-            # and trigger by the simulated clearing task normally
-            # running ``simulate_fills()``.
-
-            if is_modify:
-                # remove any existing order for the old price
-                orders[symbol].pop((oid, old_price))
-
-            # buys/sells: (symbol -> (price -> order))
-            orders.setdefault(symbol, {})[(oid, price)] = (size, reqid, action)
+            # {symbol -> bidict[oid, (<price data>)]}
+            orders[symbol][oid] = (price, size, reqid, action)

         return reqid

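The flat `(oid, price)`-keyed table is replaced by per-symbol `bidict`s keyed by `oid` alone, which is what later lets the clearing loop pop an order by its *value* tuple. The shape, in isolation:

```python
from collections import defaultdict
from bidict import bidict

# {symbol -> bidict[oid, (price, size, reqid, action)]}
_buys: defaultdict[str, bidict] = defaultdict(bidict)

info = (20_000.0, 0.1, 'req-1', 'buy')
_buys['xbtusd.kraken']['oid-1'] = info

# forward lookup by oid..
assert _buys['xbtusd.kraken']['oid-1'] == info
# ..and reverse lookup (or pop) by the full order-info tuple,
# exactly what the `.inverse.pop()` in the clearing loop relies on
assert _buys['xbtusd.kraken'].inverse[info] == 'oid-1'
```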
@@ -157,26 +174,26 @@ class PaperBoi:
         oid, symbol, action, price = self._reqids[reqid]

         if action == 'buy':
-            self._buys[symbol].pop((oid, price))
+            self._buys[symbol].pop(oid, None)
         elif action == 'sell':
-            self._sells[symbol].pop((oid, price))
+            self._sells[symbol].pop(oid, None)

         # TODO: net latency model
         await trio.sleep(0.05)

         msg = BrokerdStatus(
-            status='cancelled',
-            oid=oid,
+            status='canceled',
+            account='paper',
             reqid=reqid,
-            broker=self.broker,
             time_ns=time.time_ns(),
+            broker_details={'name': 'paperboi'},
         )
-        await self.ems_trades_stream.send(msg.dict())
+        await self.ems_trades_stream.send(msg)

     async def fake_fill(
         self,

-        symbol: str,
+        fqsn: str,
         price: float,
         size: float,
         action: str,  # one of {'buy', 'sell'}
@@ -190,21 +207,21 @@ class PaperBoi:
         remaining: float = 0,

     ) -> None:
-        """Pretend to fill a broker order @ price and size.
-
-        """
+        '''
+        Pretend to fill a broker order @ price and size.
+
+        '''
         # TODO: net latency model
         await trio.sleep(0.05)
+        fill_time_ns = time.time_ns()
+        fill_time_s = time.time()

-        msg = BrokerdFill(
-
+        fill_msg = BrokerdFill(
             reqid=reqid,
-            time_ns=time.time_ns(),
+            time_ns=fill_time_ns,

             action=action,
             size=size,
             price=price,

             broker_time=datetime.now().timestamp(),
             broker_details={
                 'paper_info': {
@@ -214,79 +231,67 @@ class PaperBoi:
                 'name': self.broker + '_paper',
                 },
         )
-        await self.ems_trades_stream.send(msg.dict())
+        log.info(f'Fake filling order:\n{fill_msg}')
+        await self.ems_trades_stream.send(fill_msg)
+
+        self._trade_ledger.update(fill_msg.to_dict())

         if order_complete:
-
             msg = BrokerdStatus(
-
                 reqid=reqid,
                 time_ns=time.time_ns(),
-                status='filled',
+                # account=f'paper_{self.broker}',
+                account='paper',
+                status='closed',
                 filled=size,
                 remaining=0 if order_complete else remaining,
-
-                action=action,
-                size=size,
-                price=price,
-
-                broker_details={
-                    'paper_info': {
-                        'oid': oid,
-                    },
-                    'name': self.broker,
-                },
             )
-            await self.ems_trades_stream.send(msg.dict())
+            await self.ems_trades_stream.send(msg)

         # lookup any existing position
-        token = f'{symbol}.{self.broker}'
-        pp_msg = self._positions.setdefault(
-            token,
-            BrokerdPosition(
-                broker=self.broker,
-                account='paper',
-                symbol=symbol,
-                # TODO: we need to look up the asset currency from
-                # broker info. i guess for crypto this can be
-                # inferred from the pair?
-                currency='',
-                size=0.0,
-                avg_price=0,
+        key = fqsn.rstrip(f'.{self.broker}')
+        pp = self._positions.setdefault(
+            fqsn,
+            Position(
+                Symbol(
+                    key=key,
+                    broker_info={self.broker: {}},
+                ),
+                size=size,
+                ppu=price,
+                bsuid=key,
             )
         )
+        t = Transaction(
+            fqsn=fqsn,
+            tid=oid,
+            size=size,
+            price=price,
+            cost=0,  # TODO: cost model
+            dt=pendulum.from_timestamp(fill_time_s),
+            bsuid=key,
+        )
+        pp.add_clear(t)

-        # "avg position price" calcs
-        # TODO: eventually it'd be nice to have a small set of routines
-        # to do this stuff from a sequence of cleared orders to enable
-        # so called "contextual positions".
-        new_size = size + pp_msg.size
-
-        # old size minus the new size gives us size differential with
-        # +ve -> increase in pp size
-        # -ve -> decrease in pp size
-        size_diff = abs(new_size) - abs(pp_msg.size)
-
-        if new_size == 0:
-            pp_msg.avg_price = 0
-
-        elif size_diff > 0:
-            # only update the "average position price" when the position
-            # size increases not when it decreases (i.e. the position is
-            # being made smaller)
-            pp_msg.avg_price = (
-                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
-            ) / abs(new_size)
-
-        pp_msg.size = new_size
-
-        await self.ems_trades_stream.send(pp_msg.dict())
+        pp_msg = BrokerdPosition(
+            broker=self.broker,
+            account='paper',
+            symbol=fqsn,
+            # TODO: we need to look up the asset currency from
+            # broker info. i guess for crypto this can be
+            # inferred from the pair?
+            currency='',
+            size=pp.size,
+            avg_price=pp.ppu,
+        )
+
+        await self.ems_trades_stream.send(pp_msg)


 async def simulate_fills(
-    quote_stream: 'tractor.ReceiveStream',  # noqa
+    quote_stream: tractor.MsgStream,  # noqa
     client: PaperBoi,

 ) -> None:

     # TODO: more machinery to better simulate real-world market things:
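Position math now routes through the `piker.pp` API shown in the imports rather than the hand-rolled breakeven averaging that was deleted. Roughly, and assuming the `Position`/`Transaction` signatures implied by the hunk above (this is a sketch, not documented API):

```python
import pendulum

from piker.data._source import Symbol
from piker.pp import Position, Transaction

key = 'xbtusd'
pp = Position(
    Symbol(key=key, broker_info={'kraken': {}}),
    size=0.1,
    ppu=20_000.0,
    bsuid=key,
)

# each simulated fill becomes a clearing `Transaction`..
t = Transaction(
    fqsn='xbtusd.kraken',
    tid='oid-1',
    size=0.1,
    price=21_000.0,
    cost=0,  # no cost model, per the TODO above
    dt=pendulum.now(),
    bsuid=key,
)
# ..and `.add_clear()` folds it into the running size/ppu
pp.add_clear(t)
print(pp.size, pp.ppu)
```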
@@ -306,61 +311,116 @@ async def simulate_fills(

     # this stream may eventually contain multiple symbols
     async for quotes in quote_stream:

         for sym, quote in quotes.items():

             for tick in iterticks(
                 quote,
                 # dark order price filter(s)
                 types=('ask', 'bid', 'trade', 'last')
             ):
-                # print(tick)
-                tick_price = tick.get('price')
-                ttype = tick['type']
+                tick_price = tick['price']

-                if ttype in ('ask',):
+                buys: bidict[str, tuple] = client._buys[sym]
+                iter_buys = reversed(sorted(
+                    buys.values(),
+                    key=itemgetter(0),
+                ))

-                    client.last_ask = (
-                        tick_price,
-                        tick.get('size', client.last_ask[1]),
-                    )
+                def buy_on_ask(our_price):
+                    return tick_price <= our_price

-                    orders = client._buys.get(sym, {})
+                sells: bidict[str, tuple] = client._sells[sym]
+                iter_sells = sorted(
+                    sells.values(),
+                    key=itemgetter(0)
+                )

-                    book_sequence = reversed(
-                        sorted(orders.keys(), key=itemgetter(1)))
+                def sell_on_bid(our_price):
+                    return tick_price >= our_price

-                    def pred(our_price):
-                        return tick_price < our_price
+                match tick:

-                elif ttype in ('bid',):
-
-                    client.last_bid = (
-                        tick_price,
-                        tick.get('size', client.last_bid[1]),
-                    )
-
-                    orders = client._sells.get(sym, {})
-                    book_sequence = sorted(orders.keys(), key=itemgetter(1))
-
-                    def pred(our_price):
-                        return tick_price > our_price
-
-                elif ttype in ('trade', 'last'):
-                    # TODO: simulate actual book queues and our orders
-                    # place in it, might require full L2 data?
-                    continue
+                    # on an ask queue tick, only clear buy entries
+                    case {
+                        'price': tick_price,
+                        'type': 'ask',
+                    }:
+                        client.last_ask = (
+                            tick_price,
+                            tick.get('size', client.last_ask[1]),
+                        )
+
+                        iter_entries = zip(
+                            iter_buys,
+                            itertools.repeat(buy_on_ask)
+                        )
+
+                    # on a bid queue tick, only clear sell entries
+                    case {
+                        'price': tick_price,
+                        'type': 'bid',
+                    }:
+                        client.last_bid = (
+                            tick_price,
+                            tick.get('size', client.last_bid[1]),
+                        )
+
+                        iter_entries = zip(
+                            iter_sells,
+                            itertools.repeat(sell_on_bid)
+                        )
+
+                    # TODO: fix this block, though it definitely
+                    # costs a lot more CPU-wise
+                    # - doesn't seem like clears are happening still on
+                    #   "resting" limit orders?
+                    case {
+                        'price': tick_price,
+                        'type': ('trade' | 'last'),
+                    }:
+                        # in the clearing price / last price case we
+                        # want to iterate both sides of our book for
+                        # clears since we don't know which direction the
+                        # price is going to move (especially with HFT)
+                        # and thus we simply interleave both sides (buys
+                        # and sells) until one side clears and then
+                        # break until the next tick?
+                        def interleave():
+                            for pair in zip(
+                                iter_buys,
+                                iter_sells,
+                            ):
+                                for order_info, pred in zip(
+                                    pair,
+                                    itertools.cycle([buy_on_ask, sell_on_bid]),
+                                ):
+                                    yield order_info, pred

-                # iterate book prices descending
-                for oid, our_price in book_sequence:
-                    if pred(our_price):
+                        iter_entries = interleave()

-                        # retreive order info
-                        (size, reqid, action) = orders.pop((oid, our_price))
+                    # NOTE: all other (non-clearable) tick event types
+                    # - we don't want to sping the simulated clear loop
+                    #   below unecessarily and further don't want to pop
+                    #   simulated live orders prematurely.
+                    case _:
+                        continue
+
+                # iterate all potentially clearable book prices
+                # in FIFO order per side.
+                for order_info, pred in iter_entries:
+                    (our_price, size, reqid, action) = order_info
+
+                    # print(order_info)
+                    clearable = pred(our_price)
+                    if clearable:
+                        # pop and retreive order info
+                        oid = {
+                            'buy': buys,
+                            'sell': sells
+                        }[action].inverse.pop(order_info)

                         # clearing price would have filled entirely
                         await client.fake_fill(
-                            symbol=sym,
+                            fqsn=sym,
                             # todo slippage to determine fill price
                             price=tick_price,
                             size=size,
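The `interleave()` trick above is easy to check in isolation: `zip` pairs best-buy with best-sell, and `itertools.cycle` assigns each its clearing predicate so the sides alternate. The predicates here are stand-ins for the real tick-price comparisons:

```python
import itertools

iter_buys = iter([(100.0, 'b1'), (99.0, 'b2')])    # price-descending
iter_sells = iter([(101.0, 's1'), (102.0, 's2')])  # price-ascending

def buy_on_ask(our_price: float) -> bool:
    return 100.5 <= our_price  # stand-in predicate

def sell_on_bid(our_price: float) -> bool:
    return 100.5 >= our_price  # stand-in predicate

def interleave():
    for pair in zip(iter_buys, iter_sells):
        for order_info, pred in zip(
            pair,
            itertools.cycle([buy_on_ask, sell_on_bid]),
        ):
            yield order_info, pred

print([info for info, _ in interleave()])
# [(100.0, 'b1'), (101.0, 's1'), (99.0, 'b2'), (102.0, 's2')]
```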
@@ -368,9 +428,6 @@ async def simulate_fills(
                             reqid=reqid,
                             oid=oid,
                         )
-                    else:
-                        # prices are iterated in sorted order so we're done
-                        break


 async def handle_order_requests(
@@ -380,66 +437,81 @@ async def handle_order_requests(

 ) -> None:

-    # order_request: dict
+    request_msg: dict
     async for request_msg in ems_order_stream:
+        match request_msg:
+            case {'action': ('buy' | 'sell')}:
+                order = BrokerdOrder(**request_msg)
+                account = order.account

-        action = request_msg['action']
+                # error on bad inputs
+                reason = None
+                if account != 'paper':
+                    reason = f'No account found:`{account}` (paper only)?'

-        if action in {'buy', 'sell'}:
+                elif order.size == 0:
+                    reason = 'Invalid size: 0'

-            account = request_msg['account']
-            if account != 'paper':
-                log.error(
-                    'This is a paper account, only a `paper` selection is valid'
-                )
-                await ems_order_stream.send(BrokerdError(
-                    oid=request_msg['oid'],
-                    symbol=request_msg['symbol'],
-                    reason=f'Paper only. No account found: `{account}` ?',
-                ).dict())
-                continue
+                if reason:
+                    log.error(reason)
+                    await ems_order_stream.send(BrokerdError(
+                        oid=order.oid,
+                        symbol=order.symbol,
+                        reason=reason,
+                    ))
+                    continue

-            # validate
-            order = BrokerdOrder(**request_msg)
+                reqid = order.reqid or str(uuid.uuid4())

-            # call our client api to submit the order
-            reqid = await client.submit_limit(
-
-                oid=order.oid,
-                symbol=order.symbol,
-                price=order.price,
-                action=order.action,
-                size=order.size,
-
-                # XXX: by default 0 tells ``ib_insync`` methods that
-                # there is no existing order so ask the client to create
-                # a new one (which it seems to do by allocating an int
-                # counter - collision prone..)
-                reqid=order.reqid,
-            )
-
-            # deliver ack that order has been submitted to broker routing
-            await ems_order_stream.send(
-                BrokerdOrderAck(
-
-                    # ems order request id
-                    oid=order.oid,
-
-                    # broker specific request id
-                    reqid=reqid,
-
-                ).dict()
-            )
+                # deliver ack that order has been submitted to broker routing
+                await ems_order_stream.send(
+                    BrokerdOrderAck(
+                        oid=order.oid,
+                        reqid=reqid,
+                    )
+                )

-        elif action == 'cancel':
-            msg = BrokerdCancel(**request_msg)
-
-            await client.submit_cancel(
-                reqid=msg.reqid
-            )
+                # call our client api to submit the order
+                reqid = await client.submit_limit(
+                    oid=order.oid,
+                    symbol=f'{order.symbol}.{client.broker}',
+                    price=order.price,
+                    action=order.action,
+                    size=order.size,
+                    # XXX: by default 0 tells ``ib_insync`` methods that
+                    # there is no existing order so ask the client to create
+                    # a new one (which it seems to do by allocating an int
+                    # counter - collision prone..)
+                    reqid=reqid,
+                )
+                log.info(f'Submitted paper LIMIT {reqid}:\n{order}')

-        else:
-            log.error(f'Unknown order command: {request_msg}')
+            case {'action': 'cancel'}:
+                msg = BrokerdCancel(**request_msg)
+                await client.submit_cancel(
+                    reqid=msg.reqid
+                )
+
+            case _:
+                log.error(f'Unknown order command: {request_msg}')
+
+
+_reqids: bidict[str, tuple] = {}
+_buys: defaultdict[
+    str,  # symbol
+    bidict[
+        str,  # oid
+        tuple[float, float, str, str],  # order info
+    ]
+] = defaultdict(bidict)
+_sells: defaultdict[
+    str,  # symbol
+    bidict[
+        str,  # oid
+        tuple[float, float, str, str],  # order info
+    ]
+] = defaultdict(bidict)
+_positions: dict[str, Position] = {}


 @tractor.context
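`handle_order_requests()` now routes purely on the msg's `action` key via structural pattern matching; the dispatch, reduced to its runnable skeleton:

```python
def route(request_msg: dict) -> str:
    match request_msg:
        case {'action': ('buy' | 'sell')}:
            return 'submit_limit'
        case {'action': 'cancel'}:
            return 'submit_cancel'
        case _:
            return 'unknown'

assert route({'action': 'sell', 'size': 1.0}) == 'submit_limit'
assert route({'action': 'cancel', 'reqid': 'r1'}) == 'submit_cancel'
assert route({'action': 'modify'}) == 'unknown'
```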
@@ -451,42 +523,62 @@ async def trades_dialogue(
     loglevel: str = None,

 ) -> None:

     tractor.log.get_console_log(loglevel)

     async with (

         data.open_feed(
             [fqsn],
             loglevel=loglevel,
         ) as feed,

     ):
+        pp_msgs: list[BrokerdPosition] = []
+        pos: Position
+        token: str  # f'{symbol}.{self.broker}'
+        for token, pos in _positions.items():
+            pp_msgs.append(BrokerdPosition(
+                broker=broker,
+                account='paper',
+                symbol=pos.symbol.front_fqsn(),
+                size=pos.size,
+                avg_price=pos.ppu,
+            ))
+
         # TODO: load paper positions per broker from .toml config file
         # and pass as symbol to position data mapping: ``dict[str, dict]``
-        # await ctx.started(all_positions)
-        await ctx.started(({}, {'paper',}))
+        await ctx.started((
+            pp_msgs,
+            ['paper'],
+        ))

         async with (
             ctx.open_stream() as ems_stream,
             trio.open_nursery() as n,
         ):

             client = PaperBoi(
                 broker,
                 ems_stream,
-                _buys={},
-                _sells={},
+                _buys=_buys,
+                _sells=_sells,

-                _reqids={},
+                _reqids=_reqids,

                 # TODO: load paper positions from ``positions.toml``
-                _positions={},
+                _positions=_positions,
+
+                # TODO: load postions from ledger file
+                _trade_ledger={},
             )

-            n.start_soon(handle_order_requests, client, ems_stream)
+            n.start_soon(
+                handle_order_requests,
+                client,
+                ems_stream,
+            )

             # paper engine simulator clearing task
-            await simulate_fills(feed.stream, client)
+            await simulate_fills(feed.streams[broker], client)


 @asynccontextmanager
@@ -511,17 +603,17 @@ async def open_paperboi(
     # (we likely don't need more then one proc for basic
     # simulated order clearing)
     if portal is None:
+        log.info('Starting new paper-engine actor')
         portal = await tn.start_actor(
             service_name,
             enable_modules=[__name__]
         )

     async with portal.open_context(
         trades_dialogue,
         broker=broker,
         fqsn=fqsn,
         loglevel=loglevel,

     ) as (ctx, first):

         yield ctx, first
@@ -27,25 +27,35 @@ import tractor

 from ..log import get_console_log, get_logger, colorize_json
 from ..brokers import get_brokermod
-from .._daemon import _tractor_kwargs
+from .._daemon import (
+    _default_registry_host,
+    _default_registry_port,
+)
 from .. import config


 log = get_logger('cli')
-DEFAULT_BROKER = 'questrade'


 @click.command()
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
-@click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
+@click.option('--host', '-h', default=None, help='Host addr to bind')
+@click.option('--port', '-p', default=None, help='Port number to bind')
 @click.option(
     '--tsdb',
     is_flag=True,
     help='Enable local ``marketstore`` instance'
 )
-def pikerd(loglevel, host, tl, pdb, tsdb):
+def pikerd(
+    loglevel: str,
+    host: str,
+    port: int,
+    tl: bool,
+    pdb: bool,
+    tsdb: bool,
+):
     '''
     Spawn the piker broker-daemon.
@@ -62,12 +72,21 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
         "\n"
     ))

+    reg_addr: None | tuple[str, int] = None
+    if host or port:
+        reg_addr = (
+            host or _default_registry_host,
+            int(port) or _default_registry_port,
+        )
+
     async def main():

         async with (
             open_pikerd(
                 loglevel=loglevel,
                 debug_mode=pdb,
+                registry_addr=reg_addr,

             ),  # normally delivers a ``Services`` handle
             trio.open_nursery() as n,
         ):
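The same host/port-to-registry-address fallback reappears in `cli()` below; extracted as a helper it reads as follows (the default values shown here are placeholders, the real ones live in `piker._daemon`). Note the `if port else` guard: the literal `int(port) or ...` form above would raise `TypeError` on `int(None)` when only `--host` is passed:

```python
_default_registry_host: str = '127.0.0.1'  # placeholder value
_default_registry_port: int = 6116         # placeholder value

def to_reg_addr(
    host: str | None,
    port: str | None,
) -> tuple[str, int] | None:
    # only build an explicit registry address when the user
    # overrides at least one of host/port
    if host or port:
        return (
            host or _default_registry_host,
            int(port) if port else _default_registry_port,
        )
    return None
```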
@@ -83,9 +102,9 @@ def pikerd(loglevel, host, tl, pdb, tsdb):

             )
             log.info(
-                f'`marketstore` up!\n'
-                f'`marketstored` pid: {pid}\n'
-                f'docker container id: {cid}\n'
+                f'`marketstored` up!\n'
+                f'pid: {pid}\n'
+                f'container id: {cid[:12]}\n'
                 f'config: {pformat(config)}'
             )

@@ -97,25 +116,46 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
 @click.group(context_settings=config._context_defaults)
 @click.option(
     '--brokers', '-b',
-    default=[DEFAULT_BROKER],
+    default=None,
     multiple=True,
     help='Broker backend to use'
 )
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--configdir', '-c', help='Configuration directory')
+@click.option('--host', '-h', default=None, help='Host addr to bind')
+@click.option('--port', '-p', default=None, help='Port number to bind')
 @click.pass_context
-def cli(ctx, brokers, loglevel, tl, configdir):
+def cli(
+    ctx: click.Context,
+    brokers: list[str],
+    loglevel: str,
+    tl: bool,
+    configdir: str,
+    host: str,
+    port: int,
+
+) -> None:
     if configdir is not None:
         assert os.path.isdir(configdir), f"`{configdir}` is not a valid path"
         config._override_config_dir(configdir)

     ctx.ensure_object(dict)

-    if len(brokers) == 1:
-        brokermods = [get_brokermod(brokers[0])]
-    else:
-        brokermods = [get_brokermod(broker) for broker in brokers]
+    if not brokers:
+        # (try to) load all (supposedly) supported data/broker backends
+        from piker.brokers import __brokers__
+        brokers = __brokers__
+
+    brokermods = [get_brokermod(broker) for broker in brokers]
+    assert brokermods
+
+    reg_addr: None | tuple[str, int] = None
+    if host or port:
+        reg_addr = (
+            host or _default_registry_host,
+            int(port) or _default_registry_port,
+        )

     ctx.obj.update({
         'brokers': brokers,
@@ -125,6 +165,7 @@ def cli(ctx, brokers, loglevel, tl, configdir):
         'log': get_console_log(loglevel),
         'confdir': config._config_dir,
         'wl_path': config._watchlists_data_path,
+        'registry_addr': reg_addr,
     })

     # allow enabling same loglevel in ``tractor`` machinery
@@ -134,29 +175,40 @@ def cli(ctx, brokers, loglevel, tl, configdir):

 @cli.command()
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
-@click.argument('names', nargs=-1, required=False)
+@click.argument('ports', nargs=-1, required=False)
 @click.pass_obj
-def services(config, tl, names):
+def services(config, tl, ports):

+    from .._daemon import (
+        open_piker_runtime,
+        _default_registry_port,
+        _default_registry_host,
+    )
+
+    host = _default_registry_host
+    if not ports:
+        ports = [_default_registry_port]
+
     async def list_services():
-        async with tractor.get_arbiter(
-            *_tractor_kwargs['arbiter_addr']
-        ) as portal:
+        nonlocal host
+        async with (
+            open_piker_runtime(
+                name='service_query',
+                loglevel=config['loglevel'] if tl else None,
+            ),
+            tractor.get_arbiter(
+                host=host,
+                port=ports[0]
+            ) as portal
+        ):
             registry = await portal.run_from_ns('self', 'get_registry')
             json_d = {}
             for key, socket in registry.items():
-                # name, uuid = uid
                 host, port = socket
                 json_d[key] = f'{host}:{port}'
             click.echo(f"{colorize_json(json_d)}")

-    tractor.run(
-        list_services,
-        name='service_query',
-        loglevel=config['loglevel'] if tl else None,
-        arbiter_addr=_tractor_kwargs['arbiter_addr'],
-    )
+    trio.run(list_services)


 def _load_clis() -> None:
@@ -21,6 +21,7 @@ Broker configuration mgmt.
 import platform
 import sys
 import os
+from os import path
 from os.path import dirname
 import shutil
 from typing import Optional
@@ -111,6 +112,7 @@ if _parent_user:

 _conf_names: set[str] = {
     'brokers',
+    'pps',
     'trades',
     'watchlists',
 }
@ -147,19 +149,21 @@ def get_conf_path(
|
||||||
conf_name: str = 'brokers',
|
conf_name: str = 'brokers',
|
||||||
|
|
||||||
) -> str:
|
) -> str:
|
||||||
"""Return the default config path normally under
|
'''
|
||||||
``~/.config/piker`` on linux.
|
Return the top-level default config path normally under
|
||||||
|
``~/.config/piker`` on linux for a given ``conf_name``, the config
|
||||||
|
name.
|
||||||
|
|
||||||
Contains files such as:
|
Contains files such as:
|
||||||
- brokers.toml
|
- brokers.toml
|
||||||
|
- pp.toml
|
||||||
- watchlists.toml
|
- watchlists.toml
|
||||||
- trades.toml
|
|
||||||
|
|
||||||
# maybe coming soon ;)
|
# maybe coming soon ;)
|
||||||
- signals.toml
|
- signals.toml
|
||||||
- strats.toml
|
- strats.toml
|
||||||
|
|
||||||
"""
|
'''
|
||||||
assert conf_name in _conf_names
|
assert conf_name in _conf_names
|
||||||
fn = _conf_fn_w_ext(conf_name)
|
fn = _conf_fn_w_ext(conf_name)
|
||||||
return os.path.join(
|
return os.path.join(
|
||||||
|
@ -173,7 +177,7 @@ def repodir():
|
||||||
Return the abspath to the repo directory.
|
Return the abspath to the repo directory.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
dirpath = os.path.abspath(
|
dirpath = path.abspath(
|
||||||
# we're 3 levels down in **this** module file
|
# we're 3 levels down in **this** module file
|
||||||
dirname(dirname(os.path.realpath(__file__)))
|
dirname(dirname(os.path.realpath(__file__)))
|
||||||
)
|
)
|
||||||
|
@ -182,7 +186,9 @@ def repodir():
|
||||||
|
|
||||||
def load(
|
def load(
|
||||||
conf_name: str = 'brokers',
|
conf_name: str = 'brokers',
|
||||||
path: str = None
|
path: str = None,
|
||||||
|
|
||||||
|
**tomlkws,
|
||||||
|
|
||||||
) -> (dict, str):
|
) -> (dict, str):
|
||||||
'''
|
'''
|
||||||
|
@ -190,6 +196,10 @@ def load(
|
||||||
|
|
||||||
'''
|
'''
|
||||||
path = path or get_conf_path(conf_name)
|
path = path or get_conf_path(conf_name)
|
||||||
|
|
||||||
|
if not os.path.isdir(_config_dir):
|
||||||
|
os.mkdir(_config_dir)
|
||||||
|
|
||||||
if not os.path.isfile(path):
|
if not os.path.isfile(path):
|
||||||
fn = _conf_fn_w_ext(conf_name)
|
fn = _conf_fn_w_ext(conf_name)
|
||||||
|
|
||||||
|
@ -202,8 +212,11 @@ def load(
|
||||||
# if one exists.
|
# if one exists.
|
||||||
if os.path.isfile(template):
|
if os.path.isfile(template):
|
||||||
shutil.copyfile(template, path)
|
shutil.copyfile(template, path)
|
||||||
|
else:
|
||||||
|
with open(path, 'r'):
|
||||||
|
pass # touch it
|
||||||
|
|
||||||
config = toml.load(path)
|
config = toml.load(path, **tomlkws)
|
||||||
log.debug(f"Read config file {path}")
|
log.debug(f"Read config file {path}")
|
||||||
return config, path
|
return config, path
|
||||||
|
|
||||||
|
@ -212,6 +225,7 @@ def write(
|
||||||
config: dict, # toml config as dict
|
config: dict, # toml config as dict
|
||||||
name: str = 'brokers',
|
name: str = 'brokers',
|
||||||
path: str = None,
|
path: str = None,
|
||||||
|
**toml_kwargs,
|
||||||
|
|
||||||
) -> None:
|
) -> None:
|
||||||
''''
|
''''
|
||||||
|
@ -235,11 +249,14 @@ def write(
|
||||||
f"{path}"
|
f"{path}"
|
||||||
)
|
)
|
||||||
with open(path, 'w') as cf:
|
with open(path, 'w') as cf:
|
||||||
return toml.dump(config, cf)
|
return toml.dump(
|
||||||
|
config,
|
||||||
|
cf,
|
||||||
|
**toml_kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def load_accounts(
|
def load_accounts(
|
||||||
|
|
||||||
providers: Optional[list[str]] = None
|
providers: Optional[list[str]] = None
|
||||||
|
|
||||||
) -> bidict[str, Optional[str]]:
|
) -> bidict[str, Optional[str]]:
|
||||||
|
|
|
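A quick round-trip sketch of the updated config `load()`/`write()` API: both now forward extra TOML keyword args to the underlying `toml` calls, and `load()` creates the config dir and touches the file when missing. The broker section edited here is illustrative only:

from piker import config

# returns the parsed dict plus the resolved file path; the file is
# created (from a template if one exists) when not present
conf, path = config.load('brokers')

conf.setdefault('kraken', {})['key_descr'] = 'api_0'  # illustrative edit

# any extra keyword args are forwarded to `toml.dump()`
config.write(conf, name='brokers', path=path)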
@@ -22,6 +22,12 @@ and storing data from your brokers as well as
 sharing live streams over a network.

 """
+import tractor
+import trio
+
+from ..log import (
+    get_console_log,
+)
 from ._normalize import iterticks
 from ._sharedmem import (
     maybe_open_shm_array,
@@ -32,7 +38,6 @@ from ._sharedmem import (
 )
 from .feed import (
     open_feed,
-    _setup_persistent_brokerd,
 )


@@ -44,5 +49,40 @@ __all__ = [
     'attach_shm_array',
     'open_shm_array',
     'get_shm_token',
-    '_setup_persistent_brokerd',
 ]
+
+
+@tractor.context
+async def _setup_persistent_brokerd(
+    ctx: tractor.Context,
+    brokername: str,
+
+) -> None:
+    '''
+    Allocate a actor-wide service nursery in ``brokerd``
+    such that feeds can be run in the background persistently by
+    the broker backend as needed.
+
+    '''
+    get_console_log(tractor.current_actor().loglevel)
+
+    from .feed import (
+        _bus,
+        get_feed_bus,
+    )
+    global _bus
+    assert not _bus
+
+    async with trio.open_nursery() as service_nursery:
+        # assign a nursery to the feeds bus for spawning
+        # background tasks from clients
+        get_feed_bus(brokername, service_nursery)
+
+        # unblock caller
+        await ctx.started()
+
+        # we pin this task to keep the feeds manager active until the
+        # parent actor decides to tear it down
+        await trio.sleep_forever()
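The endpoint above is a `@tractor.context` function, so a parent actor would typically enter it through a portal. A hedged sketch using tractor's standard nursery/context API; actor and broker names are illustrative:

import tractor

from piker.data import _setup_persistent_brokerd


async def spawn_brokerd(brokername: str = 'kraken') -> None:
    async with tractor.open_nursery() as an:
        portal = await an.start_actor(
            f'brokerd.{brokername}',
            enable_modules=['piker.data'],
        )
        async with portal.open_context(
            _setup_persistent_brokerd,
            brokername=brokername,
        ) as (ctx, first):
            # the remote task calls `ctx.started()` then pins itself
            # with `trio.sleep_forever()`; exiting this block cancels
            # it and tears down the feed-bus nursery.
            assert first is None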
@@ -37,8 +37,13 @@ from docker.models.containers import Container as DockerContainer
 from docker.errors import (
     DockerException,
     APIError,
+    # ContainerError,
+)
+import requests
+from requests.exceptions import (
+    ConnectionError,
+    ReadTimeout,
 )
-from requests.exceptions import ConnectionError, ReadTimeout

 from ..log import get_logger, get_console_log
 from .. import config
@@ -50,8 +55,8 @@ class DockerNotStarted(Exception):
     'Prolly you dint start da daemon bruh'


-class ContainerError(RuntimeError):
-    'Error reported via app-container logging level'
+class ApplicationLogError(Exception):
+    'App in container reported an error in logs'


 @acm
@@ -96,9 +101,9 @@ async def open_docker(
         # not perms?
         raise

-    finally:
-        if client:
-            client.close()
+    # finally:
+    #     if client:
+    #         client.close()


 class Container:
@@ -156,7 +161,7 @@ class Container:

             # print(f'level: {level}')
             if level in ('error', 'fatal'):
-                raise ContainerError(msg)
+                raise ApplicationLogError(msg)

             if patt in msg:
                 return True
@@ -185,12 +190,29 @@ class Container:
             if 'is not running' in err.explanation:
                 return False

+    def hard_kill(self, start: float) -> None:
+        delay = time.time() - start
+        # get out the big guns, bc apparently marketstore
+        # doesn't actually know how to terminate gracefully
+        # :eyeroll:...
+        log.error(
+            f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
+        )
+        self.try_signal('SIGKILL')
+        self.cntr.wait(
+            timeout=3,
+            condition='not-running',
+        )
+
     async def cancel(
         self,
         stop_msg: str,
+        hard_kill: bool = False,

     ) -> None:

         cid = self.cntr.id

         # first try a graceful cancel
         log.cancel(
             f'SIGINT cancelling container: {cid}\n'
@@ -199,15 +221,25 @@ class Container:
         self.try_signal('SIGINT')

         start = time.time()
-        for _ in range(30):
+        for _ in range(6):

             with trio.move_on_after(0.5) as cs:
-                cs.shield = True
-                await self.process_logs_until(stop_msg)
-
-            # if we aren't cancelled on above checkpoint then we
-            # assume we read the expected stop msg and terminated.
-            break
+                log.cancel('polling for CNTR logs...')
+
+                try:
+                    await self.process_logs_until(stop_msg)
+                except ApplicationLogError:
+                    hard_kill = True
+                else:
+                    # if we aren't cancelled on above checkpoint then we
+                    # assume we read the expected stop msg and
+                    # terminated.
+                    break
+
+            if cs.cancelled_caught:
+                # on timeout just try a hard kill after
+                # a quick container sync-wait.
+                hard_kill = True

         try:
             log.info(f'Polling for container shutdown:\n{cid}')
@@ -218,6 +250,7 @@ class Container:
                     condition='not-running',
                 )

+                # graceful exit if we didn't time out
                 break

             except (
@@ -229,25 +262,23 @@ class Container:
             except (
                 docker.errors.APIError,
                 ConnectionError,
+                requests.exceptions.ConnectionError,
+                trio.Cancelled,
             ):
                 log.exception('Docker connection failure')
-                break
-
-        else:
-            delay = time.time() - start
-            log.error(
-                f'Failed to kill container {cid} after {delay}s\n'
-                'sending SIGKILL..'
-            )
-            # get out the big guns, bc apparently marketstore
-            # doesn't actually know how to terminate gracefully
-            # :eyeroll:...
-            self.try_signal('SIGKILL')
-            self.cntr.wait(
-                timeout=3,
-                condition='not-running',
-            )
-
-        log.cancel(f'Container stopped: {cid}')
+                self.hard_kill(start)
+                raise
+
+            except trio.Cancelled:
+                log.exception('trio cancelled...')
+                self.hard_kill(start)
+        else:
+            hard_kill = True
+
+        if hard_kill:
+            self.hard_kill(start)
+        else:
+            log.cancel(f'Container stopped: {cid}')


 @tractor.context
@@ -289,15 +320,13 @@ async def open_ahabd(
     ))

     try:

         # TODO: we might eventually want a proxy-style msg-prot here
         # to allow remote control of containers without needing
         # callers to have root perms?
         await trio.sleep_forever()

     finally:
-        with trio.CancelScope(shield=True):
-            await cntr.cancel(stop_msg)
+        await cntr.cancel(stop_msg)


 async def start_ahab(
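The reworked `cancel()` above is a bounded retry with a per-iteration timeout that falls back to `hard_kill()`. The same control shape in isolation, using only `trio` primitives; the `poll_logs`/`hard_kill` callables are stand-ins:

import trio


async def graceful_then_hard(
    poll_logs,   # async callable: read logs until the stop msg
    hard_kill,   # sync callable: SIGKILL + sync container wait
) -> None:
    kill: bool = False
    for _ in range(6):
        with trio.move_on_after(0.5) as cs:
            try:
                await poll_logs()
            except Exception:
                kill = True  # app logged an error, force a kill
            else:
                break  # expected stop msg was read

        if cs.cancelled_caught:
            kill = True  # timed out polling logs

    if kill:
        hard_kill()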
@@ -56,7 +56,7 @@ def iterticks(
             sig = (
                 time,
                 tick['price'],
-                tick['size']
+                tick.get('size')
             )

             if ttype == 'dark_trade':
(one file's diff suppressed because it is too large)
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -27,13 +27,14 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
 if _USE_POSIX:
     from _posixshmem import shm_unlink

-import tractor
+# import msgspec
 import numpy as np
-from pydantic import BaseModel
 from numpy.lib import recfunctions as rfn
+import tractor

 from ..log import get_logger
 from ._source import base_iohlc_dtype
+from .types import Struct


 log = get_logger(__name__)
@@ -49,7 +50,11 @@ _rt_buffer_start = int((_days_worth - 1) * _secs_in_day)


 def cuckoff_mantracker():
+    '''
+    Disable all ``multiprocessing``` "resource tracking" machinery since
+    it's an absolute multi-threaded mess of non-SC madness.
+
+    '''
     from multiprocessing import resource_tracker as mantracker

     # Tell the "resource tracker" thing to fuck off.
@@ -107,36 +112,39 @@ class SharedInt:
             log.warning(f'Shm for {name} already unlinked?')


-class _Token(BaseModel):
+class _Token(Struct, frozen=True):
     '''
     Internal represenation of a shared memory "token"
     which can be used to key a system wide post shm entry.

     '''
-    class Config:
-        frozen = True
-
     shm_name: str  # this servers as a "key" value
     shm_first_index_name: str
     shm_last_index_name: str
     dtype_descr: tuple
+    size: int  # in struct-array index / row terms

     @property
     def dtype(self) -> np.dtype:
         return np.dtype(list(map(tuple, self.dtype_descr))).descr

     def as_msg(self):
-        return self.dict()
+        return self.to_dict()

     @classmethod
     def from_msg(cls, msg: dict) -> _Token:
         if isinstance(msg, _Token):
             return msg

+        # TODO: native struct decoding
+        # return _token_dec.decode(msg)
+
         msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
         return _Token(**msg)


+# _token_dec = msgspec.msgpack.Decoder(_Token)
+
 # TODO: this api?
 # _known_tokens = tractor.ActorVar('_shm_tokens', {})
 # _known_tokens = tractor.ContextStack('_known_tokens', )
@@ -155,6 +163,7 @@ def get_shm_token(key: str) -> _Token:

 def _make_token(
     key: str,
+    size: int,
     dtype: Optional[np.dtype] = None,
 ) -> _Token:
     '''
@@ -167,7 +176,8 @@ def _make_token(
         shm_name=key,
         shm_first_index_name=key + "_first",
         shm_last_index_name=key + "_last",
-        dtype_descr=np.dtype(dtype).descr
+        dtype_descr=tuple(np.dtype(dtype).descr),
+        size=size,
     )


@@ -219,6 +229,7 @@ class ShmArray:
             shm_first_index_name=self._first._shm.name,
             shm_last_index_name=self._last._shm.name,
             dtype_descr=tuple(self._array.dtype.descr),
+            size=self._len,
         )

     @property
@@ -433,7 +444,7 @@ class ShmArray:
 def open_shm_array(

     key: Optional[str] = None,
-    size: int = _default_size,
+    size: int = _default_size,  # see above
     dtype: Optional[np.dtype] = None,
     readonly: bool = False,

@@ -464,7 +475,8 @@ def open_shm_array(

     token = _make_token(
         key=key,
-        dtype=dtype
+        size=size,
+        dtype=dtype,
     )

     # create single entry arrays for storing an first and last indices
@@ -516,15 +528,15 @@ def open_shm_array(
     # "unlink" created shm on process teardown by
     # pushing teardown calls onto actor context stack

-    tractor._actor._lifetime_stack.callback(shmarr.close)
-    tractor._actor._lifetime_stack.callback(shmarr.destroy)
+    stack = tractor.current_actor().lifetime_stack
+    stack.callback(shmarr.close)
+    stack.callback(shmarr.destroy)

     return shmarr


 def attach_shm_array(
     token: tuple[str, str, tuple[str, str]],
-    size: int = _default_size,
     readonly: bool = True,

 ) -> ShmArray:
@@ -563,7 +575,7 @@ def attach_shm_array(
         raise _err

     shmarr = np.ndarray(
-        (size,),
+        (token.size,),
         dtype=token.dtype,
         buffer=shm.buf
     )
@@ -602,8 +614,8 @@ def attach_shm_array(
     if key not in _known_tokens:
         _known_tokens[key] = token

-    # "close" attached shm on process teardown
-    tractor._actor._lifetime_stack.callback(sha.close)
+    # "close" attached shm on actor teardown
+    tractor.current_actor().lifetime_stack.callback(sha.close)

     return sha

@@ -631,6 +643,7 @@ def maybe_open_shm_array(
     use ``attach_shm_array``.

     '''
+    size = kwargs.pop('size', _default_size)
     try:
         # see if we already know this key
         token = _known_tokens[key]
@@ -638,7 +651,11 @@ def maybe_open_shm_array(
     except KeyError:
         log.warning(f"Could not find {key} in shms cache")
         if dtype:
-            token = _make_token(key, dtype)
+            token = _make_token(
+                key,
+                size=size,
+                dtype=dtype,
+            )
     try:
         return attach_shm_array(token=token, **kwargs), False
     except FileNotFoundError:
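With the `size` field now carried on the `_Token` itself, a writer allocates with an explicit row count and readers recover it from the token instead of passing `size` to `attach_shm_array()`. A hedged usage sketch; the key name and dtype are illustrative, and `ShmArray.token` is assumed to expose the allocation token:

import numpy as np

from piker.data._sharedmem import (
    open_shm_array,
    attach_shm_array,
)

dtype = np.dtype([('time', 'f8'), ('close', 'f8')])

writer = open_shm_array(
    key='example.rt.shm',  # illustrative key
    size=1_000,            # row count, now stored on the `_Token`
    dtype=dtype,
)

# in a consumer process: the buffer length comes from `token.size`
reader = attach_shm_array(
    token=writer.token,
    readonly=True,
)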
@@ -23,7 +23,8 @@ import decimal

 from bidict import bidict
 import numpy as np
-from pydantic import BaseModel
+
+from .types import Struct
 # from numba import from_dtype


@@ -126,7 +127,7 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
     )


-class Symbol(BaseModel):
+class Symbol(Struct):
     '''
     I guess this is some kinda container thing for dealing with
     all the different meta-data formats from brokers?
@@ -152,9 +153,7 @@ class Symbol(BaseModel):
         info: dict[str, Any],
         suffix: str = '',

-        # XXX: like wtf..
-        # ) -> 'Symbol':
-    ) -> None:
+    ) -> Symbol:

         tick_size = info.get('price_tick_size', 0.01)
         lot_tick_size = info.get('lot_tick_size', 0.0)
@@ -175,9 +174,7 @@ class Symbol(BaseModel):
         fqsn: str,
         info: dict[str, Any],

-        # XXX: like wtf..
-        # ) -> 'Symbol':
-    ) -> None:
+    ) -> Symbol:
         broker, key, suffix = unpack_fqsn(fqsn)
         return cls.from_broker_info(
             broker,
@@ -221,6 +218,10 @@ class Symbol(BaseModel):
         else:
             return (key, broker)

+    @property
+    def fqsn(self) -> str:
+        return '.'.join(self.tokens()).lower()
+
     def front_fqsn(self) -> str:
         '''
         fqsn = "fully qualified symbol name"
@@ -240,7 +241,7 @@ class Symbol(BaseModel):

         '''
         tokens = self.tokens()
-        fqsn = '.'.join(tokens)
+        fqsn = '.'.join(map(str.lower, tokens))
         return fqsn

     def iterfqsns(self) -> list[str]:
@@ -18,13 +18,24 @@
 ToOlS fOr CoPInG wITh "tHE wEB" protocols.

 """
-from contextlib import asynccontextmanager, AsyncExitStack
+from contextlib import (
+    asynccontextmanager,
+    AsyncExitStack,
+)
+from itertools import count
 from types import ModuleType
-from typing import Any, Callable, AsyncGenerator
+from typing import (
+    Any,
+    Optional,
+    Callable,
+    AsyncGenerator,
+    Iterable,
+)
 import json

 import trio
 import trio_websocket
+from wsproto.utilities import LocalProtocolError
 from trio_websocket._impl import (
     ConnectionClosed,
     DisconnectionTimeout,
@@ -35,43 +46,53 @@ from trio_websocket._impl import (

 from ..log import get_logger

+from .types import Struct

 log = get_logger(__name__)


 class NoBsWs:
-    """Make ``trio_websocket`` sockets stay up no matter the bs.
-
-    """
+    '''
+    Make ``trio_websocket`` sockets stay up no matter the bs.
+
+    You can provide a ``fixture`` async-context-manager which will be
+    enter/exitted around each reconnect operation.
+    '''
     recon_errors = (
         ConnectionClosed,
         DisconnectionTimeout,
         ConnectionRejected,
         HandshakeError,
         ConnectionTimeout,
+        LocalProtocolError,
     )

     def __init__(
         self,
         url: str,
-        token: str,
         stack: AsyncExitStack,
-        fixture: Callable,
-        serializer: ModuleType = json,
+        fixture: Optional[Callable] = None,
+        serializer: ModuleType = json
     ):
         self.url = url
-        self.token = token
         self.fixture = fixture
         self._stack = stack
         self._ws: 'WebSocketConnection' = None  # noqa

+        # TODO: is there some method we can call
+        # on the underlying `._ws` to get this?
+        self._connected: bool = False
+
     async def _connect(
         self,
         tries: int = 1000,
     ) -> None:

+        self._connected = False
         while True:
             try:
                 await self._stack.aclose()
-            except (DisconnectionTimeout, RuntimeError):
+            except self.recon_errors:
                 await trio.sleep(0.5)
             else:
                 break
@@ -82,19 +103,18 @@ class NoBsWs:
                 self._ws = await self._stack.enter_async_context(
                     trio_websocket.open_websocket_url(self.url)
                 )
-                # rerun user code fixture
-                if self.token == '':
+
+                if self.fixture is not None:
+                    # rerun user code fixture
                     ret = await self._stack.enter_async_context(
                         self.fixture(self)
                     )
-                else:
-                    ret = await self._stack.enter_async_context(
-                        self.fixture(self, self.token)
-                    )

                     assert ret is None

                 log.info(f'Connection success: {self.url}')
+
+                self._connected = True
                 return self._ws

             except self.recon_errors as err:
@@ -104,11 +124,15 @@ class NoBsWs:
                     f'{type(err)}...retry attempt {i}'
                 )
                 await trio.sleep(0.5)
+                self._connected = False
                 continue
             else:
                 log.exception('ws connection fail...')
                 raise last_err

+    def connected(self) -> bool:
+        return self._connected
+
     async def send_msg(
         self,
         data: Any,
@@ -128,21 +152,26 @@ class NoBsWs:
         except self.recon_errors:
             await self._connect()

+    def __aiter__(self):
+        return self
+
+    async def __anext__(self):
+        return await self.recv_msg()
+

 @asynccontextmanager
 async def open_autorecon_ws(
     url: str,

-    # TODO: proper type annot smh
-    fixture: Callable,
-    # used for authenticated websockets
-    token: str = '',
+    # TODO: proper type cannot smh
+    fixture: Optional[Callable] = None,
 ) -> AsyncGenerator[tuple[...], NoBsWs]:
     """Apparently we can QoS for all sorts of reasons..so catch em.

     """
     async with AsyncExitStack() as stack:
-        ws = NoBsWs(url, token, stack, fixture=fixture)
+        ws = NoBsWs(url, stack, fixture=fixture)
         await ws._connect()

         try:
@@ -150,3 +179,114 @@ async def open_autorecon_ws(

     finally:
         await stack.aclose()
+
+
+'''
+JSONRPC response-request style machinery for transparent multiplexing of msgs
+over a NoBsWs.
+
+'''
+
+
+class JSONRPCResult(Struct):
+    id: int
+    jsonrpc: str = '2.0'
+    result: Optional[dict] = None
+    error: Optional[dict] = None
+
+
+@asynccontextmanager
+async def open_jsonrpc_session(
+    url: str,
+    start_id: int = 0,
+    response_type: type = JSONRPCResult,
+    request_type: Optional[type] = None,
+    request_hook: Optional[Callable] = None,
+    error_hook: Optional[Callable] = None,
+) -> Callable[[str, dict], dict]:
+
+    async with (
+        trio.open_nursery() as n,
+        open_autorecon_ws(url) as ws
+    ):
+        rpc_id: Iterable = count(start_id)
+        rpc_results: dict[int, dict] = {}
+
+        async def json_rpc(method: str, params: dict) -> dict:
+            '''
+            perform a json rpc call and wait for the result, raise exception in
+            case of error field present on response
+            '''
+            msg = {
+                'jsonrpc': '2.0',
+                'id': next(rpc_id),
+                'method': method,
+                'params': params
+            }
+            _id = msg['id']
+
+            rpc_results[_id] = {
+                'result': None,
+                'event': trio.Event()
+            }
+
+            await ws.send_msg(msg)
+
+            await rpc_results[_id]['event'].wait()
+
+            ret = rpc_results[_id]['result']
+
+            del rpc_results[_id]
+
+            if ret.error is not None:
+                raise Exception(json.dumps(ret.error, indent=4))
+
+            return ret
+
+        async def recv_task():
+            '''
+            receives every ws message and stores it in its corresponding
+            result field, then sets the event to wakeup original sender
+            tasks. also recieves responses to requests originated from
+            the server side.
+
+            '''
+            async for msg in ws:
+                match msg:
+                    case {
+                        'result': _,
+                        'id': mid,
+                    } if res_entry := rpc_results.get(mid):
+
+                        res_entry['result'] = response_type(**msg)
+                        res_entry['event'].set()
+
+                    case {
+                        'result': _,
+                        'id': mid,
+                    } if not rpc_results.get(mid):
+                        log.warning(
+                            f'Unexpected ws msg: {json.dumps(msg, indent=4)}'
+                        )
+
+                    case {
+                        'method': _,
+                        'params': _,
+                    }:
+                        log.debug(f'Recieved\n{msg}')
+                        if request_hook:
+                            await request_hook(request_type(**msg))
+
+                    case {
+                        'error': error
+                    }:
+                        log.warning(f'Recieved\n{error}')
+                        if error_hook:
+                            await error_hook(response_type(**msg))
+
+                    case _:
+                        log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')
+
+        n.start_soon(recv_task)
+        yield json_rpc
+        n.cancel_scope.cancel()
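A minimal driver for the new `open_jsonrpc_session()` machinery added above; the endpoint URL and method are placeholders for whatever JSON-RPC service is targeted, and the module path is assumed to be `piker.data._web_bs`:

import trio

from piker.data._web_bs import open_jsonrpc_session


async def main() -> None:
    async with open_jsonrpc_session(
        'wss://example.com/ws/api/v2',  # placeholder endpoint
    ) as json_rpc:
        # blocks until the response with a matching `id` arrives;
        # raises if the response carries an `error` field
        res = await json_rpc('public/get_time', {})
        print(res.result)

trio.run(main)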
piker/data/feed.py: 1918 lines changed (diff suppressed because it is too large)
@@ -0,0 +1,321 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+abstractions for organizing, managing and generally operating-on
+real-time data processing data-structures.
+
+"Streams, flumes, cascades and flows.."
+
+"""
+from __future__ import annotations
+from contextlib import asynccontextmanager as acm
+from functools import partial
+from typing import (
+    AsyncIterator,
+    TYPE_CHECKING,
+)
+
+import tractor
+from tractor.trionics import (
+    maybe_open_context,
+)
+import pendulum
+import numpy as np
+
+from .types import Struct
+from ._source import (
+    Symbol,
+)
+from ._sharedmem import (
+    attach_shm_array,
+    ShmArray,
+    _Token,
+)
+from ._sampling import (
+    open_sample_stream,
+)
+
+if TYPE_CHECKING:
+    from pyqtgraph import PlotItem
+    from .feed import Feed
+
+
+# TODO: ideas for further abstractions as per
+# https://github.com/pikers/piker/issues/216 and
+# https://github.com/pikers/piker/issues/270:
+# - a ``Cascade`` would be the minimal "connection" of 2 ``Flumes``
+#   as per circuit parlance:
+#   https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection
+#   - could cover the combination of our `FspAdmin` and the
+#     backend `.fsp._engine` related machinery to "connect" one flume
+#     to another?
+# - a (financial signal) ``Flow`` would be the a "collection" of such
+#   minmial cascades. Some engineering based jargon concepts:
+#   - https://en.wikipedia.org/wiki/Signal_chain
+#   - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering)
+#   - https://en.wikipedia.org/wiki/Audio_signal_flow
+#   - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation
+#   - https://en.wikipedia.org/wiki/Dataflow_programming
+#   - https://en.wikipedia.org/wiki/Signal_programming
+#   - https://en.wikipedia.org/wiki/Incremental_computing
+
+
+class Flume(Struct):
+    '''
+    Composite reference type which points to all the addressing handles
+    and other meta-data necessary for the read, measure and management
+    of a set of real-time updated data flows.
+
+    Can be thought of as a "flow descriptor" or "flow frame" which
+    describes the high level properties of a set of data flows that can
+    be used seamlessly across process-memory boundaries.
+
+    Each instance's sub-components normally includes:
+     - a msg oriented quote stream provided via an IPC transport
+     - history and real-time shm buffers which are both real-time
+       updated and backfilled.
+     - associated startup indexing information related to both buffer
+       real-time-append and historical prepend addresses.
+     - low level APIs to read and measure the updated data and manage
+       queuing properties.
+
+    '''
+    symbol: Symbol
+    first_quote: dict
+    _rt_shm_token: _Token
+
+    # optional since some data flows won't have a "downsampled" history
+    # buffer/stream (eg. FSPs).
+    _hist_shm_token: _Token | None = None
+
+    # private shm refs loaded dynamically from tokens
+    _hist_shm: ShmArray | None = None
+    _rt_shm: ShmArray | None = None
+
+    stream: tractor.MsgStream | None = None
+    izero_hist: int = 0
+    izero_rt: int = 0
+    throttle_rate: int | None = None
+
+    # TODO: do we need this really if we can pull the `Portal` from
+    # ``tractor``'s internals?
+    feed: Feed | None = None
+
+    @property
+    def rt_shm(self) -> ShmArray:
+
+        if self._rt_shm is None:
+            self._rt_shm = attach_shm_array(
+                token=self._rt_shm_token,
+                readonly=True,
+            )
+
+        return self._rt_shm
+
+    @property
+    def hist_shm(self) -> ShmArray:
+
+        if self._hist_shm_token is None:
+            raise RuntimeError(
+                'No shm token has been set for the history buffer?'
+            )
+
+        if (
+            self._hist_shm is None
+        ):
+            self._hist_shm = attach_shm_array(
+                token=self._hist_shm_token,
+                readonly=True,
+            )
+
+        return self._hist_shm
+
+    async def receive(self) -> dict:
+        return await self.stream.receive()
+
+    @acm
+    async def index_stream(
+        self,
+        delay_s: float = 1,
+
+    ) -> AsyncIterator[int]:
+
+        if not self.feed:
+            raise RuntimeError('This flume is not part of any ``Feed``?')
+
+        # TODO: maybe a public (property) API for this in ``tractor``?
+        portal = self.stream._ctx._portal
+        assert portal
+
+        # XXX: this should be singleton on a host,
+        # a lone broker-daemon per provider should be
+        # created for all practical purposes
+        async with open_sample_stream(float(delay_s)) as stream:
+            yield stream
+
+    def get_ds_info(
+        self,
+    ) -> tuple[float, float, float]:
+        '''
+        Compute the "downsampling" ratio info between the historical shm
+        buffer and the real-time (HFT) one.
+
+        Return a tuple of the fast sample period, historical sample
+        period and ratio between them.
+
+        '''
+        times = self.hist_shm.array['time']
+        end = pendulum.from_timestamp(times[-1])
+        start = pendulum.from_timestamp(times[times != times[-1]][-1])
+        hist_step_size_s = (end - start).seconds
+
+        times = self.rt_shm.array['time']
+        end = pendulum.from_timestamp(times[-1])
+        start = pendulum.from_timestamp(times[times != times[-1]][-1])
+        rt_step_size_s = (end - start).seconds
+
+        ratio = hist_step_size_s / rt_step_size_s
+        return (
+            rt_step_size_s,
+            hist_step_size_s,
+            ratio,
+        )
+
+    # TODO: get native msgspec decoding for these workinn
+    def to_msg(self) -> dict:
+        msg = self.to_dict()
+        msg['symbol'] = msg['symbol'].to_dict()
+
+        # can't serialize the stream or feed objects, it's expected
+        # you'll have a ref to it since this msg should be rxed on
+        # a stream on whatever far end IPC..
+        msg.pop('stream')
+        msg.pop('feed')
+        return msg
+
+    @classmethod
+    def from_msg(cls, msg: dict) -> dict:
+        symbol = Symbol(**msg.pop('symbol'))
+        return cls(
+            symbol=symbol,
+            **msg,
+        )
+
+    def get_index(
+        self,
+        time_s: float,
+
+    ) -> int:
+        '''
+        Return array shm-buffer index for for epoch time.
+
+        '''
+        array = self.rt_shm.array
+        times = array['time']
+        mask = (times >= time_s)
+
+        if any(mask):
+            return array['index'][mask][0]
+
+        # just the latest index
+        array['index'][-1]
+
+    def slice_from_time(
+        self,
+        array: np.ndarray,
+        start_t: float,
+        stop_t: float,
+        timeframe_s: int = 1,
+        return_data: bool = False,
+
+    ) -> np.ndarray:
+        '''
+        Slice an input struct array providing only datums
+        "in view" of this chart.
+
+        '''
+        arr = {
+            1: self.rt_shm.array,
+            60: self.hist_shm.arry,
+        }[timeframe_s]
+
+        times = arr['time']
+        index = array['index']
+
+        # use advanced indexing to map the
+        # time range to the index range.
+        mask = (
+            (times >= start_t)
+            &
+            (times < stop_t)
+        )
+
+        # TODO: if we can ensure each time field has a uniform
+        # step we can instead do some arithmetic to determine
+        # the equivalent index like we used to?
+        # return array[
+        #     lbar - ifirst:
+        #     (rbar - ifirst) + 1
+        # ]
+
+        i_by_t = index[mask]
+        i_0 = i_by_t[0]
+
+        abs_slc = slice(
+            i_0,
+            i_by_t[-1],
+        )
+        # slice data by offset from the first index
+        # available in the passed datum set.
+        read_slc = slice(
+            0,
+            i_by_t[-1] - i_0,
+        )
+        if not return_data:
+            return (
+                abs_slc,
+                read_slc,
+            )
+
+        # also return the readable data from the timerange
+        return (
+            abs_slc,
+            read_slc,
+            arr[mask],
+        )
+
+    def view_data(
+        self,
+        plot: PlotItem,
+        timeframe_s: int = 1,
+
+    ) -> np.ndarray:
+
+        # get far-side x-indices plot view
+        vr = plot.viewRect()
+
+        (
+            abs_slc,
+            buf_slc,
+            iv_arr,
+        ) = self.slice_from_time(
+            start_t=vr.left(),
+            stop_t=vr.right(),
+            timeframe_s=timeframe_s,
+            return_data=True,
+        )
+        return iv_arr
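Since `Flume` is a plain `Struct`, shipping one across an IPC boundary is just the `.to_msg()`/`.from_msg()` pair shown in the new file. A round-trip sketch, assuming the module lands at `piker.data.flume`:

from piker.data.flume import Flume


def roundtrip(flume: Flume) -> Flume:
    msg = flume.to_msg()
    # the live `stream`/`feed` refs are popped since they can't be
    # serialized; the receiving side re-associates its own
    assert 'stream' not in msg and 'feed' not in msg
    return Flume.from_msg(msg)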
@@ -37,8 +37,8 @@ import time
 from math import isnan

 from bidict import bidict
-import msgpack
-import pyqtgraph as pg
+from msgspec.msgpack import encode, decode
+# import pyqtgraph as pg
 import numpy as np
 import tractor
 from trio_websocket import open_websocket_url
@@ -56,6 +56,7 @@ if TYPE_CHECKING:

 from .feed import maybe_open_feed
 from ..log import get_logger, get_console_log
+from .._profile import Profiler


 log = get_logger(__name__)
@@ -131,7 +132,10 @@ def start_marketstore(

     mktsdir = os.path.join(config._config_dir, 'marketstore')

-    # create when dne
+    # create dirs when dne
+    if not os.path.isdir(config._config_dir):
+        os.mkdir(config._config_dir)
+
     if not os.path.isdir(mktsdir):
         os.mkdir(mktsdir)

@@ -387,50 +391,54 @@ class Storage:
     async def load(
         self,
         fqsn: str,
+        timeframe: int,

     ) -> tuple[
-        dict[int, np.ndarray],  # timeframe (in secs) to series
+        np.ndarray,  # timeframe sampled array-series
         Optional[datetime],  # first dt
         Optional[datetime],  # last dt
     ]:

         first_tsdb_dt, last_tsdb_dt = None, None
-        tsdb_arrays = await self.read_ohlcv(
+        hist = await self.read_ohlcv(
             fqsn,
             # on first load we don't need to pull the max
             # history per request size worth.
             limit=3000,
+            timeframe=timeframe,
         )
-        log.info(f'Loaded tsdb history {tsdb_arrays}')
+        log.info(f'Loaded tsdb history {hist}')

-        if tsdb_arrays:
-            fastest = list(tsdb_arrays.values())[0]
-            times = fastest['Epoch']
+        if len(hist):
+            times = hist['Epoch']
             first, last = times[0], times[-1]
             first_tsdb_dt, last_tsdb_dt = map(
                 pendulum.from_timestamp, [first, last]
             )

-        return tsdb_arrays, first_tsdb_dt, last_tsdb_dt
+        return (
+            hist,  # array-data
+            first_tsdb_dt,  # start of query-frame
+            last_tsdb_dt,  # most recent
+        )

     async def read_ohlcv(
         self,
         fqsn: str,
-        timeframe: Optional[Union[int, str]] = None,
+        timeframe: int | str,
         end: Optional[int] = None,
         limit: int = int(800e3),

-    ) -> tuple[
-        MarketstoreClient,
-        Union[dict, np.ndarray]
-    ]:
+    ) -> np.ndarray:
         client = self.client
         syms = await client.list_symbols()

         if fqsn not in syms:
             return {}

-        tfstr = tf_in_1s[1]
+        # use the provided timeframe or 1s by default
+        tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])
+
         params = Params(
             symbols=fqsn,
@@ -444,58 +452,72 @@ class Storage:
             limit=limit,
         )

-        if timeframe is None:
-            log.info(f'starting {fqsn} tsdb granularity scan..')
-            # loop through and try to find highest granularity
-            for tfstr in tf_in_1s.values():
-                try:
-                    log.info(f'querying for {tfstr}@{fqsn}')
-                    params.set('timeframe', tfstr)
-                    result = await client.query(params)
-                    break
-
-                except purerpc.grpclib.exceptions.UnknownError:
-                    # XXX: this is already logged by the container and
-                    # thus shows up through `marketstored` logs relay.
-                    # log.warning(f'{tfstr}@{fqsn} not found')
-                    continue
-            else:
-                return {}
-
-        else:
+        try:
             result = await client.query(params)
+        except purerpc.grpclib.exceptions.UnknownError as err:
+            # indicate there is no history for this timeframe
+            log.exception(
+                f'Unknown mkts QUERY error: {params}\n'
+                f'{err.args}'
+            )
+            return {}

         # TODO: it turns out column access on recarrays is actually slower:
         # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
         # it might make sense to make these structured arrays?
-        # Fill out a `numpy` array-results map
-        arrays = {}
-        for fqsn, data_set in result.by_symbols().items():
-            arrays.setdefault(fqsn, {})[
-                tf_in_1s.inverse[data_set.timeframe]
-            ] = data_set.array
-
-        return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]
+        data_set = result.by_symbols()[fqsn]
+        array = data_set.array
+
+        # XXX: ensure sample rate is as expected
+        time = data_set.array['Epoch']
+        if len(time) > 1:
+            time_step = time[-1] - time[-2]
+            ts = tf_in_1s.inverse[data_set.timeframe]
+
+            if time_step != ts:
+                log.warning(
+                    f'MKTS BUG: wrong timeframe loaded: {time_step}'
+                    'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG'
+                    f'WIPING HISTORY FOR {ts}s'
+                )
+                await self.delete_ts(fqsn, timeframe)
+
+                # try reading again..
+                return await self.read_ohlcv(
+                    fqsn,
+                    timeframe,
+                    end,
+                    limit,
+                )
+
+        return array

     async def delete_ts(
         self,
         key: str,
         timeframe: Optional[Union[int, str]] = None,
+        fmt: str = 'OHLCV',

     ) -> bool:

         client = self.client
         syms = await client.list_symbols()
         print(syms)
-        # if key not in syms:
-        #     raise KeyError(f'`{fqsn}` table key not found?')
+        if key not in syms:
+            raise KeyError(f'`{key}` table key not found in\n{syms}?')

-        return await client.destroy(tbk=key)
+        tbk = mk_tbk((
+            key,
+            tf_in_1s.get(timeframe, tf_in_1s[60]),
+            fmt,
+        ))
+        return await client.destroy(tbk=tbk)

     async def write_ohlcv(
         self,
         fqsn: str,
         ohlcv: np.ndarray,
+        timeframe: int,
         append_and_duplicate: bool = True,
         limit: int = int(800e3),

@@ -519,17 +541,18 @@ class Storage:

         m, r = divmod(len(mkts_array), limit)

+        tfkey = tf_in_1s[timeframe]
         for i in range(m, 1):
             to_push = mkts_array[i-1:i*limit]

             # write to db
             resp = await self.client.write(
                 to_push,
-                tbk=f'{fqsn}/1Sec/OHLCV',
+                tbk=f'{fqsn}/{tfkey}/OHLCV',

                 # NOTE: will will append duplicates
                 # for the same timestamp-index.
-                # TODO: pre deduplicate?
+                # TODO: pre-deduplicate?
                 isvariablelength=append_and_duplicate,
             )

@@ -548,7 +571,7 @@ class Storage:
             # write to db
             resp = await self.client.write(
                 to_push,
-                tbk=f'{fqsn}/1Sec/OHLCV',
+                tbk=f'{fqsn}/{tfkey}/OHLCV',

                 # NOTE: will will append duplicates
                 # for the same timestamp-index.
@@ -577,6 +600,7 @@ class Storage:
     # def delete_range(self, start_dt, end_dt) -> None:
     # ...


 @acm
 async def open_storage_client(
     fqsn: str,
@@ -626,7 +650,7 @@ async def tsdb_history_update(
     # * the original data feed arch blurb:
     #   - https://github.com/pikers/piker/issues/98
     #
-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         disabled=False,  # not pg_profile_enabled(),
         delayed=False,
     )
@@ -638,34 +662,35 @@ async def tsdb_history_update(
             [fqsn],
             start_stream=False,

-        ) as (feed, stream),
+        ) as feed,
     ):
         profiler(f'opened feed for {fqsn}')

-        to_append = feed.shm.array
-        to_prepend = None
+        # to_append = feed.hist_shm.array
+        # to_prepend = None

         if fqsn:
-            symbol = feed.symbols.get(fqsn)
+            flume = feed.flumes[fqsn]
+            symbol = flume.symbol
             if symbol:
-                fqsn = symbol.front_fqsn()
+                fqsn = symbol.fqsn

         # diff db history with shm and only write the missing portions
-        ohlcv = feed.shm.array
+        # ohlcv = flume.hist_shm.array

         # TODO: use pg profiler
-        tsdb_arrays = await storage.read_ohlcv(fqsn)
-        # hist diffing
-        if tsdb_arrays:
-            for secs in (1, 60):
-                ts = tsdb_arrays.get(secs)
-                if ts is not None and len(ts):
-                    # these aren't currently used but can be referenced from
-                    # within the embedded ipython shell below.
-                    to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
-                    to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
-
-        profiler('Finished db arrays diffs')
+        # for secs in (1, 60):
+        #     tsdb_array = await storage.read_ohlcv(
+        #         fqsn,
+        #         timeframe=timeframe,
+        #     )
+        #     # hist diffing:
+        #     # these aren't currently used but can be referenced from
+        #     # within the embedded ipython shell below.
+        #     to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
+        #     to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
+
+        # profiler('Finished db arrays diffs')

         syms = await storage.client.list_symbols()
         log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
@@ -774,12 +799,13 @@ async def stream_quotes(
     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
         # send subs topics to server
         resp = await ws.send_message(
-            msgpack.dumps({'streams': list(tbks.values())})
+            encode({'streams': list(tbks.values())})
        )
         log.info(resp)

         async def recv() -> dict[str, Any]:
-            return msgpack.loads((await ws.get_message()), encoding='utf-8')
+            return decode((await ws.get_message()), encoding='utf-8')

         streams = (await recv())['streams']
         log.info(f"Subscribed to {streams}")
@@ -0,0 +1,88 @@
+# piker: trading gear for hackers
+# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Built-in (extension) types.
+
+"""
+import sys
+from typing import Optional
+from pprint import pformat
+
+import msgspec
+
+
+class Struct(
+    msgspec.Struct,
+
+    # https://jcristharif.com/msgspec/structs.html#tagged-unions
+    # tag='pikerstruct',
+    # tag=True,
+):
+    '''
+    A "human friendlier" (aka repl buddy) struct subtype.
+
+    '''
+    def to_dict(self) -> dict:
+        return {
+            f: getattr(self, f)
+            for f in self.__struct_fields__
+        }
+
+    # Lul, doesn't seem to work that well..
+    # def __repr__(self):
+    #     # only turn on pprint when we detect a python REPL
+    #     # at runtime B)
+    #     if (
+    #         hasattr(sys, 'ps1')
+    #         # TODO: check if we're in pdb
+    #     ):
+    #         return self.pformat()
+
+    #     return super().__repr__()
+
+    def pformat(self) -> str:
+        return f'Struct({pformat(self.to_dict())})'
+
+    def copy(
+        self,
+        update: Optional[dict] = None,
+
+    ) -> msgspec.Struct:
+        '''
+        Validate-typecast all self defined fields, return a copy of us
+        with all such fields.
+
+        This is kinda like the default behaviour in `pydantic.BaseModel`.
+
+        '''
+        if update:
+            for k, v in update.items():
+                setattr(self, k, v)
+
+        # roundtrip serialize to validate
+        return msgspec.msgpack.Decoder(
+            type=type(self)
+        ).decode(
+            msgspec.msgpack.Encoder().encode(self)
+        )
+
+    def typecast(
+        self,
+        # fields: Optional[list[str]] = None,
+    ) -> None:
+        for fname, ftype in self.__annotations__.items():
+            setattr(self, fname, ftype(getattr(self, fname)))
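
Aside: the `copy()` above leans on msgspec's decode-time validation — encoding to msgpack and decoding back against `type(self)` re-checks every field annotation. A minimal runnable sketch of just that mechanism (the `Ticker` type and its fields are illustrative, not from this changeset):

    import msgspec

    class Ticker(msgspec.Struct):
        symbol: str
        price: float

    t = Ticker(symbol='xbtusd', price=29_000.0)

    # roundtrip through msgpack re-validates all fields against
    # their annotations, raising on any type mismatch
    t2 = msgspec.msgpack.Decoder(type=Ticker).decode(
        msgspec.msgpack.Encoder().encode(t)
    )
    assert t2 == t
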
@@ -78,7 +78,8 @@ class Fsp:
     # + the consuming fsp *to* the consumers output
     # shm flow.
     _flow_registry: dict[
-        tuple[_Token, str], _Token,
+        tuple[_Token, str],
+        tuple[_Token, Optional[ShmArray]],
     ] = {}

     def __init__(
@@ -120,7 +121,6 @@ class Fsp:
     ):
         return self.func(*args, **kwargs)

-    # TODO: lru_cache this? prettty sure it'll work?
     def get_shm(
         self,
         src_shm: ShmArray,
@@ -131,12 +131,27 @@ class Fsp:
         for this "instance" of a signal processor for
         the given ``key``.

+        The destination shm "token" and array are cached if possible to
+        minimize multiple stdlib/system calls.
+
         '''
-        dst_token = self._flow_registry[
+        dst_token, maybe_array = self._flow_registry[
             (src_shm._token, self.name)
         ]
-        shm = attach_shm_array(dst_token)
-        return shm
+        if maybe_array is None:
+            self._flow_registry[
+                (src_shm._token, self.name)
+            ] = (
+                dst_token,
+                # "cache" the ``ShmArray`` such that
+                # we call the underlying "attach" code as few
+                # times as possible as per:
+                # - https://github.com/pikers/piker/issues/359
+                # - https://github.com/pikers/piker/issues/332
+                maybe_array := attach_shm_array(dst_token)
+            )
+
+        return maybe_array


 def fsp(
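
Aside: the `(token, None)` placeholder plus walrus-assignment swap in `get_shm()` is a generic attach-once cache. A toy sketch under hypothetical names (`attach()` stands in for the real `attach_shm_array()`):

    from typing import Any, Optional

    _registry: dict[str, tuple[str, Optional[Any]]] = {
        'flow-a': ('token-abc', None),
    }

    def attach(token: str) -> Any:
        # hypothetical stand-in for the expensive attach call
        return object()

    def get_cached(key: str) -> Any:
        token, maybe_obj = _registry[key]
        if maybe_obj is None:
            # cache on first access so the attach runs exactly once
            _registry[key] = (token, maybe_obj := attach(token))
        return maybe_obj

    assert get_cached('flow-a') is get_cached('flow-a')
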
@@ -184,7 +199,10 @@ def maybe_mk_fsp_shm(
     # TODO: load output types from `Fsp`
     # - should `index` be a required internal field?
     fsp_dtype = np.dtype(
-        [('index', int)] +
+        [('index', int)]
+        +
+        [('time', float)]
+        +
         [(field_name, float) for field_name in target.outputs]
     )

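
Aside: the list-concatenation above builds a numpy structured dtype; with the new `('time', float)` field every fsp output array carries its own timestamps. An illustrative composition (the output names are made up):

    import numpy as np

    outputs = ['wma', 'vwap']  # hypothetical fsp output names
    fsp_dtype = np.dtype(
        [('index', int)]
        + [('time', float)]
        + [(name, float) for name in outputs]
    )
    arr = np.zeros(4, dtype=fsp_dtype)
    assert arr.dtype.names == ('index', 'time', 'wma', 'vwap')
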
@@ -21,12 +21,13 @@ core task logic for processing chains
 from dataclasses import dataclass
 from functools import partial
 from typing import (
-    AsyncIterator, Callable, Optional,
+    AsyncIterator,
+    Callable,
+    Optional,
     Union,
 )

 import numpy as np
-import pyqtgraph as pg
 import trio
 from trio_typing import TaskStatus
 import tractor

@@ -35,14 +36,22 @@ from tractor.msg import NamespacePath
 from ..log import get_logger, get_console_log
 from .. import data
 from ..data import attach_shm_array
-from ..data.feed import Feed
+from ..data.feed import (
+    Flume,
+    Feed,
+)
 from ..data._sharedmem import ShmArray
+from ..data._sampling import (
+    _default_delay_s,
+    open_sample_stream,
+)
 from ..data._source import Symbol
 from ._api import (
     Fsp,
     _load_builtins,
     _Token,
 )
+from .._profile import Profiler

 log = get_logger(__name__)

@@ -77,7 +86,7 @@ async def filter_quotes_by_sym(
 async def fsp_compute(

     symbol: Symbol,
-    feed: Feed,
+    flume: Flume,
     quote_stream: trio.abc.ReceiveChannel,

     src: ShmArray,

@@ -90,7 +99,7 @@ async def fsp_compute(

 ) -> None:

-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         delayed=False,
         disabled=True
     )
@@ -105,16 +114,17 @@ async def fsp_compute(
         filter_quotes_by_sym(fqsn, quote_stream),

         # XXX: currently the ``ohlcv`` arg
-        feed.shm,
+        flume.rt_shm,
     )

-    # Conduct a single iteration of fsp with historical bars input
-    # and get historical output
+    # HISTORY COMPUTE PHASE
+    # conduct a single iteration of fsp with historical bars input
+    # and get historical output.
     history_output: Union[
         dict[str, np.ndarray],  # multi-output case
         np.ndarray,  # single output case
     ]
-    history_output = await out_stream.__anext__()
+    history_output = await anext(out_stream)

     func_name = func.__name__
     profiler(f'{func_name} generated history')
@@ -126,9 +136,13 @@ async def fsp_compute(
     # each respective field.
     fields = getattr(dst.array.dtype, 'fields', None).copy()
     fields.pop('index')
-    history: Optional[np.ndarray] = None  # TODO: nptyping here!
+    history_by_field: Optional[np.ndarray] = None
+    src_time = src.array['time']

-    if fields and len(fields) > 1 and fields:
+    if (
+        fields and
+        len(fields) > 1
+    ):
         if not isinstance(history_output, dict):
             raise ValueError(
                 f'`{func_name}` is a multi-output FSP and should yield a '
@@ -139,7 +153,7 @@ async def fsp_compute(
             if key in history_output:
                 output = history_output[key]

-                if history is None:
+                if history_by_field is None:

                     if output is None:
                         length = len(src.array)
@@ -149,7 +163,7 @@ async def fsp_compute(
                     # using the first output, determine
                     # the length of the struct-array that
                     # will be pushed to shm.
-                    history = np.zeros(
+                    history_by_field = np.zeros(
                         length,
                         dtype=dst.array.dtype
                     )
@@ -157,7 +171,7 @@ async def fsp_compute(
                 if output is None:
                     continue

-                history[key] = output
+                history_by_field[key] = output

         # single-key output stream
         else:
@@ -166,11 +180,13 @@ async def fsp_compute(
                 f'`{func_name}` is a single output FSP and should yield an '
                 '`np.ndarray` for history'
             )
-        history = np.zeros(
+        history_by_field = np.zeros(
             len(history_output),
             dtype=dst.array.dtype
         )
-        history[func_name] = history_output
+        history_by_field[func_name] = history_output
+
+    history_by_field['time'] = src_time[-len(history_by_field):]

     # TODO: XXX:
     # THERE'S A BIG BUG HERE WITH THE `index` field since we're
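
Aside: the new `src_time[-len(history_by_field):]` line tail-aligns the (possibly shorter) history output to the most recent source timestamps. In isolation:

    import numpy as np

    src_time = np.array([1., 2., 3., 4., 5.])
    hist = np.zeros(3, dtype=[('time', float)])
    # take the last len(hist) timestamps from the source clock
    hist['time'] = src_time[-len(hist):]
    assert list(hist['time']) == [3., 4., 5.]
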
@@ -187,7 +203,10 @@ async def fsp_compute(

     # TODO: can we use this `start` flag instead of the manual
     # setting above?
-    index = dst.push(history, start=first)
+    index = dst.push(
+        history_by_field,
+        start=first,
+    )

     profiler(f'{func_name} pushed history')
     profiler.finish()
@@ -213,8 +232,14 @@ async def fsp_compute(

             log.debug(f"{func_name}: {processed}")
             key, output = processed
-            index = src.index
-            dst.array[-1][key] = output
+            # dst.array[-1][key] = output
+            dst.array[[key, 'time']][-1] = (
+                output,
+                # TODO: what about pushing ``time.time_ns()``
+                # in which case we'll need to round at the graphics
+                # processing / sampling layer?
+                src.array[-1]['time']
+            )

             # NOTE: for now we aren't streaming this to the consumer
             # stream latest array index entry which basically just acts
@@ -225,6 +250,7 @@ async def fsp_compute(
             # N-consumers who subscribe for the real-time output,
             # which we'll likely want to implement using local-mem
             # chans for the fan out?
+            # index = src.index
             # if attach_stream:
             #     await client_stream.send(index)

@@ -261,7 +287,7 @@ async def cascade(
     destination shm array buffer.

     '''
-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         delayed=False,
         disabled=False
     )
@@ -284,9 +310,10 @@ async def cascade(
     # TODO: ugh i hate this wind/unwind to list over the wire
     # but not sure how else to do it.
     for (token, fsp_name, dst_token) in shm_registry:
-        Fsp._flow_registry[
-            (_Token.from_msg(token), fsp_name)
-        ] = _Token.from_msg(dst_token)
+        Fsp._flow_registry[(
+            _Token.from_msg(token),
+            fsp_name,
+        )] = _Token.from_msg(dst_token), None

     fsp: Fsp = reg.get(
         NamespacePath(ns_path)
@@ -298,6 +325,7 @@ async def cascade(
         raise ValueError(f'Unknown fsp target: {ns_path}')

     # open a data feed stream with requested broker
+    feed: Feed
     async with data.feed.maybe_open_feed(
         [fqsn],

@@ -307,14 +335,13 @@ async def cascade(
         # needs to get throttled the ticks we generate.
         # tick_throttle=60,

-    ) as (feed, quote_stream):
-        symbol = feed.symbols[fqsn]
+    ) as feed:

+        flume = feed.flumes[fqsn]
+        symbol = flume.symbol
+        assert src.token == flume.rt_shm.token
         profiler(f'{func}: feed up')

-        assert src.token == feed.shm.token
-        # last_len = new_len = len(src.array)
-
         func_name = func.__name__
         async with (
             trio.open_nursery() as n,
@@ -324,8 +351,8 @@ async def cascade(

                 fsp_compute,
                 symbol=symbol,
-                feed=feed,
-                quote_stream=quote_stream,
+                flume=flume,
+                quote_stream=flume.stream,

                 # shm
                 src=src,
@@ -361,7 +388,7 @@ async def cascade(
         ) -> tuple[TaskTracker, int]:
             # TODO: adopt an incremental update engine/approach
             # where possible here eventually!
-            log.debug(f're-syncing fsp {func_name} to source')
+            log.info(f're-syncing fsp {func_name} to source')
             tracker.cs.cancel()
             await tracker.complete.wait()
             tracker, index = await n.start(fsp_target)
@@ -374,14 +401,16 @@ async def cascade(
                 'key': dst_shm_token,
                 'first': dst._first.value,
                 'last': dst._last.value,
-            }})
+            }
+        })
             return tracker, index

         def is_synced(
             src: ShmArray,
             dst: ShmArray
         ) -> tuple[bool, int, int]:
-            '''Predicate to dertmine if a destination FSP
+            '''
+            Predicate to dertmine if a destination FSP
             output array is aligned to its source array.

             '''
@@ -390,16 +419,15 @@ async def cascade(
             return not (
                 # the source is likely backfilling and we must
                 # sync history calculations
-                len_diff > 2 or
+                len_diff > 2

                 # we aren't step synced to the source and may be
                 # leading/lagging by a step
-                step_diff > 1 or
-                step_diff < 0
+                or step_diff > 1
+                or step_diff < 0
             ), step_diff, len_diff

         async def poll_and_sync_to_step(

             tracker: TaskTracker,
             src: ShmArray,
             dst: ShmArray,
@@ -418,18 +446,23 @@ async def cascade(
         # detect sample period step for subscription to increment
         # signal
         times = src.array['time']
-        delay_s = times[-1] - times[times != times[-1]][-1]
+        if len(times) > 1:
+            last_ts = times[-1]
+            delay_s = float(last_ts - times[times != last_ts][-1])
+        else:
+            # our default "HFT" sample rate.
+            delay_s = _default_delay_s

-        # Increment the underlying shared memory buffer on every
-        # "increment" msg received from the underlying data feed.
-        async with feed.index_stream(
-            int(delay_s)
-        ) as istream:
+        # sub and increment the underlying shared memory buffer
+        # on every step msg received from the global `samplerd`
+        # service.
+        async with open_sample_stream(float(delay_s)) as istream:

             profiler(f'{func_name}: sample stream up')
             profiler.finish()

-            async for _ in istream:
+            async for i in istream:
+                # print(f'FSP incrementing {i}')

                 # respawn the compute task if the source
                 # array has been updated such that we compute
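
Aside: the sample-period detection kept here infers the step by diffing the last timestamp against the most recent *different* one, which tolerates repeated trailing stamps. In isolation:

    import numpy as np

    times = np.array([1.0, 2.0, 3.0, 3.0, 3.0])
    last_ts = times[-1]
    # most recent timestamp differing from the last one -> 2.0
    delay_s = float(last_ts - times[times != last_ts][-1])
    assert delay_s == 1.0
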
@@ -458,3 +491,23 @@ async def cascade(
                         last = array[-1:].copy()

                         dst.push(last)
+
+                # sync with source buffer's time step
+                src_l2 = src.array[-2:]
+                src_li, src_lt = src_l2[-1][['index', 'time']]
+                src_2li, src_2lt = src_l2[-2][['index', 'time']]
+                dst._array['time'][src_li] = src_lt
+                dst._array['time'][src_2li] = src_2lt
+
+                # last2 = dst.array[-2:]
+                # if (
+                #     last2[-1]['index'] != src_li
+                #     or last2[-2]['index'] != src_2li
+                # ):
+                #     dstl2 = list(last2)
+                #     srcl2 = list(src_l2)
+                #     print(
+                #         # f'{dst.token}\n'
+                #         f'src: {srcl2}\n'
+                #         f'dst: {dstl2}\n'
+                #     )
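
Aside: the `src_l2[-1][['index', 'time']]` spelling above pulls two fields out of one structured-array record in a single index. In isolation:

    import numpy as np

    dt = np.dtype([('index', int), ('time', float)])
    a = np.array([(0, 1.0), (1, 2.0)], dtype=dt)
    idx, ts = a[-1][['index', 'time']]
    assert (idx, ts) == (1, 2.0)
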
@@ -234,7 +234,7 @@ async def flow_rates(
     # FSPs, user input, and possibly any general event stream in
     # real-time. Hint: ideally implemented with caching until mutated
     # ;)
-    period: 'Param[int]' = 6,  # noqa
+    period: 'Param[int]' = 1,  # noqa

     # TODO: support other means by providing a map
     # to weights `partial()`-ed with `wma()`?
@@ -268,8 +268,7 @@ async def flow_rates(
         'dark_dvlm_rate': None,
     }

-    # TODO: 3.10 do ``anext()``
-    quote = await source.__anext__()
+    quote = await anext(source)

     # ltr = 0
     # lvr = 0
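
Aside: `anext()` became a builtin in Python 3.10, which is what lets the commit drop the `# TODO: 3.10` note along with the dunder spelling. Both forms shown for contrast:

    import asyncio

    async def gen():
        yield 1
        yield 2

    async def main():
        g = gen()
        assert await anext(g) == 1        # 3.10+ builtin
        assert await g.__anext__() == 2   # pre-3.10 spelling

    asyncio.run(main())
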
@@ -0,0 +1,998 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+'''
+Personal/Private position parsing, calculating, summarizing in a way
+that doesn't try to cuk most humans who prefer to not lose their moneys..
+(looking at you `ib` and dirt-bird friends)
+
+'''
+from contextlib import contextmanager as cm
+from pprint import pformat
+import os
+from os import path
+from math import copysign
+import re
+import time
+from typing import (
+    Any,
+    Iterator,
+    Optional,
+    Union,
+)
+
+import pendulum
+from pendulum import datetime, now
+import tomli
+import toml
+
+from . import config
+from .brokers import get_brokermod
+from .clearing._messages import BrokerdPosition, Status
+from .data._source import Symbol
+from .log import get_logger
+from .data.types import Struct
+
+log = get_logger(__name__)
+
+
+@cm
+def open_trade_ledger(
+    broker: str,
+    account: str,
+
+) -> str:
+    '''
+    Indempotently create and read in a trade log file from the
+    ``<configuration_dir>/ledgers/`` directory.
+
+    Files are named per broker account of the form
+    ``<brokername>_<accountname>.toml``. The ``accountname`` here is the
+    name as defined in the user's ``brokers.toml`` config.
+
+    '''
+    ldir = path.join(config._config_dir, 'ledgers')
+    if not path.isdir(ldir):
+        os.makedirs(ldir)
+
+    fname = f'trades_{broker}_{account}.toml'
+    tradesfile = path.join(ldir, fname)
+
+    if not path.isfile(tradesfile):
+        log.info(
+            f'Creating new local trades ledger: {tradesfile}'
+        )
+        with open(tradesfile, 'w') as cf:
+            pass  # touch
+    with open(tradesfile, 'rb') as cf:
+        start = time.time()
+        ledger = tomli.load(cf)
+        print(f'Ledger load took {time.time() - start}s')
+        cpy = ledger.copy()
+
+    try:
+        yield cpy
+    finally:
+        if cpy != ledger:
+            # TODO: show diff output?
+            # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
+            print(f'Updating ledger for {tradesfile}:\n')
+            ledger.update(cpy)
+
+            # we write on close the mutated ledger data
+            with open(tradesfile, 'w') as cf:
+                toml.dump(ledger, cf)
+
+
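Aside: typical use of the ledger context manager just defined — mutations to the yielded copy are diffed and flushed back to the TOML file on exit. The broker/account names below are placeholders and the `piker.pp` module path is assumed (the file name isn't shown in this rendering):

    from piker.pp import open_trade_ledger

    with open_trade_ledger('ib', 'algopaper') as ledger:
        # keys are broker transaction ids; values are raw trade records
        ledger['fake-tid-1'] = {'price': 10.0, 'size': 1.0}
    # on exit the mutated copy is written back to
    # <config_dir>/ledgers/trades_ib_algopaper.toml
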
+class Transaction(Struct, frozen=True):
+    # TODO: should this be ``.to`` (see below)?
+    fqsn: str
+
+    tid: Union[str, int]  # unique transaction id
+    size: float
+    price: float
+    cost: float  # commisions or other additional costs
+    dt: datetime
+    expiry: Optional[datetime] = None
+
+    # optional key normally derived from the broker
+    # backend which ensures the instrument-symbol this record
+    # is for is truly unique.
+    bsuid: Optional[Union[str, int]] = None
+
+    # optional fqsn for the source "asset"/money symbol?
+    # from: Optional[str] = None
+
+
+def iter_by_dt(
+    clears: dict[str, Any],
+) -> Iterator[tuple[str, dict]]:
+    '''
+    Iterate entries of a ``clears: dict`` table sorted by entry recorded
+    datetime presumably set at the ``'dt'`` field in each entry.
+
+    '''
+    for tid, data in sorted(
+        list(clears.items()),
+        key=lambda item: item[1]['dt'],
+    ):
+        yield tid, data
+
+
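Aside: `iter_by_dt()` is just a `sorted()` over `(tid, entry)` pairs keyed on each entry's `'dt'` field. In isolation:

    clears = {
        't2': {'dt': 2},
        't1': {'dt': 1},
    }
    ordered = sorted(clears.items(), key=lambda item: item[1]['dt'])
    assert [tid for tid, _ in ordered] == ['t1', 't2']
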
+class Position(Struct):
+    '''
+    Basic pp (personal/piker position) model with attached clearing
+    transaction history.
+
+    '''
+    symbol: Symbol
+
+    # can be +ve or -ve for long/short
+    size: float
+
+    # "breakeven price" above or below which pnl moves above and below
+    # zero for the entirety of the current "trade state".
+    ppu: float
+
+    # unique backend symbol id
+    bsuid: str
+
+    split_ratio: Optional[int] = None
+
+    # ordered record of known constituent trade messages
+    clears: dict[
+        Union[str, int, Status],  # trade id
+        dict[str, Any],  # transaction history summaries
+    ] = {}
+    first_clear_dt: Optional[datetime] = None
+
+    expiry: Optional[datetime] = None
+
+    def to_dict(self) -> dict:
+        return {
+            f: getattr(self, f)
+            for f in self.__struct_fields__
+        }
+
+    def to_pretoml(self) -> tuple[str, dict]:
+        '''
+        Prep this position's data contents for export to toml including
+        re-structuring of the ``.clears`` table to an array of
+        inline-subtables for better ``pps.toml`` compactness.
+
+        '''
+        d = self.to_dict()
+        clears = d.pop('clears')
+        expiry = d.pop('expiry')
+
+        if self.split_ratio is None:
+            d.pop('split_ratio')
+
+        # should be obvious from clears/event table
+        d.pop('first_clear_dt')
+
+        # TODO: we need to figure out how to have one top level
+        # listing venue here even when the backend isn't providing
+        # it via the trades ledger..
+        # drop symbol obj in serialized form
+        s = d.pop('symbol')
+        fqsn = s.front_fqsn()
+
+        if self.expiry is None:
+            d.pop('expiry', None)
+        elif expiry:
+            d['expiry'] = str(expiry)
+
+        toml_clears_list = []
+
+        # reverse sort so latest clears are at top of section?
+        for tid, data in iter_by_dt(clears):
+            inline_table = toml.TomlDecoder().get_empty_inline_table()
+
+            # serialize datetime to parsable `str`
+            inline_table['dt'] = str(data['dt'])
+
+            # insert optional clear fields in column order
+            for k in ['ppu', 'accum_size']:
+                val = data.get(k)
+                if val:
+                    inline_table[k] = val
+
+            # insert required fields
+            for k in ['price', 'size', 'cost']:
+                inline_table[k] = data[k]
+
+            inline_table['tid'] = tid
+            toml_clears_list.append(inline_table)
+
+        d['clears'] = toml_clears_list
+
+        return fqsn, d
+
+    def ensure_state(self) -> None:
+        '''
+        Audit either the `.size` and `.ppu` local instance vars against
+        the clears table calculations and return the calc-ed values if
+        they differ and log warnings to console.
+
+        '''
+        clears = list(self.clears.values())
+        self.first_clear_dt = min(list(entry['dt'] for entry in clears))
+        last_clear = clears[-1]
+
+        csize = self.calc_size()
+        accum = last_clear['accum_size']
+        if not self.expired():
+            if (
+                csize != accum
+                and csize != round(accum * self.split_ratio or 1)
+            ):
+                raise ValueError(f'Size mismatch: {csize}')
+        else:
+            assert csize == 0, 'Contract is expired but non-zero size?'
+
+        if self.size != csize:
+            log.warning(
+                'Position state mismatch:\n'
+                f'{self.size} => {csize}'
+            )
+            self.size = csize
+
+        cppu = self.calc_ppu()
+        ppu = last_clear['ppu']
+        if (
+            cppu != ppu
+            and self.split_ratio is not None
+            # handle any split info entered (for now) manually by user
+            and cppu != (ppu / self.split_ratio)
+        ):
+            raise ValueError(f'PPU mismatch: {cppu}')
+
+        if self.ppu != cppu:
+            log.warning(
+                'Position state mismatch:\n'
+                f'{self.ppu} => {cppu}'
+            )
+            self.ppu = cppu
+
+    def update_from_msg(
+        self,
+        msg: BrokerdPosition,
+
+    ) -> None:
+
+        # XXX: better place to do this?
+        symbol = self.symbol
+
+        lot_size_digits = symbol.lot_size_digits
+        ppu, size = (
+            round(
+                msg['avg_price'],
+                ndigits=symbol.tick_size_digits
+            ),
+            round(
+                msg['size'],
+                ndigits=lot_size_digits
+            ),
+        )
+
+        self.ppu = ppu
+        self.size = size
+
+    @property
+    def dsize(self) -> float:
+        '''
+        The "dollar" size of the pp, normally in trading (fiat) unit
+        terms.
+
+        '''
+        return self.ppu * self.size
+
+    # TODO: idea: "real LIFO" dynamic positioning.
+    # - when a trade takes place where the pnl for
+    # the (set of) trade(s) is below the breakeven price
+    # it may be that the trader took a +ve pnl on a short(er)
+    # term trade in the same account.
+    # - in this case we could recalc the be price to
+    # be reverted back to it's prior value before the nearest term
+    # trade was opened.?
+    # def lifo_price() -> float:
+    #     ...
+
+    def iter_clears(self) -> Iterator[tuple[str, dict]]:
+        '''
+        Iterate the internally managed ``.clears: dict`` table in
+        datetime-stamped order.
+
+        '''
+        return iter_by_dt(self.clears)
+
+    def calc_ppu(
+        self,
+        # include transaction cost in breakeven price
+        # and presume the worst case of the same cost
+        # to exit this transaction (even though in reality
+        # it will be dynamic based on exit stratetgy).
+        cost_scalar: float = 2,
+
+    ) -> float:
+        '''
+        Compute the "price-per-unit" price for the given non-zero sized
+        rolling position.
+
+        The recurrence relation which computes this (exponential) mean
+        per new clear which **increases** the accumulative postiion size
+        is:
+
+        ppu[-1] = (
+            ppu[-2] * accum_size[-2]
+            +
+            ppu[-1] * size
+        ) / accum_size[-1]
+
+        where `cost_basis` for the current step is simply the price
+        * size of the most recent clearing transaction.
+
+        '''
+        asize_h: list[float] = []  # historical accumulative size
+        ppu_h: list[float] = []  # historical price-per-unit
+
+        tid: str
+        entry: dict[str, Any]
+        for (tid, entry) in self.iter_clears():
+            clear_size = entry['size']
+            clear_price = entry['price']
+
+            last_accum_size = asize_h[-1] if asize_h else 0
+            accum_size = last_accum_size + clear_size
+            accum_sign = copysign(1, accum_size)
+
+            sign_change: bool = False
+
+            if accum_size == 0:
+                ppu_h.append(0)
+                asize_h.append(0)
+                continue
+
+            # test if the pp somehow went "passed" a net zero size state
+            # resulting in a change of the "sign" of the size (+ve for
+            # long, -ve for short).
+            sign_change = (
+                copysign(1, last_accum_size) + accum_sign == 0
+                and last_accum_size != 0
+            )
+
+            # since we passed the net-zero-size state the new size
+            # after sum should be the remaining size the new
+            # "direction" (aka, long vs. short) for this clear.
+            if sign_change:
+                clear_size = accum_size
+                abs_diff = abs(accum_size)
+                asize_h.append(0)
+                ppu_h.append(0)
+
+            else:
+                # old size minus the new size gives us size diff with
+                # +ve -> increase in pp size
+                # -ve -> decrease in pp size
+                abs_diff = abs(accum_size) - abs(last_accum_size)
+
+            # XXX: LIFO breakeven price update. only an increaze in size
+            # of the position contributes the breakeven price,
+            # a decrease does not (i.e. the position is being made
+            # smaller).
+            # abs_clear_size = abs(clear_size)
+            abs_new_size = abs(accum_size)
+
+            if abs_diff > 0:
+
+                cost_basis = (
+                    # cost basis for this clear
+                    clear_price * abs(clear_size)
+                    +
+                    # transaction cost
+                    accum_sign * cost_scalar * entry['cost']
+                )
+
+                if asize_h:
+                    size_last = abs(asize_h[-1])
+                    cb_last = ppu_h[-1] * size_last
+                    ppu = (cost_basis + cb_last) / abs_new_size
+
+                else:
+                    ppu = cost_basis / abs_new_size
+
+                ppu_h.append(ppu)
+                asize_h.append(accum_size)
+
+            else:
+                # on "exit" clears from a given direction,
+                # only the size changes not the price-per-unit
+                # need to be updated since the ppu remains constant
+                # and gets weighted by the new size.
+                asize_h.append(accum_size)
+                ppu_h.append(ppu_h[-1])
+
+        final_ppu = ppu_h[-1] if ppu_h else 0
+
+        # handle any split info entered (for now) manually by user
+        if self.split_ratio is not None:
+            final_ppu /= self.split_ratio
+
+        return final_ppu
+
+    def expired(self) -> bool:
+        '''
+        Predicate which checks if the contract/instrument is past its expiry.
+
+        '''
+        return bool(self.expiry) and self.expiry < now()
+
+    def calc_size(self) -> float:
+        '''
+        Calculate the unit size of this position in the destination
+        asset using the clears/trade event table; zero if expired.
+
+        '''
+        size: float = 0
+
+        # time-expired pps (normally derivatives) are "closed"
+        # and have a zero size.
+        if self.expired():
+            return 0
+
+        for tid, entry in self.clears.items():
+            size += entry['size']
+
+        if self.split_ratio is not None:
+            size = round(size * self.split_ratio)
+
+        return size
+
+    def minimize_clears(
+        self,
+
+    ) -> dict[str, dict]:
+        '''
+        Minimize the position's clears entries by removing
+        all transactions before the last net zero size to avoid
+        unecessary history irrelevant to the current pp state.
+
+        '''
+        size: float = 0
+        clears_since_zero: list[tuple(str, dict)] = []
+
+        # TODO: we might just want to always do this when iterating
+        # a ledger? keep a state of the last net-zero and only do the
+        # full iterate when no state was stashed?
+
+        # scan for the last "net zero" position by iterating
+        # transactions until the next net-zero size, rinse, repeat.
+        for tid, clear in self.clears.items():
+            size += clear['size']
+            clears_since_zero.append((tid, clear))
+
+            if size == 0:
+                clears_since_zero.clear()
+
+        self.clears = dict(clears_since_zero)
+        return self.clears
+
+    def add_clear(
+        self,
+        t: Transaction,
+    ) -> dict:
+        '''
+        Update clearing table and populate rolling ppu and accumulative
+        size in both the clears entry and local attrs state.
+
+        '''
+        clear = self.clears[t.tid] = {
+            'cost': t.cost,
+            'price': t.price,
+            'size': t.size,
+            'dt': t.dt,
+        }
+
+        # TODO: compute these incrementally instead
+        # of re-looping through each time resulting in O(n**2)
+        # behaviour..?
+
+        # NOTE: we compute these **after** adding the entry in order to
+        # make the recurrence relation math work inside
+        # ``.calc_size()``.
+        self.size = clear['accum_size'] = self.calc_size()
+        self.ppu = clear['ppu'] = self.calc_ppu()
+
+        return clear
+
+    def sugest_split(self) -> float:
+        ...
+
+
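Aside: a worked instance of the `calc_ppu()` recurrence for two size-increasing clears with zero transaction cost (numbers invented): buy 1 @ 100, then 1 @ 110, giving ppu = (100*1 + 110*1) / 2 = 105:

    # step 1: the first clear seeds the state
    ppu_h, asize_h = [100.0], [1.0]

    # step 2: cost_basis = price * size (costs zeroed for clarity)
    cost_basis = 110.0 * 1.0
    cb_last = ppu_h[-1] * abs(asize_h[-1])   # 100.0
    abs_new_size = abs(asize_h[-1] + 1.0)    # 2.0
    ppu = (cost_basis + cb_last) / abs_new_size
    assert ppu == 105.0
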
+class PpTable(Struct):
+
+    brokername: str
+    acctid: str
+    pps: dict[str, Position]
+    conf: Optional[dict] = {}
+
+    def update_from_trans(
+        self,
+        trans: dict[str, Transaction],
+        cost_scalar: float = 2,
+
+    ) -> dict[str, Position]:
+
+        pps = self.pps
+        updated: dict[str, Position] = {}
+
+        # lifo update all pps from records
+        for tid, t in trans.items():
+
+            pp = pps.setdefault(
+                t.bsuid,
+
+                # if no existing pp, allocate fresh one.
+                Position(
+                    Symbol.from_fqsn(
+                        t.fqsn,
+                        info={},
+                    ),
+                    size=0.0,
+                    ppu=0.0,
+                    bsuid=t.bsuid,
+                    expiry=t.expiry,
+                )
+            )
+            clears = pp.clears
+            if clears:
+                first_clear_dt = pp.first_clear_dt
+
+                # don't do updates for ledger records we already have
+                # included in the current pps state.
+                if (
+                    t.tid in clears
+                    or first_clear_dt and t.dt < first_clear_dt
+                ):
+                    # NOTE: likely you'll see repeats of the same
+                    # ``Transaction`` passed in here if/when you are restarting
+                    # a ``brokerd.ib`` where the API will re-report trades from
+                    # the current session, so we need to make sure we don't
+                    # "double count" these in pp calculations.
+                    continue
+
+            # update clearing table
+            pp.add_clear(t)
+            updated[t.bsuid] = pp
+
+        # minimize clears tables and update sizing.
+        for bsuid, pp in updated.items():
+            pp.ensure_state()
+
+        return updated
+
+    def dump_active(
+        self,
+    ) -> tuple[
+        dict[str, Position],
+        dict[str, Position]
+    ]:
+        '''
+        Iterate all tabulated positions, render active positions to
+        a ``dict`` format amenable to serialization (via TOML) and drop
+        from state (``.pps``) as well as return in a ``dict`` all
+        ``Position``s which have recently closed.
+
+        '''
+        # NOTE: newly closed position are also important to report/return
+        # since a consumer, like an order mode UI ;), might want to react
+        # based on the closure (for example removing the breakeven line
+        # and clearing the entry from any lists/monitors).
+        closed_pp_objs: dict[str, Position] = {}
+        open_pp_objs: dict[str, Position] = {}
+
+        pp_objs = self.pps
+        for bsuid in list(pp_objs):
+            pp = pp_objs[bsuid]
+
+            # XXX: debug hook for size mismatches
+            # qqqbsuid = 320227571
+            # if bsuid == qqqbsuid:
+            #     breakpoint()
+
+            pp.ensure_state()
+
+            if (
+                # "net-zero" is a "closed" position
+                pp.size == 0
+
+                # time-expired pps (normally derivatives) are "closed"
+                or (pp.expiry and pp.expiry < now())
+            ):
+                # for expired cases
+                pp.size = 0
+
+                # NOTE: we DO NOT pop the pp here since it can still be
+                # used to check for duplicate clears that may come in as
+                # new transaction from some backend API and need to be
+                # ignored; the closed positions won't be written to the
+                # ``pps.toml`` since ``pp_active_entries`` above is what's
+                # written.
+                closed_pp_objs[bsuid] = pp
+
+            else:
+                open_pp_objs[bsuid] = pp
+
+        return open_pp_objs, closed_pp_objs
+
+    def to_toml(
+        self,
+    ) -> dict[str, Any]:
+
+        active, closed = self.dump_active()
+
+        # ONLY dict-serialize all active positions; those that are closed
+        # we don't store in the ``pps.toml``.
+        to_toml_dict = {}
+
+        for bsuid, pos in active.items():
+
+            # keep the minimal amount of clears that make up this
+            # position since the last net-zero state.
+            pos.minimize_clears()
+            pos.ensure_state()
+
+            # serialize to pre-toml form
+            fqsn, asdict = pos.to_pretoml()
+            log.info(f'Updating active pp: {fqsn}')
+
+            # XXX: ugh, it's cuz we push the section under
+            # the broker name.. maybe we need to rethink this?
+            brokerless_key = fqsn.removeprefix(f'{self.brokername}.')
+            to_toml_dict[brokerless_key] = asdict
+
+        return to_toml_dict
+
+    def write_config(self) -> None:
+        '''
+        Write the current position table to the user's ``pps.toml``.
+
+        '''
+        # TODO: show diff output?
+        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
+        print(f'Updating ``pps.toml`` for {path}:\n')
+
+        # active, closed_pp_objs = table.dump_active()
+        pp_entries = self.to_toml()
+        self.conf[self.brokername][self.acctid] = pp_entries
+
+        # TODO: why tf haven't they already done this for inline
+        # tables smh..
+        enc = PpsEncoder(preserve=True)
+        # table_bs_type = type(toml.TomlDecoder().get_empty_inline_table())
+        enc.dump_funcs[
+            toml.decoder.InlineTableDict
+        ] = enc.dump_inline_table
+
+        config.write(
+            self.conf,
+            'pps',
+            encoder=enc,
+        )
+
+
+def load_pps_from_ledger(
+
+    brokername: str,
+    acctname: str,
+
+    # post normalization filter on ledger entries to be processed
+    filter_by: Optional[list[dict]] = None,
+
+) -> tuple[
+    dict[str, Transaction],
+    dict[str, Position],
+]:
+    '''
+    Open a ledger file by broker name and account and read in and
+    process any trade records into our normalized ``Transaction`` form
+    and then update the equivalent ``Pptable`` and deliver the two
+    bsuid-mapped dict-sets of the transactions and pps.
+
+    '''
+    with (
+        open_trade_ledger(brokername, acctname) as ledger,
+        open_pps(brokername, acctname) as table,
+    ):
+        if not ledger:
+            # null case, no ledger file with content
+            return {}
+
+        mod = get_brokermod(brokername)
+        src_records: dict[str, Transaction] = mod.norm_trade_records(ledger)
+
+        if filter_by:
+            records = {}
+            bsuids = set(filter_by)
+            for tid, r in src_records.items():
+                if r.bsuid in bsuids:
+                    records[tid] = r
+        else:
+            records = src_records
+
+        updated = table.update_from_trans(records)
+
+    return records, updated
+
+
+# TODO: instead see if we can hack tomli and tomli-w to do the same:
+# - https://github.com/hukkin/tomli
+# - https://github.com/hukkin/tomli-w
+class PpsEncoder(toml.TomlEncoder):
+    '''
+    Special "styled" encoder that makes a ``pps.toml`` redable and
+    compact by putting `.clears` tables inline and everything else
+    flat-ish.
+
+    '''
+    separator = ','
+
+    def dump_list(self, v):
+        '''
+        Dump an inline list with a newline after every element and
+        with consideration for denoted inline table types.
+
+        '''
+        retval = "[\n"
+        for u in v:
+            if isinstance(u, toml.decoder.InlineTableDict):
+                out = self.dump_inline_table(u)
+            else:
+                out = str(self.dump_value(u))
+
+            retval += " " + out + "," + "\n"
+        retval += "]"
+        return retval
+
+    def dump_inline_table(self, section):
+        """Preserve inline table in its compact syntax instead of expanding
+        into subsection.
+        https://github.com/toml-lang/toml#user-content-inline-table
+        """
+        val_list = []
+        for k, v in section.items():
+            # if isinstance(v, toml.decoder.InlineTableDict):
+            if isinstance(v, dict):
+                val = self.dump_inline_table(v)
+            else:
+                val = str(self.dump_value(v))
+
+            val_list.append(k + " = " + val)
+
+        retval = "{ " + ", ".join(val_list) + " }"
+        return retval
+
+    def dump_sections(self, o, sup):
+        retstr = ""
+        if sup != "" and sup[-1] != ".":
+            sup += '.'
+        retdict = self._dict()
+        arraystr = ""
+        for section in o:
+            qsection = str(section)
+            value = o[section]
+
+            if not re.match(r'^[A-Za-z0-9_-]+$', section):
+                qsection = toml.encoder._dump_str(section)
+
+            # arrayoftables = False
+            if (
+                self.preserve
+                and isinstance(value, toml.decoder.InlineTableDict)
+            ):
+                retstr += (
+                    qsection
+                    +
+                    " = "
+                    +
+                    self.dump_inline_table(o[section])
+                    +
+                    '\n'  # only on the final terminating left brace
+                )
+
+            # XXX: this code i'm pretty sure is just blatantly bad
+            # and/or wrong..
+            # if isinstance(o[section], list):
+            #     for a in o[section]:
+            #         if isinstance(a, dict):
+            #             arrayoftables = True
+            #     if arrayoftables:
+            #         for a in o[section]:
+            #             arraytabstr = "\n"
+            #             arraystr += "[[" + sup + qsection + "]]\n"
+            #             s, d = self.dump_sections(a, sup + qsection)
+            #             if s:
+            #                 if s[0] == "[":
+            #                     arraytabstr += s
+            #                 else:
+            #                     arraystr += s
+            #             while d:
+            #                 newd = self._dict()
+            #                 for dsec in d:
+            #                     s1, d1 = self.dump_sections(d[dsec], sup +
+            #                                                 qsection + "." +
+            #                                                 dsec)
+            #                     if s1:
+            #                         arraytabstr += ("[" + sup + qsection +
+            #                                         "." + dsec + "]\n")
+            #                         arraytabstr += s1
+            #                     for s1 in d1:
+            #                         newd[dsec + "." + s1] = d1[s1]
+            #                 d = newd
+            #                 arraystr += arraytabstr
+
+            elif isinstance(value, dict):
+                retdict[qsection] = o[section]
+
+            elif o[section] is not None:
+                retstr += (
+                    qsection
+                    +
+                    " = "
+                    +
+                    str(self.dump_value(o[section]))
+                )
+
+                # if not isinstance(value, dict):
+                if not isinstance(value, toml.decoder.InlineTableDict):
+                    # inline tables should not contain newlines:
+                    # https://toml.io/en/v1.0.0#inline-table
+                    retstr += '\n'
+
+            else:
+                raise ValueError(value)
+
+        retstr += arraystr
+        return (retstr, retdict)
+
+
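Aside: what the encoder above buys in the rendered `pps.toml`: each clear becomes one compact inline table instead of a multi-line subsection. A sketch of the intended shape (section key and all values invented):

    # [ib.algopaper."mnq.globex.20220932"]
    # size = -1.0
    # ppu = 10.0
    # clears = [
    #  { dt = "2022-08-31T18:00:00+00:00", price = 10.0, size = -1.0, cost = 0.5, tid = "abc123" },
    # ]

The `preserve=True` flag plus registering `dump_inline_table` in `dump_funcs` is what routes `InlineTableDict` values through the compact renderer rather than toml's default section expansion.
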
+@cm
+def open_pps(
+    brokername: str,
+    acctid: str,
+    write_on_exit: bool = True,
+
+) -> PpTable:
+    '''
+    Read out broker-specific position entries from
+    incremental update file: ``pps.toml``.
+
+    '''
+    conf, path = config.load('pps')
+    brokersection = conf.setdefault(brokername, {})
+    pps = brokersection.setdefault(acctid, {})
+
+    # TODO: ideally we can pass in an existing
+    # pps state to this right? such that we
+    # don't have to do a ledger reload all the
+    # time.. a couple ideas I can think of,
+    # - mirror this in some client side actor which
+    # does the actual ledger updates (say the paper
+    # engine proc if we decide to always spawn it?),
+    # - do diffs against updates from the ledger writer
+    # actor and the in-mem state here?
+
+    pp_objs = {}
+    table = PpTable(
+        brokername,
+        acctid,
+        pp_objs,
+        conf=conf,
+    )
+
+    # unmarshal/load ``pps.toml`` config entries into object form
+    # and update `PpTable` obj entries.
+    for fqsn, entry in pps.items():
+        bsuid = entry['bsuid']
+
+        # convert clears sub-tables (only in this form
+        # for toml re-presentation) back into a master table.
+        clears_list = entry['clears']
+
+        # index clears entries in "object" form by tid in a top
+        # level dict instead of a list (as is presented in our
+        # ``pps.toml``).
+        clears = pp_objs.setdefault(bsuid, {})
+
+        # TODO: should be make a ``Struct`` for clear/event entries?
+        # convert "clear events table" from the toml config (list of
+        # a dicts) and load it into object form for use in position
+        # processing of new clear events.
+        trans: list[Transaction] = []
+
+        for clears_table in clears_list:
+            tid = clears_table.pop('tid')
+            dtstr = clears_table['dt']
+            dt = pendulum.parse(dtstr)
+            clears_table['dt'] = dt
+            trans.append(Transaction(
+                fqsn=bsuid,
+                bsuid=bsuid,
+                tid=tid,
+                size=clears_table['size'],
+                price=clears_table['price'],
+                cost=clears_table['cost'],
+                dt=dt,
+            ))
+            clears[tid] = clears_table
+
+        size = entry['size']
+
+        # TODO: remove but, handle old field name for now
+        ppu = entry.get('ppu', entry.get('be_price', 0))
+        split_ratio = entry.get('split_ratio')
+
+        expiry = entry.get('expiry')
+        if expiry:
+            expiry = pendulum.parse(expiry)
+
+        pp = pp_objs[bsuid] = Position(
+            Symbol.from_fqsn(fqsn, info={}),
+            size=size,
+            ppu=ppu,
+            split_ratio=split_ratio,
+            expiry=expiry,
+            bsuid=entry['bsuid'],
+        )
+
+        # XXX: super critical, we need to be sure to include
+        # all pps.toml clears to avoid reusing clears that were
+        # already included in the current incremental update
+        # state, since today's records may have already been
+        # processed!
+        for t in trans:
+            pp.add_clear(t)
+
+        # audit entries loaded from toml
+        pp.ensure_state()
+
+    try:
+        yield table
+    finally:
+        if write_on_exit:
+            table.write_config()
+
+
+if __name__ == '__main__':
+    import sys
+
+    args = sys.argv
+    assert len(args) > 1, 'Specifiy account(s) from `brokers.toml`'
+    args = args[1:]
+    for acctid in args:
+        broker, name = acctid.split('.')
+        trans, updated_pps = load_pps_from_ledger(broker, name)
+        print(
+            f'Processing transactions into pps for {broker}:{acctid}\n'
+            f'{pformat(trans)}\n\n'
+            f'{pformat(updated_pps)}'
+        )
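
Aside: end-to-end use of the new position API — the table context reads `pps.toml`, audits each loaded pp, and (optionally) rewrites the file on exit. The broker/account strings are placeholders and the `piker.pp` module path is assumed from this diff:

    from piker.pp import open_pps

    with open_pps('ib', 'algopaper', write_on_exit=False) as table:
        open_objs, closed_objs = table.dump_active()
        for bsuid, pp in open_objs.items():
            print(pp.symbol.front_fqsn(), pp.size, pp.ppu)
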
@@ -32,16 +32,22 @@ def mk_marker_path(
     style: str,

 ) -> QGraphicsPathItem:
-    """Add a marker to be displayed on the line wrapped in a ``QGraphicsPathItem``
-    ready to be placed using scene coordinates (not view).
+    '''
+    Add a marker to be displayed on the line wrapped in
+    a ``QGraphicsPathItem`` ready to be placed using scene coordinates
+    (not view).

     **Arguments**
     style        String indicating the style of marker to add:
                  ``'<|'``, ``'|>'``, ``'>|'``, ``'|<'``, ``'<|>'``,
                  ``'>|<'``, ``'^'``, ``'v'``, ``'o'``
-    size         Size of the marker in pixels.

-    """
+    This code is taken nearly verbatim from the
+    `InfiniteLine.addMarker()` method but does not attempt do be aware
+    of low(er) level graphics controls and expects for the output
+    polygon to be applied to a ``QGraphicsPathItem``.
+
+    '''
     path = QtGui.QPainterPath()
+
     if style == 'o':
|
||||||
|
|
||||||
|
|
||||||
class LevelMarker(QGraphicsPathItem):
|
class LevelMarker(QGraphicsPathItem):
|
||||||
'''An arrow marker path graphich which redraws itself
|
'''
|
||||||
|
An arrow marker path graphich which redraws itself
|
||||||
to the specified view coordinate level on each paint cycle.
|
to the specified view coordinate level on each paint cycle.
|
||||||
|
|
||||||
'''
|
'''
|
||||||
|
@ -104,7 +111,8 @@ class LevelMarker(QGraphicsPathItem):
|
||||||
|
|
||||||
# get polygon and scale
|
# get polygon and scale
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.scale(size, size)
|
# self.setScale(size, size)
|
||||||
|
self.setScale(size)
|
||||||
|
|
||||||
# interally generates path
|
# interally generates path
|
||||||
self._style = None
|
self._style = None
|
||||||
|
@ -114,6 +122,7 @@ class LevelMarker(QGraphicsPathItem):
|
||||||
|
|
||||||
self.get_level = get_level
|
self.get_level = get_level
|
||||||
self._on_paint = on_paint
|
self._on_paint = on_paint
|
||||||
|
|
||||||
self.scene_x = lambda: chart.marker_right_points()[1]
|
self.scene_x = lambda: chart.marker_right_points()[1]
|
||||||
self.level: float = 0
|
self.level: float = 0
|
||||||
self.keep_in_view = keep_in_view
|
self.keep_in_view = keep_in_view
|
||||||
|
@ -149,12 +158,9 @@ class LevelMarker(QGraphicsPathItem):
|
||||||
def w(self) -> float:
|
def w(self) -> float:
|
||||||
return self.path_br().width()
|
return self.path_br().width()
|
||||||
|
|
||||||
def position_in_view(
|
def position_in_view(self) -> None:
|
||||||
self,
|
'''
|
||||||
# level: float,
|
Show a pp off-screen indicator for a level label.
|
||||||
|
|
||||||
) -> None:
|
|
||||||
'''Show a pp off-screen indicator for a level label.
|
|
||||||
|
|
||||||
This is like in fps games where you have a gps "nav" indicator
|
This is like in fps games where you have a gps "nav" indicator
|
||||||
but your teammate is outside the range of view, except in 2D, on
|
but your teammate is outside the range of view, except in 2D, on
|
||||||
|
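The pinning behaviour described by that docstring reduces to clamping the marker's level into the view's visible y-range; a tiny sketch (function and names here are illustrative, not the widget api):

def pin_to_view(level: float, ymn: float, ymx: float) -> float:
    # keep an off-screen level pinned to the nearest view edge,
    # like an fps-style "nav" indicator for a teammate out of range
    return max(ymn, min(level, ymx))

assert pin_to_view(105.0, 90.0, 100.0) == 100.0  # above range -> pinned to top
assert pin_to_view(42.0, 90.0, 100.0) == 90.0    # below range -> pinned to bottom
assert pin_to_view(95.0, 90.0, 100.0) == 95.0    # on-screen -> untouched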
@@ -162,7 +168,6 @@ class LevelMarker(QGraphicsPathItem):
         '''
         level = self.get_level()
-
         view = self.chart.getViewBox()
         vr = view.state['viewRange']
         ymn, ymx = vr[1]

@@ -186,7 +191,6 @@ class LevelMarker(QGraphicsPathItem):
             )
 
         elif level < ymn:  # pin to bottom of view
-
             self.setPos(
                 QPointF(
                     x,

@@ -211,7 +215,8 @@ class LevelMarker(QGraphicsPathItem):
         w: QtWidgets.QWidget
 
     ) -> None:
-        '''Core paint which we override to always update
+        '''
+        Core paint which we override to always update
         our marker position in scene coordinates from a
         view coordinate "level".

@@ -235,11 +240,12 @@ def qgo_draw_markers(
     right_offset: float,
 
 ) -> float:
-    """Paint markers in ``pg.GraphicsItem`` style by first
+    '''
+    Paint markers in ``pg.GraphicsItem`` style by first
     removing the view transform for the painter, drawing the markers
     in scene coords, then restoring the view coords.
 
-    """
+    '''
     # paint markers in native coordinate system
     orig_tr = p.transform()
@@ -19,15 +19,16 @@ Main app startup and run.
 
 '''
 from functools import partial
+from types import ModuleType
 
 from PyQt5.QtCore import QEvent
 import trio
 
 from .._daemon import maybe_spawn_brokerd
-from ..brokers import get_brokermod
 from . import _event
 from ._exec import run_qtractor
 from ..data.feed import install_brokerd_search
+from ..data._source import unpack_fqsn
 from . import _search
 from ._chart import GodWidget
 from ..log import get_logger

@@ -36,27 +37,26 @@ log = get_logger(__name__)
 
 
 async def load_provider_search(
-    broker: str,
+    brokermod: str,
     loglevel: str,
 
 ) -> None:
 
-    log.info(f'loading brokerd for {broker}..')
+    name = brokermod.name
+    log.info(f'loading brokerd for {name}..')
 
     async with (
 
         maybe_spawn_brokerd(
-            broker,
+            name,
             loglevel=loglevel
         ) as portal,
 
         install_brokerd_search(
             portal,
-            get_brokermod(broker),
+            brokermod,
         ),
     ):
 
         # keep search engine stream up until cancelled
         await trio.sleep_forever()

@@ -66,8 +66,8 @@ async def _async_main(
     # implicit required argument provided by ``qtractor_run()``
     main_widget: GodWidget,
 
-    sym: str,
-    brokernames: str,
+    syms: list[str],
+    brokers: dict[str, ModuleType],
     loglevel: str,
 
 ) -> None:

@@ -78,6 +78,8 @@ async def _async_main(
 
     """
     from . import _display
+    from ._pg_overrides import _do_overrides
+    _do_overrides()
 
     godwidget = main_widget

@@ -97,6 +99,11 @@ async def _async_main(
     sbar = godwidget.window.status_bar
     starting_done = sbar.open_status('starting ze sexy chartz')
 
+    needed_brokermods: dict[str, ModuleType] = {}
+    for fqsn in syms:
+        brokername, *_ = unpack_fqsn(fqsn)
+        needed_brokermods[brokername] = brokers[brokername]
+
     async with (
         trio.open_nursery() as root_n,
     ):
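A rough sketch of the fqsn ("fully qualified symbol name") unpacking used in the new loop above; the real `unpack_fqsn()` lives in `piker.data._source`, and this toy version only assumes the `<key>.<provider>` convention visible elsewhere in this diff:

def unpack_fqsn(fqsn: str) -> tuple[str, str]:
    # split e.g. 'xbtusd.kraken' -> ('kraken', 'xbtusd'); broker name
    # first, matching the `brokername, *_ = unpack_fqsn(fqsn)` usage above
    key, _, provider = fqsn.rpartition('.')
    return provider, key

brokername, *_ = unpack_fqsn('xbtusd.kraken')
assert brokername == 'kraken'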
@@ -107,17 +114,20 @@ async def _async_main(
         # setup search widget and focus main chart view at startup
         # search widget is a singleton alongside the godwidget
         search = _search.SearchWidget(godwidget=godwidget)
-        search.bar.unfocus()
-        godwidget.hbox.addWidget(search)
+        # search.bar.unfocus()
+        # godwidget.hbox.addWidget(search)
         godwidget.search = search
 
-        symbol, _, provider = sym.rpartition('.')
+        symbols: list[str] = []
+
+        for sym in syms:
+            symbol, _, provider = sym.rpartition('.')
+            symbols.append(symbol)
 
         # this internally starts a ``display_symbol_data()`` task above
-        order_mode_ready = await godwidget.load_symbol(
+        order_mode_ready = await godwidget.load_symbols(
             provider,
-            symbol,
+            symbols,
             loglevel
         )

@@ -135,8 +145,12 @@ async def _async_main(
     ):
         # load other providers into search **after**
         # the chart's select cache
-        for broker in brokernames:
-            root_n.start_soon(load_provider_search, broker, loglevel)
+        for brokername, mod in needed_brokermods.items():
+            root_n.start_soon(
+                load_provider_search,
+                mod,
+                loglevel,
+            )
 
         await order_mode_ready.wait()

@@ -165,8 +179,8 @@ async def _async_main(
 
 
 def _main(
-    sym: str,
-    brokernames: [str],
+    syms: list[str],
+    brokermods: list[ModuleType],
     piker_loglevel: str,
     tractor_kwargs,
 ) -> None:

@@ -177,7 +191,11 @@ def _main(
     '''
     run_qtractor(
         func=_async_main,
-        args=(sym, brokernames, piker_loglevel),
-        main_widget=GodWidget,
+        args=(
+            syms,
+            {mod.name: mod for mod in brokermods},
+            piker_loglevel,
+        ),
+        main_widget_type=GodWidget,
         tractor_kwargs=tractor_kwargs,
     )
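The provider-search loading above leans on trio's structured concurrency: one long-lived task per broker module, all parented to the root nursery. A self-contained sketch of the same spawn pattern (broker names here are purely illustrative):

import trio

async def provider_search(name: str) -> None:
    # stand-in for load_provider_search(): runs until cancelled
    print(f'search engine up for {name}')
    await trio.sleep_forever()

async def main() -> None:
    async with trio.open_nursery() as root_n:
        for name in ('kraken', 'binance'):
            root_n.start_soon(provider_search, name)
        await trio.sleep(0.1)
        root_n.cancel_scope.cancel()  # tears down all child tasks

trio.run(main)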
@@ -39,12 +39,17 @@ class Axis(pg.AxisItem):
     '''
     A better axis that sizes tick contents considering font size.
 
+    Also includes tick-values lru caching, originally proposed upstream
+    but never accepted:
+    https://github.com/pyqtgraph/pyqtgraph/pull/2160
+
     '''
     def __init__(
         self,
         linkedsplits,
         typical_max_str: str = '100 000.000',
         text_color: str = 'bracket',
+        lru_cache_tick_strings: bool = True,
         **kwargs
 
     ) -> None:

@@ -91,6 +96,34 @@ class Axis(pg.AxisItem):
         # size the pertinent axis dimension to a "typical value"
         self.size_to_values()
 
+        # NOTE: requires the overridden ``.tickValues()`` method seen below.
+        if lru_cache_tick_strings:
+            self.tickStrings = lru_cache(
+                maxsize=2**20
+            )(self.tickStrings)
+
+    # NOTE: only overridden to cast tick values entries into tuples
+    # for use with the lru caching.
+    def tickValues(
+        self,
+        minVal: float,
+        maxVal: float,
+        size: int,
+
+    ) -> list[tuple[float, tuple[str]]]:
+        '''
+        Repack tick values into tuples for lru caching.
+
+        '''
+        ticks = []
+        for scalar, values in super().tickValues(minVal, maxVal, size):
+            ticks.append((
+                scalar,
+                tuple(values),  # cast to tuple -> hashable for lru caching
+            ))
+
+        return ticks
+
     @property
     def text_color(self) -> str:
         return self._text_color
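Why the tuple repacking in the override above matters: `functools.lru_cache` hashes its arguments, and the stock pyqtgraph `tickValues()` yields lists, which aren't hashable. A minimal demo of the same trick:

from functools import lru_cache

@lru_cache(maxsize=2**20)
def tick_strings(values: tuple[float, ...], scale: float) -> list[str]:
    return [f'{v * scale:g}' for v in values]

print(tick_strings((1.0, 2.0), 10.0))  # computed once
print(tick_strings((1.0, 2.0), 10.0))  # served from the cache
# tick_strings([1.0, 2.0], 10.0)       # TypeError: unhashable type: 'list'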
@@ -19,7 +19,11 @@ High level chart-widget apis.
 
 '''
 from __future__ import annotations
-from typing import Optional, TYPE_CHECKING
+from typing import (
+    Iterator,
+    Optional,
+    TYPE_CHECKING,
+)
 
 from PyQt5 import QtCore, QtWidgets
 from PyQt5.QtCore import (

@@ -68,6 +72,9 @@ from ._forms import FieldsForm
 from .._profile import pg_profile_enabled, ms_slower_then
 from ._overlay import PlotItemOverlay
 from ._flows import Flow
+from ._search import SearchWidget
+from . import _pg_overrides as pgo
+from .._profile import Profiler
 
 if TYPE_CHECKING:
     from ._display import DisplayState

@@ -85,6 +92,9 @@ class GodWidget(QWidget):
     modify them.
 
     '''
+    search: SearchWidget
+    mode_name: str = 'god'
+
     def __init__(
 
         self,

@@ -94,6 +104,8 @@ class GodWidget(QWidget):
 
         super().__init__(parent)
 
+        self.search: Optional[SearchWidget] = None
+
         self.hbox = QHBoxLayout(self)
         self.hbox.setContentsMargins(0, 0, 0, 0)
         self.hbox.setSpacing(6)

@@ -115,7 +127,10 @@ class GodWidget(QWidget):
         # self.vbox.addLayout(self.hbox)
 
         self._chart_cache: dict[str, LinkedSplits] = {}
-        self.linkedsplits: Optional[LinkedSplits] = None
+
+        self.hist_linked: Optional[LinkedSplits] = None
+        self.rt_linked: Optional[LinkedSplits] = None
+        self._active_cursor: Optional[Cursor] = None
 
         # assigned in the startup func `_async_main()`
         self._root_n: trio.Nursery = None

@@ -123,6 +138,14 @@ class GodWidget(QWidget):
         self._widgets: dict[str, QWidget] = {}
         self._resizing: bool = False
 
+        # TODO: do we need this, when would god get resized
+        # and the window does not? Never right?!
+        # self.reg_for_resize(self)
+
+    @property
+    def linkedsplits(self) -> LinkedSplits:
+        return self.rt_linked
+
     # def init_timeframes_ui(self):
     #     self.tf_layout = QHBoxLayout()
     #     self.tf_layout.setSpacing(0)

@@ -148,25 +171,25 @@ class GodWidget(QWidget):
     def set_chart_symbol(
         self,
         symbol_key: str,  # of form <fqsn>.<providername>
-        linkedsplits: LinkedSplits,  # type: ignore
+        all_linked: tuple[LinkedSplits, LinkedSplits],  # type: ignore
 
     ) -> None:
         # re-sort org cache symbol list in LIFO order
         cache = self._chart_cache
         cache.pop(symbol_key, None)
-        cache[symbol_key] = linkedsplits
+        cache[symbol_key] = all_linked
 
     def get_chart_symbol(
         self,
         symbol_key: str,
 
-    ) -> LinkedSplits:  # type: ignore
+    ) -> tuple[LinkedSplits, LinkedSplits]:  # type: ignore
         return self._chart_cache.get(symbol_key)
 
-    async def load_symbol(
+    async def load_symbols(
         self,
         providername: str,
-        symbol_key: str,
+        symbol_keys: list[str],
         loglevel: str,
         reset: bool = False,
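The "LIFO re-sort" in `set_chart_symbol()` is just the insertion-order guarantee of python dicts: popping and re-inserting a key moves it to the end, so the most recently set symbol is always last. For example:

cache: dict[str, str] = {'xbtusd.kraken': 'a', 'ethusd.kraken': 'b'}
key = 'xbtusd.kraken'
linked = cache.pop(key, None)  # drop the stale slot (if any)
cache[key] = linked            # re-insert -> now the newest entry
print(list(cache))             # ['ethusd.kraken', 'xbtusd.kraken']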
@@ -177,81 +200,121 @@ class GodWidget(QWidget):
         Expects a ``numpy`` structured array containing all the ohlcv fields.
 
         '''
+        fqsns: list[str] = []
+
         # our symbol key style is always lower case
-        symbol_key = symbol_key.lower()
+        for key in list(map(str.lower, symbol_keys)):
 
-        # fully qualified symbol name (SNS i guess is what we're making?)
-        fqsn = '.'.join([symbol_key, providername])
+            # fully qualified symbol name (SNS i guess is what we're making?)
+            fqsn = '.'.join([key, providername])
+            fqsns.append(fqsn)
 
-        linkedsplits = self.get_chart_symbol(fqsn)
+        # NOTE: for now we use the first symbol in the set as the "key"
+        # for the overlay of feeds on the chart.
+        group_key = fqsns[0]
+
+        all_linked = self.get_chart_symbol(group_key)
         order_mode_started = trio.Event()
 
         if not self.vbox.isEmpty():
 
-            # XXX: this is CRITICAL especially with pixel buffer caching
-            self.linkedsplits.hide()
-            self.linkedsplits.unfocus()
+            # XXX: seems to make switching slower?
+            # qframe = self.hist_linked.chart.qframe
+            # if qframe.sidepane is self.search:
+            #     qframe.hbox.removeWidget(self.search)
 
-            # XXX: pretty sure we don't need this
-            # remove any existing plots?
-            # XXX: ahh we might want to support cache unloading..
-            # self.vbox.removeWidget(self.linkedsplits)
+            for linked in [self.rt_linked, self.hist_linked]:
+                # XXX: this is CRITICAL especially with pixel buffer caching
+                linked.hide()
+                linked.unfocus()
+
+                # XXX: pretty sure we don't need this
+                # remove any existing plots?
+                # XXX: ahh we might want to support cache unloading..
+                # self.vbox.removeWidget(linked)
 
         # switching to a new viewable chart
-        if linkedsplits is None or reset:
+        if all_linked is None or reset:
             from ._display import display_symbol_data
 
             # we must load a fresh linked charts set
-            linkedsplits = LinkedSplits(self)
+            self.rt_linked = rt_charts = LinkedSplits(self)
+            self.hist_linked = hist_charts = LinkedSplits(self)
 
             # spawn new task to start up and update new sub-chart instances
             self._root_n.start_soon(
                 display_symbol_data,
                 self,
                 providername,
-                symbol_key,
+                fqsns,
                 loglevel,
                 order_mode_started,
             )
 
-            self.set_chart_symbol(fqsn, linkedsplits)
-            self.vbox.addWidget(linkedsplits)
+            # self.vbox.addWidget(hist_charts)
+            self.vbox.addWidget(rt_charts)
+            self.set_chart_symbol(
+                fqsn,
+                (hist_charts, rt_charts),
+            )
+
+            for linked in [hist_charts, rt_charts]:
+                linked.show()
+                linked.focus()
 
-            linkedsplits.show()
-            linkedsplits.focus()
             await trio.sleep(0)
 
         else:
             # symbol is already loaded and ems ready
             order_mode_started.set()
 
-            # TODO:
-            # - we'll probably want per-instrument/provider state here?
-            #   change the order config form over to the new chart
+            self.hist_linked, self.rt_linked = all_linked
 
-            # XXX: since the pp config is a singleton widget we have to
-            # also switch it over to the new chart's interal-layout
-            # self.linkedsplits.chart.qframe.hbox.removeWidget(self.pp_pane)
-            chart = linkedsplits.chart
+            for linked in all_linked:
+                # TODO:
+                # - we'll probably want per-instrument/provider state here?
+                #   change the order config form over to the new chart
 
-            # chart is already in memory so just focus it
-            linkedsplits.show()
-            linkedsplits.focus()
-            linkedsplits.graphics_cycle()
-            await trio.sleep(0)
+                # chart is already in memory so just focus it
+                linked.show()
+                linked.focus()
+                linked.graphics_cycle()
+                await trio.sleep(0)
 
-            # resume feeds *after* rendering chart view asap
-            chart.resume_all_feeds()
+                # resume feeds *after* rendering chart view asap
+                chart = linked.chart
+                if chart:
+                    chart.resume_all_feeds()
 
             # TODO: we need a check to see if the chart
             # last had the xlast in view, if so then shift so it's
             # still in view, if the user was viewing history then
             # do nothing yah?
-            chart.default_view()
+            self.rt_linked.chart.default_view()
 
-        self.linkedsplits = linkedsplits
-        symbol = linkedsplits.symbol
+        # if a history chart instance is already up then
+        # set the search widget as its sidepane.
+        hist_chart = self.hist_linked.chart
+        if hist_chart:
+            hist_chart.qframe.set_sidepane(self.search)
+
+            # NOTE: this is really stupid/hard to follow.
+            # we have to reposition the active position nav
+            # **AFTER** applying the search bar as a sidepane
+            # to the newly switched to symbol.
+            await trio.sleep(0)
+
+            # TODO: probably stick this in some kinda `LooknFeel` API?
+            for tracker in self.rt_linked.mode.trackers.values():
+                pp_nav = tracker.nav
+                if tracker.live_pp.size:
+                    pp_nav.show()
+                    pp_nav.hide_info()
+                else:
+                    pp_nav.hide()
+
+        # set window titlebar info
+        symbol = self.rt_linked.symbol
         if symbol is not None:
             self.window.setWindowTitle(
                 f'{symbol.front_fqsn()} '
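The `order_mode_started` handshake above is a one-shot `trio.Event`: the spawned display task sets it once order mode is live while the caller parks on `.wait()`. In isolation the pattern looks like:

import trio

async def display_task(started: trio.Event) -> None:
    await trio.sleep(0.1)  # ...chart / order-mode startup work...
    started.set()          # fire the one-shot readiness signal

async def main() -> None:
    order_mode_started = trio.Event()
    async with trio.open_nursery() as n:
        n.start_soon(display_task, order_mode_started)
        await order_mode_started.wait()  # resumes once the task signals
        print('order mode ready')

trio.run(main)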
@@ -268,11 +331,23 @@ class GodWidget(QWidget):
         '''
         # go back to view-mode focus (aka chart focus)
         self.clearFocus()
-        self.linkedsplits.chart.setFocus()
+        chart = self.rt_linked.chart
+        if chart:
+            chart.setFocus()
 
-    def resizeEvent(self, event: QtCore.QEvent) -> None:
+    def reg_for_resize(
+        self,
+        widget: QWidget,
+    ) -> None:
+        getattr(widget, 'on_resize')
+        self._widgets[widget.mode_name] = widget
+
+    def on_win_resize(self, event: QtCore.QEvent) -> None:
         '''
-        Top level god widget resize handler.
+        Top level god widget handler from window (the real yaweh) resize
+        events such that any registered widgets which wish to be
+        notified are invoked using our pythonic `.on_resize()` method
+        api.
 
         Where we do UX magic to make things not suck B)
 

@@ -288,6 +363,28 @@ class GodWidget(QWidget):
 
         self._resizing = False
 
+    # on_resize = on_win_resize
+
+    def get_cursor(self) -> Cursor:
+        return self._active_cursor
+
+    def iter_linked(self) -> Iterator[LinkedSplits]:
+        for linked in [self.hist_linked, self.rt_linked]:
+            yield linked
+
+    def resize_all(self) -> None:
+        '''
+        Dynamic resize sequence: adjusts all sub-widgets/charts to
+        sensible default ratios of what space is detected as available
+        on the display / window.
+
+        '''
+        rt_linked = self.rt_linked
+        rt_linked.set_split_sizes()
+        self.rt_linked.resize_sidepanes()
+        self.hist_linked.resize_sidepanes(from_linked=rt_linked)
+        self.search.on_resize()
+
 
 class ChartnPane(QFrame):
     '''

@@ -300,9 +397,9 @@ class ChartnPane(QFrame):
     https://doc.qt.io/qt-5/qwidget.html#composite-widgets
 
     '''
-    sidepane: FieldsForm
+    sidepane: FieldsForm | SearchWidget
     hbox: QHBoxLayout
-    chart: Optional['ChartPlotWidget'] = None
+    chart: Optional[ChartPlotWidget] = None
 
     def __init__(
         self,

@@ -314,7 +411,7 @@ class ChartnPane(QFrame):
 
         super().__init__(parent)
 
-        self.sidepane = sidepane
+        self._sidepane = sidepane
         self.chart = None
 
         hbox = self.hbox = QHBoxLayout(self)

@@ -322,6 +419,21 @@ class ChartnPane(QFrame):
         hbox.setContentsMargins(0, 0, 0, 0)
         hbox.setSpacing(3)
 
+    def set_sidepane(
+        self,
+        sidepane: FieldsForm | SearchWidget,
+    ) -> None:
+
+        # add sidepane **after** chart; place it on axis side
+        self.hbox.addWidget(
+            sidepane,
+            alignment=Qt.AlignTop
+        )
+        self._sidepane = sidepane
+
+    def sidepane(self) -> FieldsForm | SearchWidget:
+        return self._sidepane
+
 
 class LinkedSplits(QWidget):
     '''

@@ -356,6 +468,7 @@ class LinkedSplits(QWidget):
         self.splitter = QSplitter(QtCore.Qt.Vertical)
         self.splitter.setMidLineWidth(0)
         self.splitter.setHandleWidth(2)
+        self.splitter.splitterMoved.connect(self.on_splitter_adjust)
 
         self.layout = QVBoxLayout(self)
         self.layout.setContentsMargins(0, 0, 0, 0)

@@ -368,6 +481,16 @@ class LinkedSplits(QWidget):
 
         self._symbol: Symbol = None
 
+    def on_splitter_adjust(
+        self,
+        pos: int,
+        index: int,
+    ) -> None:
+        # print(f'splitter moved pos:{pos}, index:{index}')
+        godw = self.godwidget
+        if self is godw.rt_linked:
+            godw.search.on_resize()
+
     def graphics_cycle(self, **kwargs) -> None:
         from . import _display
         ds = self.display_state

@@ -383,28 +506,32 @@ class LinkedSplits(QWidget):
         prop: Optional[float] = None,
 
     ) -> None:
-        '''Set the proportion of space allocated for linked subcharts.
+        '''
+        Set the proportion of space allocated for linked subcharts.
+
         '''
-        ln = len(self.subplots)
+        ln = len(self.subplots) or 1
 
         # proportion allocated to consumer subcharts
         if not prop:
-            prop = 3/8*5/8
-
-            # if ln < 2:
-            #     prop = 3/8*5/8
-
-            # elif ln >= 2:
-            #     prop = 3/8
+            prop = 3/8
+
+        h = self.height()
+        histview_h = h * (6/16)
+        h = h - histview_h
 
         major = 1 - prop
-        min_h_ind = int((self.height() * prop) / ln)
+        min_h_ind = int((h * prop) / ln)
+
+        sizes = [
+            int(histview_h),
+            int(h * major),
+        ]
 
-        sizes = [int(self.height() * major)]
+        # give all subcharts the same remaining proportional height
         sizes.extend([min_h_ind] * ln)
 
-        self.splitter.setSizes(sizes)
+        if self.godwidget.rt_linked is self:
+            self.splitter.setSizes(sizes)
 
     def focus(self) -> None:
         if self.chart is not None:
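A worked example of the new `set_split_sizes()` math, assuming a 1000px tall linked-charts column with a single subplot (`ln == 1`) and the default `prop = 3/8`:

h = 1000
histview_h = h * (6 / 16)        # 375.0 px reserved for the history chart
h = h - histview_h               # 625.0 px left for rt chart + subplots
prop = 3 / 8
major = 1 - prop                 # 5/8 of the remainder -> main rt chart
min_h_ind = int((h * prop) / 1)  # 234 px for the lone subplot
sizes = [int(histview_h), int(h * major)] + [min_h_ind]
print(sizes)                     # [375, 390, 234]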
@@ -452,13 +579,6 @@ class LinkedSplits(QWidget):
         # add crosshair graphic
         self.chart.addItem(self.cursor)
 
-        # axis placement
-        if (
-            _xaxis_at == 'bottom' and
-            'bottom' in self.chart.plotItem.axes
-        ):
-            self.chart.hideAxis('bottom')
-
         # style?
         self.chart.setFrameStyle(
             QFrame.StyledPanel |

@@ -504,10 +624,15 @@ class LinkedSplits(QWidget):
             'bottom': xaxis,
         }
 
-        qframe = ChartnPane(
-            sidepane=sidepane,
-            parent=self.splitter,
-        )
+        if sidepane is not False:
+            parent = qframe = ChartnPane(
+                sidepane=sidepane,
+                parent=self.splitter,
+            )
+        else:
+            parent = self.splitter
+            qframe = None
+
         cpw = ChartPlotWidget(
 
             # this name will be used to register the primary

@@ -515,7 +640,7 @@ class LinkedSplits(QWidget):
             name=name,
             data_key=array_key or name,
 
-            parent=qframe,
+            parent=parent,
             linkedsplits=self,
             axisItems=axes,
             **cpw_kwargs,

@@ -523,37 +648,45 @@ class LinkedSplits(QWidget):
         cpw.hideAxis('left')
         cpw.hideAxis('bottom')
 
-        if self.xaxis_chart:
-            self.xaxis_chart.hideAxis('bottom')
+        if (
+            _xaxis_at == 'bottom' and (
+                self.xaxis_chart
+                or (
+                    not self.subplots
+                    and self.xaxis_chart is None
+                )
+            )
+        ):
+            if self.xaxis_chart:
+                self.xaxis_chart.hideAxis('bottom')
 
             # presuming we only want it at the true bottom of all charts.
             # XXX: uses new api from our ``pyqtgraph`` fork.
             # https://github.com/pikers/pyqtgraph/tree/plotitemoverlay_onto_pg_master
             # _ = self.xaxis_chart.removeAxis('bottom', unlink=False)
             # assert 'bottom' not in self.xaxis_chart.plotItem.axes
 
             self.xaxis_chart = cpw
             cpw.showAxis('bottom')
 
-        if self.xaxis_chart is None:
-            self.xaxis_chart = cpw
+        if qframe is not None:
+            qframe.chart = cpw
+            qframe.hbox.addWidget(cpw)
 
-        qframe.chart = cpw
-        qframe.hbox.addWidget(cpw)
+            # so we can look this up and add back to the splitter
+            # on a symbol switch
+            cpw.qframe = qframe
+            assert cpw.parent() == qframe
 
-        # so we can look this up and add back to the splitter
-        # on a symbol switch
-        cpw.qframe = qframe
-        assert cpw.parent() == qframe
+            # add sidepane **after** chart; place it on axis side
+            qframe.set_sidepane(sidepane)
+            # qframe.hbox.addWidget(
+            #     sidepane,
+            #     alignment=Qt.AlignTop
+            # )
 
-        # add sidepane **after** chart; place it on axis side
-        qframe.hbox.addWidget(
-            sidepane,
-            alignment=Qt.AlignTop
-        )
-        cpw.sidepane = sidepane
+            cpw.sidepane = sidepane
 
-        cpw.plotItem.vb.linkedsplits = self
+        cpw.plotItem.vb.linked = self
         cpw.setFrameStyle(
             QtWidgets.QFrame.StyledPanel
             # | QtWidgets.QFrame.Plain

@@ -614,9 +747,8 @@ class LinkedSplits(QWidget):
         if not _is_main:
             # track by name
             self.subplots[name] = cpw
-            self.splitter.addWidget(qframe)
-            # scale split regions
-            self.set_split_sizes()
+            if qframe is not None:
+                self.splitter.addWidget(qframe)
 
         else:
             assert style == 'bar', 'main chart must be OHLC'

@@ -642,19 +774,28 @@ class LinkedSplits(QWidget):
 
     def resize_sidepanes(
         self,
+        from_linked: Optional[LinkedSplits] = None,
+
     ) -> None:
         '''
         Size all sidepanes based on the OHLC "main" plot and its
         sidepane width.
 
         '''
-        main_chart = self.chart
-        if main_chart:
+        if from_linked:
+            main_chart = from_linked.chart
+        else:
+            main_chart = self.chart
+
+        if main_chart and main_chart.sidepane:
             sp_w = main_chart.sidepane.width()
             for name, cpw in self.subplots.items():
                 cpw.sidepane.setMinimumWidth(sp_w)
                 cpw.sidepane.setMaximumWidth(sp_w)
 
+            if from_linked:
+                self.chart.sidepane.setMinimumWidth(sp_w)
+
 
 class ChartPlotWidget(pg.PlotWidget):
     '''
@@ -681,7 +822,8 @@ class ChartPlotWidget(pg.PlotWidget):
     # a better one?
     def mk_vb(self, name: str) -> ChartView:
         cv = ChartView(name)
-        cv.linkedsplits = self.linked
+        # link new view to chart's view set
+        cv.linked = self.linked
         return cv
 
     def __init__(

@@ -700,6 +842,7 @@ class ChartPlotWidget(pg.PlotWidget):
 
         static_yrange: Optional[tuple[float, float]] = None,
 
+        parent=None,
         **kwargs,
     ):
         '''

@@ -712,16 +855,20 @@ class ChartPlotWidget(pg.PlotWidget):
 
         # NOTE: must be set before calling ``.mk_vb()``
         self.linked = linkedsplits
+        self.sidepane: Optional[FieldsForm] = None
 
         # source of our custom interactions
         self.cv = cv = self.mk_vb(name)
 
+        pi = pgo.PlotItem(viewBox=cv, **kwargs)
         super().__init__(
             background=hcolor(view_color),
             viewBox=cv,
             # parent=None,
             # plotItem=None,
             # antialias=True,
+            parent=parent,
+            plotItem=pi,
             **kwargs
         )
         # give viewbox as reference to chart

@@ -760,13 +907,24 @@ class ChartPlotWidget(pg.PlotWidget):
 
         self.pi_overlay: PlotItemOverlay = PlotItemOverlay(self.plotItem)
 
+        # idempotent startup flag for the auto-yrange subsys
+        # to detect the "first time" y-domain graphics begin
+        # to be shown in the (main) graphics view.
+        self._on_screen: bool = False
+
     def resume_all_feeds(self):
-        for feed in self._feeds.values():
-            self.linked.godwidget._root_n.start_soon(feed.resume)
+        try:
+            for feed in self._feeds.values():
+                for flume in feed.flumes.values():
+                    self.linked.godwidget._root_n.start_soon(feed.resume)
+        except RuntimeError:
+            # TODO: cancel the qtractor runtime here?
+            raise
 
     def pause_all_feeds(self):
         for feed in self._feeds.values():
-            self.linked.godwidget._root_n.start_soon(feed.pause)
+            for flume in feed.flumes.values():
+                self.linked.godwidget._root_n.start_soon(feed.pause)
 
     @property
     def view(self) -> ChartView:

@@ -859,7 +1017,9 @@ class ChartPlotWidget(pg.PlotWidget):
 
     def default_view(
         self,
-        bars_from_y: int = 3000,
+        bars_from_y: int = int(616 * 3/8),
+        y_offset: int = 0,
+        do_ds: bool = True,
 
     ) -> None:
         '''

@@ -897,8 +1057,12 @@ class ChartPlotWidget(pg.PlotWidget):
         # terms now that we've scaled either by user control
         # or to the default set of bars as per the immediate block
         # above.
-        marker_pos, l1_len = self.pre_l1_xs()
-        end = xlast + l1_len + 1
+        if not y_offset:
+            marker_pos, l1_len = self.pre_l1_xs()
+            end = xlast + l1_len + 1
+        else:
+            end = xlast + y_offset + 1
 
         begin = end - (r - l)
 
         # for debugging

@@ -920,8 +1084,11 @@ class ChartPlotWidget(pg.PlotWidget):
             max=end,
             padding=0,
         )
-        self.view.maybe_downsample_graphics()
-        view._set_yrange()
+
+        if do_ds:
+            self.view.maybe_downsample_graphics()
+            view._set_yrange()
+
         try:
             self.linked.graphics_cycle()
         except IndexError:

@@ -994,7 +1161,7 @@ class ChartPlotWidget(pg.PlotWidget):
         axis_side: str = 'right',
         axis_kwargs: dict = {},
 
-    ) -> pg.PlotItem:
+    ) -> pgo.PlotItem:
 
         # Custom viewbox impl
         cv = self.mk_vb(name)

@@ -1003,13 +1170,14 @@ class ChartPlotWidget(pg.PlotWidget):
         allowed_sides = {'left', 'right'}
         if axis_side not in allowed_sides:
             raise ValueError(f'``axis_side``` must be in {allowed_sides}')
 
         yaxis = PriceAxis(
             orientation=axis_side,
             linkedsplits=self.linked,
             **axis_kwargs,
         )
 
-        pi = pg.PlotItem(
+        pi = pgo.PlotItem(
             parent=self.plotItem,
             name=name,
             enableMenu=False,
@@ -1022,19 +1190,27 @@ class ChartPlotWidget(pg.PlotWidget):
         )
         pi.hideButtons()
 
-        # cv.enable_auto_yrange(self.view)
-        cv.enable_auto_yrange()
-
         # compose this new plot's graphics with the current chart's
         # existing one but with separate axes as needed and specified.
         self.pi_overlay.add_plotitem(
             pi,
             index=index,
 
-            # only link x-axes,
+            # only link x-axes and
+            # don't relay any ``ViewBox`` derived event
+            # handlers since we only care about keeping charts
+            # x-synced on interaction (at least for now).
             link_axes=(0,),
         )
 
+        # connect auto-yrange callbacks *from* this new
+        # view **to** this parent and likewise *from* the
+        # main/parent chart back *to* the created overlay.
+        cv.enable_auto_yrange(src_vb=self.view)
+        # makes it so that interaction on the new overlay will reflect
+        # back on the main chart (which overlay was added to).
+        self.view.enable_auto_yrange(src_vb=cv)
+
         # add axis title
         # TODO: do we want this API to still work?
         # raxis = pi.getAxis('right')

@@ -1096,7 +1272,7 @@ class ChartPlotWidget(pg.PlotWidget):
 
         # TODO: this probably needs its own method?
         if overlay:
-            if isinstance(overlay, pg.PlotItem):
+            if isinstance(overlay, pgo.PlotItem):
                 if overlay not in self.pi_overlay.overlays:
                     raise RuntimeError(
                         f'{overlay} must be from `.plotitem_overlay()`'

@@ -1255,8 +1431,7 @@ class ChartPlotWidget(pg.PlotWidget):
         If ``bars_range`` is provided use that range.
 
         '''
-        # print(f'Chart[{self.name}].maxmin()')
-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
             msg=f'`{str(self)}.maxmin(name={name})`: `{self.name}`',
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,

@@ -1287,11 +1462,18 @@ class ChartPlotWidget(pg.PlotWidget):
 
             key = round(lbar), round(rbar)
             res = flow.maxmin(*key)
-            if res == (None, None):
-                log.error(
+
+            if (
+                res is None
+            ):
+                log.warning(
                     f"{flow_key} no mxmn for bars_range => {key} !?"
                 )
                 res = 0, 0
+                if not self._on_screen:
+                    self.default_view(do_ds=False)
+                    self._on_screen = True
 
         profiler(f'yrange mxmn: {key} -> {res}')
+        # print(f'{flow_key} yrange mxmn: {key} -> {res}')
         return res
@@ -223,14 +223,20 @@ def ds_m4(
     assert frames >= (xrange / uppx)
 
     # call into ``numba``
-    nb, i_win, y_out = _m4(
+    (
+        nb,
+        x_out,
+        y_out,
+        ymn,
+        ymx,
+    ) = _m4(
         x,
         y,
 
         frames,
 
         # TODO: see func below..
-        # i_win,
+        # x_out,
         # y_out,
 
         # first index in x data to start at

@@ -243,10 +249,11 @@ def ds_m4(
     # filter out any overshoot in the input allocation arrays by
     # removing zero-ed tail entries which should start at a certain
    # index.
-    i_win = i_win[i_win != 0]
-    y_out = y_out[:i_win.size]
+    x_out = x_out[x_out != 0]
+    y_out = y_out[:x_out.size]
 
-    return nb, i_win, y_out
+    # print(f'M4 output ymn, ymx: {ymn},{ymx}')
+    return nb, x_out, y_out, ymn, ymx
 
 
 @jit(
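The zero-trim step above works because the numba kernel pre-allocates `frames` slots and only fills the bins it actually visits; boolean masking then strips the untouched tail. A tiny demo of that masking (note it implicitly assumes 0 is never a legitimate bin x-coordinate):

import numpy as np

x_out = np.array([3., 4., 5., 0., 0.])  # two unused, zeroed tail slots
y_out = np.arange(10.).reshape(5, 2)
x_out = x_out[x_out != 0]                # drop the zeroed tail
y_out = y_out[:x_out.size]               # keep y rows in lock-step
print(x_out, y_out.shape)                # [3. 4. 5.] (3, 2)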
@@ -260,8 +267,8 @@ def _m4(
 
     frames: int,
 
-    # TODO: using this approach by having the ``.zeros()`` alloc lines
-    # below, in put python was causing segs faults and alloc crashes..
+    # TODO: using this approach, having the ``.zeros()`` alloc lines
+    # below in pure python, there were seg faults and alloc crashes..
     # we might need to see how it behaves with shm arrays and consider
     # allocating them once at startup?

@@ -274,14 +281,22 @@ def _m4(
     x_start: int,
     step: float,
 
-) -> int:
-    # nbins = len(i_win)
-    # count = len(xs)
+) -> tuple[
+    int,
+    np.ndarray,
+    np.ndarray,
+    float,
+    float,
+]:
+    '''
+    Implementation of the m4 algorithm in ``numba``:
+    http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
+
+    '''
     # these are pre-allocated and mutated by ``numba``
     # code in-place.
     y_out = np.zeros((frames, 4), ys.dtype)
-    i_win = np.zeros(frames, xs.dtype)
+    x_out = np.zeros(frames, xs.dtype)
 
     bincount = 0
     x_left = x_start

@@ -295,24 +310,34 @@ def _m4(
 
     # set all bins in the left-most entry to the starting left-most x value
     # (aka a row broadcast).
-    i_win[bincount] = x_left
+    x_out[bincount] = x_left
     # set all y-values to the first value passed in.
     y_out[bincount] = ys[0]
 
+    # full input y-data mx and mn
+    mx: float = -np.inf
+    mn: float = np.inf
+
+    # compute OHLC style max / min values per window sized x-frame.
     for i in range(len(xs)):
 
         x = xs[i]
         y = ys[i]
 
         if x < x_left + step:   # the current window "step" is [bin, bin+1)
-            y_out[bincount, 1] = min(y, y_out[bincount, 1])
-            y_out[bincount, 2] = max(y, y_out[bincount, 2])
+            ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
+            ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
             y_out[bincount, 3] = y
+            mx = max(mx, ymx)
+            mn = min(mn, ymn)
 
         else:
             # Find the next bin
             while x >= x_left + step:
                 x_left += step
 
             bincount += 1
-            i_win[bincount] = x_left
+            x_out[bincount] = x_left
             y_out[bincount] = y
 
-    return bincount, i_win, y_out
+    return bincount, x_out, y_out, mn, mx
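For readers new to M4 (the paper linked in the docstring above): each fixed-width x-window keeps only four y samples - first, min, max, last - which is enough to reproduce the visual envelope of the full series at screen resolution. A toy, pure-python version of the same binning (no numba, no pre-allocation):

def m4_bins(
    xs: list[float],
    ys: list[float],
    step: float,
) -> list[tuple[float, float, float, float, float]]:
    out = []
    x_left = xs[0]
    first = mn = mx = last = ys[0]
    for x, y in zip(xs[1:], ys[1:]):
        if x < x_left + step:            # still inside the current bin
            mn, mx, last = min(mn, y), max(mx, y), y
        else:                            # flush this bin, seek the next
            out.append((x_left, first, mn, mx, last))
            while x >= x_left + step:
                x_left += step
            first = mn = mx = last = y
    out.append((x_left, first, mn, mx, last))
    return out

print(m4_bins([0, 1, 2, 3], [1., 5., 2., 7.], step=2))
# [(0, 1.0, 1.0, 5.0, 5.0), (2, 2.0, 2.0, 7.0, 7.0)]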
@@ -18,8 +18,13 @@
 Mouse interaction graphics
 
 """
+from __future__ import annotations
 from functools import partial
-from typing import Optional, Callable
+from typing import (
+    Optional,
+    Callable,
+    TYPE_CHECKING,
+)
 
 import inspect
 import numpy as np

@@ -36,6 +41,12 @@ from ._style import (
 from ._axes import YAxisLabel, XAxisLabel
 from ..log import get_logger
 
+if TYPE_CHECKING:
+    from ._chart import (
+        ChartPlotWidget,
+        LinkedSplits,
+    )
+
 
 log = get_logger(__name__)
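The new import block above is the standard recipe for breaking import cycles between UI modules: under `from __future__ import annotations` every annotation is evaluated lazily, so names imported only inside `TYPE_CHECKING` never execute at runtime. Schematically:

from __future__ import annotations
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # only type checkers ever run this import, so no runtime cycle
    from ._chart import ChartPlotWidget

def add_plot(plot: ChartPlotWidget) -> None:
    # the annotation stays an unevaluated string at runtime
    ...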
@@ -58,7 +69,7 @@ class LineDot(pg.CurvePoint):
         curve: pg.PlotCurveItem,
         index: int,
 
-        plot: 'ChartPlotWidget',  # type: ignore # noqa
+        plot: ChartPlotWidget,  # type: ignore # noqa
         pos=None,
         color: str = 'default_light',
 

@@ -151,7 +162,7 @@ class ContentsLabel(pg.LabelItem):
     def __init__(
         self,
 
-        # chart: 'ChartPlotWidget', # noqa
+        # chart: ChartPlotWidget, # noqa
         view: pg.ViewBox,
 
         anchor_at: str = ('top', 'right'),

@@ -244,7 +255,7 @@ class ContentsLabels:
     '''
     def __init__(
         self,
-        linkedsplits: 'LinkedSplits',  # type: ignore # noqa
+        linkedsplits: LinkedSplits,  # type: ignore # noqa
 
     ) -> None:
 

@@ -289,7 +300,7 @@ class ContentsLabels:
     def add_label(
 
         self,
-        chart: 'ChartPlotWidget',  # type: ignore # noqa
+        chart: ChartPlotWidget,  # type: ignore # noqa
         name: str,
         anchor_at: tuple[str, str] = ('top', 'left'),
         update_func: Callable = ContentsLabel.update_from_value,

@@ -316,7 +327,7 @@ class Cursor(pg.GraphicsObject):
     def __init__(
 
         self,
-        linkedsplits: 'LinkedSplits',  # noqa
+        linkedsplits: LinkedSplits,  # noqa
         digits: int = 0
 
     ) -> None:

@@ -325,6 +336,8 @@ class Cursor(pg.GraphicsObject):
 
         self.linked = linkedsplits
         self.graphics: dict[str, pg.GraphicsObject] = {}
+        self.xaxis_label: Optional[XAxisLabel] = None
+        self.always_show_xlabel: bool = True
         self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
         self.active_plot = None
         self.digits: int = digits

@@ -385,7 +398,7 @@ class Cursor(pg.GraphicsObject):
 
     def add_plot(
         self,
-        plot: 'ChartPlotWidget',  # noqa
+        plot: ChartPlotWidget,  # noqa
         digits: int = 0,
 
     ) -> None:

@@ -469,7 +482,7 @@ class Cursor(pg.GraphicsObject):
 
     def add_curve_cursor(
         self,
-        plot: 'ChartPlotWidget',  # noqa
+        plot: ChartPlotWidget,  # noqa
         curve: 'PlotCurveItem',  # noqa
 
     ) -> LineDot:

@@ -491,17 +504,29 @@ class Cursor(pg.GraphicsObject):
         log.debug(f"{(action, plot.name)}")
         if action == 'Enter':
             self.active_plot = plot
+            plot.linked.godwidget._active_cursor = self
 
             # show horiz line and y-label
             self.graphics[plot]['hl'].show()
             self.graphics[plot]['yl'].show()
 
-        else:  # Leave
+            if (
+                not self.always_show_xlabel
+                and not self.xaxis_label.isVisible()
+            ):
+                self.xaxis_label.show()
 
-            # hide horiz line and y-label
+        # Leave: hide horiz line and y-label
+        else:
             self.graphics[plot]['hl'].hide()
             self.graphics[plot]['yl'].hide()
 
+            if (
+                not self.always_show_xlabel
+                and self.xaxis_label.isVisible()
+            ):
+                self.xaxis_label.hide()
+
     def mouseMoved(
         self,
         coords: tuple[QPointF],  # noqa

@@ -590,13 +615,17 @@ class Cursor(pg.GraphicsObject):
             left_axis_width += left.width()
 
         # map back to abs (label-local) coordinates
-        self.xaxis_label.update_label(
-            abs_pos=(
-                plot.mapFromView(QPointF(vl_x, iy)) -
-                QPointF(left_axis_width, 0)
-            ),
-            value=ix,
-        )
+        if (
+            self.always_show_xlabel
+            or self.xaxis_label.isVisible()
+        ):
+            self.xaxis_label.update_label(
+                abs_pos=(
+                    plot.mapFromView(QPointF(vl_x, iy)) -
+                    QPointF(left_axis_width, 0)
+                ),
+                value=ix,
+            )
 
         self._datum_xy = ix, iy
@@ -28,10 +28,7 @@ from PyQt5.QtWidgets import QGraphicsItem
 from PyQt5.QtCore import (
     Qt,
     QLineF,
-    QSizeF,
     QRectF,
-    # QRect,
-    QPointF,
 )
 from PyQt5.QtGui import (
     QPainter,
@@ -44,6 +41,7 @@ from ._style import hcolor
 #     ds_m4,
 # )
 from ..log import get_logger
+from .._profile import Profiler


 log = get_logger(__name__)
@@ -88,9 +86,9 @@ class Curve(pg.GraphicsObject):
     '''

     # sub-type customization methods
-    sub_br: Optional[Callable] = None
-    sub_paint: Optional[Callable] = None
     declare_paintables: Optional[Callable] = None
+    sub_paint: Optional[Callable] = None
+    # sub_br: Optional[Callable] = None

     def __init__(
         self,
@@ -139,9 +137,7 @@ class Curve(pg.GraphicsObject):
         # self.last_step_pen = pg.mkPen(hcolor(color), width=2)
         self.last_step_pen = pg.mkPen(pen, width=2)

-        # self._last_line: Optional[QLineF] = None
         self._last_line = QLineF()
-        self._last_w: float = 1

         # flat-top style histogram-like discrete curve
         # self._step_mode: bool = step_mode
@@ -230,8 +226,8 @@ class Curve(pg.GraphicsObject):
             self.path.clear()

         if self.fast_path:
-            # self.fast_path.clear()
-            self.fast_path = None
+            self.fast_path.clear()
+            # self.fast_path = None

     @cm
     def reset_cache(self) -> None:
@@ -251,77 +247,65 @@ class Curve(pg.GraphicsObject):
         self.boundingRect = self._path_br
         return self._path_br()

+    # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
     def _path_br(self):
         '''
         Post init ``.boundingRect()```.

         '''
-        # hb = self.path.boundingRect()
-        hb = self.path.controlPointRect()
-        hb_size = hb.size()
-
-        fp = self.fast_path
-        if fp:
-            fhb = fp.controlPointRect()
-            hb_size = fhb.size() + hb_size
-
-        # print(f'hb_size: {hb_size}')
-
-        # if self._last_step_rect:
-        #     hb_size += self._last_step_rect.size()
-
-        # if self._line:
-        #     br = self._last_step_rect.bottomRight()
-
-        # tl = QPointF(
-        #     # self._vr[0],
-        #     # hb.topLeft().y(),
-        #     # 0,
-        #     # hb_size.height() + 1
-        # )
-
-        # br = self._last_step_rect.bottomRight()
-
-        w = hb_size.width()
-        h = hb_size.height()
-
-        sbr = self.sub_br
-        if sbr:
-            w, h = self.sub_br(w, h)
-        else:
-            # assume plain line graphic and use
-            # default unit step in each direction.
-
-            # only on a plane line do we include
-            # and extra index step's worth of width
-            # since in the step case the end of the curve
-            # actually terminates earlier so we don't need
-            # this for the last step.
-            w += self._last_w
-            # ll = self._last_line
-            h += 1  # ll.y2() - ll.y1()
-
-        # br = QPointF(
-        #     self._vr[-1],
-        #     # tl.x() + w,
-        #     tl.y() + h,
-        # )
-
-        br = QRectF(
-
-            # top left
-            # hb.topLeft()
-            # tl,
-            QPointF(hb.topLeft()),
-
-            # br,
-            # total size
-            # QSizeF(hb_size)
-            # hb_size,
-            QSizeF(w, h)
-        )
-        # print(f'bounding rect: {br}')
-        return br
+        # profiler = Profiler(
+        #     msg=f'Curve.boundingRect(): `{self._name}`',
+        #     disabled=not pg_profile_enabled(),
+        #     ms_threshold=ms_slower_then,
+        # )
+        pr = self.path.controlPointRect()
+        hb_tl, hb_br = (
+            pr.topLeft(),
+            pr.bottomRight(),
+        )
+        mn_y = hb_tl.y()
+        mx_y = hb_br.y()
+        most_left = hb_tl.x()
+        most_right = hb_br.x()
+        # profiler('calc path vertices')
+
+        # TODO: if/when we get fast path appends working in the
+        # `Renderer`, then we might need to actually use this..
+        # fp = self.fast_path
+        # if fp:
+        #     fhb = fp.controlPointRect()
+        #     # hb_size = fhb.size() + hb_size
+        #     br = pr.united(fhb)
+
+        # XXX: *was* a way to allow sub-types to extend the
+        # boundingrect calc, but in the one use case for a step curve
+        # doesn't seem like we need it as long as the last line segment
+        # is drawn as it is?
+
+        # sbr = self.sub_br
+        # if sbr:
+        #     # w, h = self.sub_br(w, h)
+        #     sub_br = sbr()
+        #     br = br.united(sub_br)
+
+        # assume plain line graphic and use
+        # default unit step in each direction.
+        ll = self._last_line
+        y1, y2 = ll.y1(), ll.y2()
+        x1, x2 = ll.x1(), ll.x2()
+
+        ymn = min(y1, y2, mn_y)
+        ymx = max(y1, y2, mx_y)
+        most_left = min(x1, x2, most_left)
+        most_right = max(x1, x2, most_right)
+        # profiler('calc last line vertices')
+
+        return QRectF(
+            most_left,
+            ymn,
+            most_right - most_left + 1,
+            ymx,
+        )

     def paint(
         self,
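The rewritten `._path_br()` above derives the bounding box by uniting the path's control-point rect with the endpoints of the cached last-datum segment, replacing the old width/height accumulation through `sub_br()`. A standalone sketch of that union under the same min/max logic; note that `QRectF(x, y, w, h)` takes a width and height, so the `ymx` passed as the fourth argument in the hunk is worth double checking against the explicit `ymx - ymn` extent used here:

```python
from PyQt5.QtCore import QLineF, QRectF

def union_rect_with_segment(
    pr: QRectF,    # eg. `path.controlPointRect()`
    ll: QLineF,    # the cached "last datum" segment
    pad_right: float = 1.0,
) -> QRectF:
    '''
    Expand a path rect to also cover a trailing line segment,
    mirroring the min/max scan in `._path_br()` (sketch only).

    '''
    x1, x2 = ll.x1(), ll.x2()
    y1, y2 = ll.y1(), ll.y2()

    most_left = min(x1, x2, pr.left())
    most_right = max(x1, x2, pr.right())
    ymn = min(y1, y2, pr.top())
    ymx = max(y1, y2, pr.bottom())

    # QRectF(x, y, w, h) expects a *size*, so convert the max
    # coordinates back into width/height extents.
    return QRectF(
        most_left,
        ymn,
        most_right - most_left + pad_right,
        ymx - ymn,
    )
```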
@@ -331,7 +315,7 @@ class Curve(pg.GraphicsObject):

     ) -> None:

-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
             msg=f'Curve.paint(): `{self._name}`',
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,
@@ -339,7 +323,7 @@ class Curve(pg.GraphicsObject):

         sub_paint = self.sub_paint
         if sub_paint:
-            sub_paint(p, profiler)
+            sub_paint(p)

         p.setPen(self.last_step_pen)
         p.drawLine(self._last_line)
@@ -449,36 +433,34 @@ class StepCurve(Curve):
         y = src_data[array_key]

         x_last = x[-1]
+        x_2last = x[-2]
         y_last = y[-1]
+        step_size = x_last - x_2last
+        half_step = step_size / 2

         # lol, commenting this makes step curves
         # all "black" for me :eyeroll:..
         self._last_line = QLineF(
-            x_last - w, 0,
-            x_last + w, 0,
+            x_2last, 0,
+            x_last, 0,
         )
         self._last_step_rect = QRectF(
-            x_last - w, 0,
-            x_last + w, y_last,
+            x_last - half_step, 0,
+            step_size, y_last,
         )
         return x, y

     def sub_paint(
         self,
         p: QPainter,
-        profiler: pg.debug.Profiler,

     ) -> None:
         # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
         # p.drawRect(self._last_step_rect)
         p.fillRect(self._last_step_rect, self._brush)
-        profiler('.fillRect()')

-    def sub_br(
-        self,
-        path_w: float,
-        path_h: float,
-
-    ) -> (float, float):
-        # passthrough
-        return path_w, path_h
+    # def sub_br(
+    #     self,
+    #     parent_br: QRectF | None = None,
+    # ) -> QRectF:
+    #     return self._last_step_rect
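The `StepCurve` hunk replaces the old symmetric `x_last ± w` geometry with one derived from the actual sample spacing: the flat-top line spans the final interval `[x_2last, x_last]` and the fill rect is centered on the last index, exactly one step wide. A self-contained sketch of that geometry (numpy arrays assumed for `x`/`y`):

```python
import numpy as np
from PyQt5.QtCore import QLineF, QRectF

def last_step_graphics(
    x: np.ndarray,
    y: np.ndarray,
) -> tuple[QLineF, QRectF]:
    '''
    Compute the "last step" line + fill rect the way the hunk
    above does (sketch only).

    '''
    x_last, x_2last = x[-1], x[-2]
    y_last = y[-1]
    step_size = x_last - x_2last
    half_step = step_size / 2

    # flat top spanning the final sample interval
    line = QLineF(x_2last, 0, x_last, 0)

    # QRectF(x, y, w, h): starts half a step left of the last index
    # and extends one full step right, height = last y value.
    rect = QRectF(x_last - half_step, 0, step_size, y_last)
    return line, rect
```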
@@ -21,19 +21,21 @@ this module ties together quote and computational (fsp) streams with
 graphics update methods via our custom ``pyqtgraph`` charting api.

 '''
-from dataclasses import dataclass
 from functools import partial
 import time
 from typing import Optional, Any, Callable

-import numpy as np
 import tractor
 import trio
-import pendulum
 import pyqtgraph as pg

 # from .. import brokers
-from ..data.feed import open_feed
+from ..data.feed import (
+    open_feed,
+    Feed,
+    Flume,
+)
+from ..data.types import Struct
 from ._axes import YAxisLabel
 from ._chart import (
     ChartPlotWidget,
@@ -41,29 +43,36 @@ from ._chart import (
     GodWidget,
 )
 from ._l1 import L1Labels
+from ._style import hcolor
 from ._fsp import (
     update_fsp_chart,
     start_fsp_displays,
     has_vlm,
     open_vlm_displays,
 )
-from ..data._sharedmem import ShmArray
+from ..data._sharedmem import (
+    ShmArray,
+)
 from ..data._source import tf_in_1s
 from ._forms import (
     FieldsForm,
     mk_order_pane_layout,
 )
-from .order_mode import open_order_mode
+from .order_mode import (
+    open_order_mode,
+    OrderMode,
+)
 from .._profile import (
     pg_profile_enabled,
     ms_slower_then,
 )
 from ..log import get_logger
+from .._profile import Profiler

 log = get_logger(__name__)

 # TODO: load this from a config.toml!
-_quote_throttle_rate: int = 22  # Hz
+_quote_throttle_rate: int = 16  # Hz


 # a working tick-type-classes template
@@ -105,6 +114,10 @@ def chart_maxmin(
     mn, mx = out

     mx_vlm_in_view = 0

+    # TODO: we need to NOT call this to avoid a manual
+    # np.max/min trigger and especially on the vlm_chart
+    # flows which aren't shown.. like vlm?
     if vlm_chart:
         out = vlm_chart.maxmin()
         if out:
@@ -118,39 +131,105 @@ def chart_maxmin(
     )


-@dataclass
-class DisplayState:
+class DisplayState(Struct):
     '''
     Chart-local real-time graphics state container.

     '''
+    godwidget: GodWidget
     quotes: dict[str, Any]

     maxmin: Callable
     ohlcv: ShmArray
+    hist_ohlcv: ShmArray

     # high level chart handles
-    linked: LinkedSplits
     chart: ChartPlotWidget
-    vlm_chart: ChartPlotWidget

     # axis labels
     l1: L1Labels
     last_price_sticky: YAxisLabel
-    vlm_sticky: YAxisLabel
+    hist_last_price_sticky: YAxisLabel

     # misc state tracking
-    vars: dict[str, Any]
+    vars: dict[str, Any] = {
+        'tick_margin': 0,
+        'i_last': 0,
+        'i_last_append': 0,
+        'last_mx_vlm': 0,
+        'last_mx': 0,
+        'last_mn': 0,
+    }

+    vlm_chart: Optional[ChartPlotWidget] = None
+    vlm_sticky: Optional[YAxisLabel] = None
     wap_in_history: bool = False

+    def incr_info(
+        self,
+        chart: Optional[ChartPlotWidget] = None,
+        shm: Optional[ShmArray] = None,
+        state: Optional[dict] = None,  # pass in a copy if you don't
+
+        update_state: bool = True,
+        update_uppx: float = 16,
+
+    ) -> tuple:
+
+        shm = shm or self.ohlcv
+        chart = chart or self.chart
+        state = state or self.vars
+
+        if not update_state:
+            state = state.copy()
+
+        # compute the first available graphic's x-units-per-pixel
+        uppx = chart.view.x_uppx()
+
+        # NOTE: this used to be implemented in a dedicated
+        # "increment task": ``check_for_new_bars()`` but it doesn't
+        # make sense to do a whole task switch when we can just do
+        # this simple index-diff and all the fsp sub-curve graphics
+        # are diffed on each draw cycle anyway; so updates to the
+        # "curve" length is already automatic.
+
+        # increment the view position by the sample offset.
+        i_step = shm.index
+        i_diff = i_step - state['i_last']
+        state['i_last'] = i_step
+
+        append_diff = i_step - state['i_last_append']
+
+        # update the "last datum" (aka extending the flow graphic with
+        # new data) only if the number of unit steps is >= the number of
+        # such unit steps per pixel (aka uppx). Iow, if the zoom level
+        # is such that a datum(s) update to graphics wouldn't span
+        # to a new pixel, we don't update yet.
+        do_append = (append_diff >= uppx)
+        if do_append:
+            state['i_last_append'] = i_step
+
+        do_rt_update = uppx < update_uppx
+
+        _, _, _, r = chart.bars_range()
+        liv = r >= i_step
+
+        # TODO: pack this into a struct
+        return (
+            uppx,
+            liv,
+            do_append,
+            i_diff,
+            append_diff,
+            do_rt_update,
+        )
+
+
 async def graphics_update_loop(

-    linked: LinkedSplits,
-    stream: tractor.MsgStream,
-    ohlcv: np.ndarray,
+    nurse: trio.Nursery,
+    godwidget: GodWidget,
+    flume: Flume,

     wap_in_history: bool = False,
     vlm_chart: Optional[ChartPlotWidget] = None,
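The new `DisplayState.incr_info()` centralizes the per-cycle redraw decision for both the fast and slow charts: a graphics append only fires once enough new samples have accumulated to cross at least one screen pixel (`append_diff >= uppx`), and full real-time updates are gated off once a single pixel spans more than `update_uppx` samples. A self-contained sketch of that throttle, using a plain state dict in place of the real chart/shm handles:

```python
def throttle_decision(
    i_step: int,          # newest shm array index
    state: dict,          # {'i_last': int, 'i_last_append': int}
    uppx: float,          # x-units (samples) per screen pixel
    update_uppx: float = 16,
) -> tuple[int, bool, bool]:
    '''
    Mirror of the `incr_info()` core: view-shift step count,
    whether to append graphics, whether to do rt updates.

    '''
    i_diff = i_step - state['i_last']
    state['i_last'] = i_step

    append_diff = i_step - state['i_last_append']

    # appending a datum that wouldn't span a new pixel is wasted work
    do_append = append_diff >= uppx
    if do_append:
        state['i_last_append'] = i_step

    # when zoomed far out (many samples per pixel) skip rt updates
    do_rt_update = uppx < update_uppx
    return i_diff, do_append, do_rt_update


# ex: at 8 samples-per-pixel, 5 new samples don't cross a pixel yet..
state = {'i_last': 95, 'i_last_append': 95}
print(throttle_decision(100, state, uppx=8))  # -> (5, False, True)
# ..but 3 more samples push append_diff to 8 and trigger the append.
print(throttle_decision(103, state, uppx=8))  # -> (3, True, True)
```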
@@ -171,22 +250,29 @@ async def graphics_update_loop(
     # of copying it from last bar's close
     # - 1-5 sec bar lookback-autocorrection like tws does?
     #   (would require a background history checker task)
-    display_rate = linked.godwidget.window.current_screen().refreshRate()
+    linked: LinkedSplits = godwidget.rt_linked
+    display_rate = godwidget.window.current_screen().refreshRate()

-    chart = linked.chart
+    fast_chart = linked.chart
+    hist_chart = godwidget.hist_linked.chart

+    ohlcv = flume.rt_shm
+    hist_ohlcv = flume.hist_shm

     # update last price sticky
-    last_price_sticky = chart._ysticks[chart.name]
+    last_price_sticky = fast_chart._ysticks[fast_chart.name]
     last_price_sticky.update_from_data(
         *ohlcv.array[-1][['index', 'close']]
     )

-    if vlm_chart:
-        vlm_sticky = vlm_chart._ysticks['volume']
+    hist_last_price_sticky = hist_chart._ysticks[hist_chart.name]
+    hist_last_price_sticky.update_from_data(
+        *hist_ohlcv.array[-1][['index', 'close']]
+    )

     maxmin = partial(
         chart_maxmin,
-        chart,
+        fast_chart,
         ohlcv,
         vlm_chart,
     )
@@ -200,15 +286,15 @@ async def graphics_update_loop(

     last, volume = ohlcv.array[-1][['close', 'volume']]

-    symbol = chart.linked.symbol
+    symbol = fast_chart.linked.symbol

     l1 = L1Labels(
-        chart,
+        fast_chart,
         # determine precision/decimal lengths
         digits=symbol.tick_size_digits,
         size_digits=symbol.lot_size_digits,
     )
-    chart._l1_labels = l1
+    fast_chart._l1_labels = l1

     # TODO:
     # - in theory we should be able to read buffer data faster
@@ -218,46 +304,22 @@ async def graphics_update_loop(
     #   levels this might be dark volume we need to
     #   present differently -> likely dark vlm

-    tick_size = chart.linked.symbol.tick_size
+    tick_size = fast_chart.linked.symbol.tick_size
     tick_margin = 3 * tick_size

-    chart.show()
-    # view = chart.view
+    fast_chart.show()
     last_quote = time.time()
     i_last = ohlcv.index

-    # async def iter_drain_quotes():
-    #     # NOTE: all code below this loop is expected to be synchronous
-    #     # and thus draw instructions are not picked up jntil the next
-    #     # wait / iteration.
-    #     async for quotes in stream:
-    #         while True:
-    #             try:
-    #                 moar = stream.receive_nowait()
-    #             except trio.WouldBlock:
-    #                 yield quotes
-    #                 break
-    #             else:
-    #                 for sym, quote in moar.items():
-    #                     ticks_frame = quote.get('ticks')
-    #                     if ticks_frame:
-    #                         quotes[sym].setdefault(
-    #                             'ticks', []).extend(ticks_frame)
-    #                     print('pulled extra')
-
-    #         yield quotes
-
-    # async for quotes in iter_drain_quotes():
-
     ds = linked.display_state = DisplayState(**{
+        'godwidget': godwidget,
         'quotes': {},
-        'linked': linked,
         'maxmin': maxmin,
         'ohlcv': ohlcv,
-        'chart': chart,
+        'hist_ohlcv': hist_ohlcv,
+        'chart': fast_chart,
         'last_price_sticky': last_price_sticky,
-        'vlm_chart': vlm_chart,
-        'vlm_sticky': vlm_sticky,
+        'hist_last_price_sticky': hist_last_price_sticky,
         'l1': l1,

         'vars': {
@@ -270,9 +332,69 @@ async def graphics_update_loop(
         }
     })

-    chart.default_view()
+    if vlm_chart:
+        vlm_sticky = vlm_chart._ysticks['volume']
+        ds.vlm_chart = vlm_chart
+        ds.vlm_sticky = vlm_sticky
+
+    fast_chart.default_view()
+
+    # TODO: probably factor this into some kinda `DisplayState`
+    # API that can be reused at least in terms of pulling view
+    # params (eg ``.bars_range()``).
+    async def increment_history_view():
+        i_last = hist_ohlcv.index
+        state = ds.vars.copy() | {
+            'i_last_append': i_last,
+            'i_last': i_last,
+        }
+        _, hist_step_size_s, _ = flume.get_ds_info()
+
+        async with flume.index_stream(
+            # int(hist_step_size_s)
+            # TODO: seems this is more reliable at keeping the slow
+            # chart incremented in view more correctly?
+            # - It might make sense to just inline this logic with the
+            #   main display task? => it's a tradeoff of slower task
+            #   wakeups/ctx switches verus logic checks (as normal)
+            # - we need increment logic that only does the view shift
+            #   call when the uppx permits/needs it
+            int(1),
+        ) as istream:
+            async for msg in istream:
+
+                # check if slow chart needs an x-domain shift and/or
+                # y-range resize.
+                (
+                    uppx,
+                    liv,
+                    do_append,
+                    i_diff,
+                    append_diff,
+                    do_rt_update,
+                ) = ds.incr_info(
+                    chart=hist_chart,
+                    shm=ds.hist_ohlcv,
+                    state=state,
+                    # update_state=False,
+                )
+                # print(
+                #     f'liv: {liv}\n'
+                #     f'do_append: {do_append}\n'
+                #     f'append_diff: {append_diff}\n'
+                # )
+
+                if (
+                    do_append
+                    and liv
+                ):
+                    hist_chart.increment_view(steps=i_diff)
+                    hist_chart.view._set_yrange(yrange=hist_chart.maxmin())
+
+    nurse.start_soon(increment_history_view)

     # main real-time quotes update loop
+    stream: tractor.MsgStream = flume.stream
     async for quotes in stream:

         ds.quotes = quotes
@@ -292,15 +414,16 @@ async def graphics_update_loop(
             last_quote = time.time()

         # chart isn't active/shown so skip render cycle and pause feed(s)
-        if chart.linked.isHidden():
-            chart.pause_all_feeds()
+        if fast_chart.linked.isHidden():
+            # print('skipping update')
+            fast_chart.pause_all_feeds()
             continue

-        ic = chart.view._ic
-        if ic:
-            chart.pause_all_feeds()
-            await ic.wait()
-            chart.resume_all_feeds()
+        # ic = fast_chart.view._ic
+        # if ic:
+        #     fast_chart.pause_all_feeds()
+        #     await ic.wait()
+        #     fast_chart.resume_all_feeds()

         # sync call to update all graphics/UX components.
         graphics_update_cycle(ds)
@@ -317,8 +440,10 @@ def graphics_update_cycle(
     # hopefully XD

     chart = ds.chart
+    # TODO: just pass this as a direct ref to avoid so many attr accesses?
+    hist_chart = ds.godwidget.hist_linked.chart

-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         msg=f'Graphics loop cycle for: `{chart.name}`',
         delayed=True,
         disabled=not pg_profile_enabled(),
@@ -330,53 +455,24 @@ def graphics_update_cycle(

     # unpack multi-referenced components
     vlm_chart = ds.vlm_chart
+
+    # rt "HFT" chart
     l1 = ds.l1
     ohlcv = ds.ohlcv
     array = ohlcv.array

     vars = ds.vars
     tick_margin = vars['tick_margin']

-    update_uppx = 16
-
     for sym, quote in ds.quotes.items():
-        # compute the first available graphic's x-units-per-pixel
-        uppx = vlm_chart.view.x_uppx()
-
-        # NOTE: vlm may be written by the ``brokerd`` backend
-        # event though a tick sample is not emitted.
-        # TODO: show dark trades differently
-        # https://github.com/pikers/piker/issues/116
-
-        # NOTE: this used to be implemented in a dedicated
-        # "increment task": ``check_for_new_bars()`` but it doesn't
-        # make sense to do a whole task switch when we can just do
-        # this simple index-diff and all the fsp sub-curve graphics
-        # are diffed on each draw cycle anyway; so updates to the
-        # "curve" length is already automatic.
-
-        # increment the view position by the sample offset.
-        i_step = ohlcv.index
-        i_diff = i_step - vars['i_last']
-        vars['i_last'] = i_step
-
-        append_diff = i_step - vars['i_last_append']
-
-        # update the "last datum" (aka extending the flow graphic with
-        # new data) only if the number of unit steps is >= the number of
-        # such unit steps per pixel (aka uppx). Iow, if the zoom level
-        # is such that a datum(s) update to graphics wouldn't span
-        # to a new pixel, we don't update yet.
-        do_append = (append_diff >= uppx)
-        if do_append:
-            vars['i_last_append'] = i_step
-
-        do_rt_update = uppx < update_uppx
-        # print(
-        #     f'append_diff:{append_diff}\n'
-        #     f'uppx:{uppx}\n'
-        #     f'do_append: {do_append}'
-        # )
+        (
+            uppx,
+            liv,
+            do_append,
+            i_diff,
+            append_diff,
+            do_rt_update,
+        ) = ds.incr_info()

     # TODO: we should only run mxmn when we know
     # an update is due via ``do_append`` above.
@@ -392,8 +488,6 @@ def graphics_update_cycle(

     profiler('`ds.maxmin()` call')

-    liv = r >= i_step  # the last datum is in view
-
     if (
         prepend_update_index is not None
         and lbar > prepend_update_index
@@ -408,18 +502,11 @@ def graphics_update_cycle(
     # don't real-time "shift" the curve to the
     # left unless we get one of the following:
     if (
-        (
-            # i_diff > 0  # no new sample step
-            do_append
-            # and uppx < 4  # chart is zoomed out very far
-            and liv
-        )
+        (do_append and liv)
         or trigger_all
     ):
-        # TODO: we should track and compute whether the last
-        # pixel in a curve should show new data based on uppx
-        # and then iff update curves and shift?
         chart.increment_view(steps=i_diff)
+        chart.view._set_yrange(yrange=(mn, mx))

         if vlm_chart:
             vlm_chart.increment_view(steps=i_diff)
@@ -477,7 +564,10 @@ def graphics_update_cycle(
     ):
         chart.update_graphics_from_flow(
             chart.name,
-            # do_append=uppx < update_uppx,
+            do_append=do_append,
+        )
+        hist_chart.update_graphics_from_flow(
+            chart.name,
             do_append=do_append,
         )
@@ -517,6 +607,9 @@ def graphics_update_cycle(
         ds.last_price_sticky.update_from_data(
             *end[['index', 'close']]
         )
+        ds.hist_last_price_sticky.update_from_data(
+            *end[['index', 'close']]
+        )

         if wap_in_history:
             # update vwap overlay line
@@ -564,26 +657,44 @@ def graphics_update_cycle(
         l1.bid_label.update_fields({'level': price, 'size': size})

     # check for y-range re-size
-    if (
-        (mx > vars['last_mx']) or (mn < vars['last_mn'])
-        and not chart._static_yrange == 'axis'
-        and liv
-    ):
-        main_vb = chart.view
+    if (mx > vars['last_mx']) or (mn < vars['last_mn']):
+
+        # fast chart resize case
         if (
-            main_vb._ic is None
-            or not main_vb._ic.is_set()
+            liv
+            and not chart._static_yrange == 'axis'
         ):
-            # print(f'updating range due to mxmn')
-            main_vb._set_yrange(
-                # TODO: we should probably scale
-                # the view margin based on the size
-                # of the true range? This way you can
-                # slap in orders outside the current
-                # L1 (only) book range.
-                # range_margin=0.1,
-                yrange=(mn, mx),
-            )
+            main_vb = chart.view
+            if (
+                main_vb._ic is None
+                or not main_vb._ic.is_set()
+            ):
+                # print(f'updating range due to mxmn')
+                main_vb._set_yrange(
+                    # TODO: we should probably scale
+                    # the view margin based on the size
+                    # of the true range? This way you can
+                    # slap in orders outside the current
+                    # L1 (only) book range.
+                    # range_margin=0.1,
+                    yrange=(mn, mx),
+                )
+
+        # check if slow chart needs a resize
+        (
+            _,
+            hist_liv,
+            _,
+            _,
+            _,
+            _,
+        ) = ds.incr_info(
+            chart=hist_chart,
+            shm=ds.hist_ohlcv,
+            update_state=False,
+        )
+        if hist_liv:
+            hist_chart.view._set_yrange(yrange=hist_chart.maxmin())

     # XXX: update this every draw cycle to make L1-always-in-view work.
     vars['last_mx'], vars['last_mn'] = mx, mn
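The y-range hunk restructures the resize predicate rather than just re-indenting it. In the old form, Python's precedence (where `and` binds tighter than `or`) parsed the condition as `A or (B and C and D)`, so a fresh high (`mx > last_mx`) forced a resize even when the last datum wasn't in view. The new form checks the extremes once, then branches into separate fast-chart and slow-chart resize paths:

```python
# precedence sketch of why the old combined predicate was suspect:
#
#     A or B and C and D   ==   A or (B and C and D)
#
# i.e. `liv` and the static-yrange guard never applied to the
# new-high case `A`. The rewrite nests the checks explicitly:

def needs_resize(mx, mn, last_mx, last_mn) -> bool:
    # outer gate: did the y-extremes actually grow?
    return (mx > last_mx) or (mn < last_mn)

def do_fast_resize(liv: bool, static_yrange) -> bool:
    # inner gate: only touch the fast chart when its last datum
    # is in view and the y-range isn't pinned to the axis.
    return liv and static_yrange != 'axis'
```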
@@ -700,10 +811,144 @@ def graphics_update_cycle(
             flow.draw_last(array_key=curve_name)


+async def link_views_with_region(
+    rt_chart: ChartPlotWidget,
+    hist_chart: ChartPlotWidget,
+    flume: Flume,
+
+) -> None:
+
+    # these value are be only pulled once during shm init/startup
+    izero_hist = flume.izero_hist
+    izero_rt = flume.izero_rt
+
+    # Add the LinearRegionItem to the ViewBox, but tell the ViewBox
+    # to exclude this item when doing auto-range calculations.
+    rt_pi = rt_chart.plotItem
+    hist_pi = hist_chart.plotItem
+
+    region = pg.LinearRegionItem(
+        movable=False,
+        # color scheme that matches sidepane styling
+        pen=pg.mkPen(hcolor('gunmetal')),
+        brush=pg.mkBrush(hcolor('default_darkest')),
+    )
+    region.setZValue(10)  # put linear region "in front" in layer terms
+
+    hist_pi.addItem(region, ignoreBounds=True)
+
+    flow = rt_chart._flows[hist_chart.name]
+    assert flow
+
+    # XXX: no idea why this doesn't work but it's causing
+    # a weird placement of the region on the way-far-left..
+    # region.setClipItem(flow.graphics)
+
+    # poll for datums load and timestep detection
+    for _ in range(100):
+        try:
+            _, _, ratio = flume.get_ds_info()
+            break
+        except IndexError:
+            await trio.sleep(0.01)
+            continue
+    else:
+        raise RuntimeError(
+            'Failed to detect sampling periods from shm!?')
+
+    # sampling rate transform math:
+    # -----------------------------
+    # define the fast chart to slow chart as a linear mapping
+    # over the fast index domain `i` to the slow index domain
+    # `j` as:
+    #
+    # j = i - i_offset
+    #     ------------ + j_offset
+    #         j/i
+    #
+    # conversely the inverse function is:
+    #
+    # i = j/i * (j - j_offset) + i_offset
+    #
+    # Where `j_offset` is our ``izero_hist`` and `i_offset` is our
+    # `izero_rt`, the ``ShmArray`` offsets which correspond to the
+    # indexes in each array where the "current" time is indexed at init.
+    # AKA the index where new data is "appended to" and historical data
+    # if "prepended from".
+    #
+    # more practically (and by default) `i` is normally an index
+    # into 1s samples and `j` is an index into 60s samples (aka 1m).
+    # in the below handlers ``ratio`` is the `j/i` and ``mn``/``mx``
+    # are the low and high index input from the source index domain.
+
+    def update_region_from_pi(
+        window,
+        viewRange: tuple[tuple, tuple],
+        is_manual: bool = True,
+
+    ) -> None:
+        # put linear region "in front" in layer terms
+        region.setZValue(10)
+
+        # set the region on the history chart
+        # to the range currently viewed in the
+        # HFT/real-time chart.
+        mn, mx = viewRange[0]
+        ds_mn = (mn - izero_rt)/ratio
+        ds_mx = (mx - izero_rt)/ratio
+        lhmn = ds_mn + izero_hist
+        lhmx = ds_mx + izero_hist
+        # print(
+        #     f'rt_view_range: {(mn, mx)}\n'
+        #     f'ds_mn, ds_mx: {(ds_mn, ds_mx)}\n'
+        #     f'lhmn, lhmx: {(lhmn, lhmx)}\n'
+        # )
+        region.setRegion((
+            lhmn,
+            lhmx,
+        ))
+
+        # TODO: if we want to have the slow chart adjust range to
+        # match the fast chart's selection -> results in the
+        # linear region expansion never can go "outside of view".
+        # hmn, hmx = hvr = hist_chart.view.state['viewRange'][0]
+        # print((hmn, hmx))
+        # if (
+        #     hvr
+        #     and (lhmn < hmn or lhmx > hmx)
+        # ):
+        #     hist_pi.setXRange(
+        #         lhmn,
+        #         lhmx,
+        #         padding=0,
+        #     )
+        #     hist_linked.graphics_cycle()
+
+    # connect region to be updated on plotitem interaction.
+    rt_pi.sigRangeChanged.connect(update_region_from_pi)
+
+    def update_pi_from_region():
+        region.setZValue(10)
+        mn, mx = region.getRegion()
+        # print(f'region_x: {(mn, mx)}')
+        rt_pi.setXRange(
+            ((mn - izero_hist) * ratio) + izero_rt,
+            ((mx - izero_hist) * ratio) + izero_rt,
+            padding=0,
+        )
+
+    # TODO BUG XXX: seems to cause a real perf hit and a recursion error
+    # (but used to work before generalizing for 1s ohlc offset?)..
+    # something to do with the label callback handlers?
+
+    # region.sigRegionChanged.connect(update_pi_from_region)
+    # region.sigRegionChangeFinished.connect(update_pi_from_region)
+
+
 async def display_symbol_data(
     godwidget: GodWidget,
     provider: str,
-    sym: str,
+    fqsns: list[str],
     loglevel: str,
     order_mode_started: trio.Event,
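`link_views_with_region()` keeps the two x-domains in lock step through the linear map documented in the hunk above: with 1s fast samples and 1m slow samples the `ratio` is 60, and the two shm zero-offsets anchor the "now" index of each array. A quick numeric check of both directions (offsets made up for the example):

```python
def rt_to_hist(i, izero_rt, izero_hist, ratio):
    # j = (i - i_offset) / (j/i) + j_offset
    return (i - izero_rt) / ratio + izero_hist

def hist_to_rt(j, izero_rt, izero_hist, ratio):
    # i = j/i * (j - j_offset) + i_offset
    return ratio * (j - izero_hist) + izero_rt

# 1s rt samples vs 1m hist samples -> ratio == 60
izero_rt, izero_hist, ratio = 3000, 1000, 60

# 120 fast samples (2 minutes) past the rt zero index land exactly
# 2 slow samples past the hist zero index..
j = rt_to_hist(3120, izero_rt, izero_hist, ratio)
assert j == 1002.0
# ..and the inverse maps straight back.
assert hist_to_rt(j, izero_rt, izero_hist, ratio) == 3120.0
```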
@@ -717,11 +962,6 @@ async def display_symbol_data(

     '''
     sbar = godwidget.window.status_bar
-    loading_sym_key = sbar.open_status(
-        f'loading {sym}.{provider} ->',
-        group_key=True
-    )

     # historical data fetch
     # brokermod = brokers.get_brokermod(provider)
@@ -730,10 +970,17 @@ async def display_symbol_data(
     #     clear_on_next=True,
     #     group_key=loading_sym_key,
     # )
-    fqsn = '.'.join((sym, provider))

+    for fqsn in fqsns:
+
+        loading_sym_key = sbar.open_status(
+            f'loading {fqsn} ->',
+            group_key=True
+        )
+
+    feed: Feed
     async with open_feed(
-        [fqsn],
+        fqsns,
         loglevel=loglevel,

         # limit to at least display's FPS
@@ -741,15 +988,19 @@ async def display_symbol_data(
         tick_throttle=_quote_throttle_rate,

     ) as feed:
-        ohlcv: ShmArray = feed.shm
-        bars = ohlcv.array
-        symbol = feed.symbols[sym]
-        fqsn = symbol.front_fqsn()

-        times = bars['time']
-        end = pendulum.from_timestamp(times[-1])
-        start = pendulum.from_timestamp(times[times != times[-1]][-1])
-        step_size_s = (end - start).seconds
+        # TODO: right now we only show one symbol on charts, but
+        # overlays are coming muy pronto guey..
+        assert len(feed.flumes) == 1
+        flume = list(feed.flumes.values())[0]
+
+        ohlcv: ShmArray = flume.rt_shm
+        hist_ohlcv: ShmArray = flume.hist_shm
+
+        symbol = flume.symbol
+        fqsn = symbol.fqsn
+
+        step_size_s = 1
         tf_key = tf_in_1s[step_size_s]

         # load in symbol's ohlc data
@@ -759,59 +1010,84 @@ async def display_symbol_data(
             f'step:{tf_key} '
         )

-        linked = godwidget.linkedsplits
-        linked._symbol = symbol
+        rt_linked = godwidget.rt_linked
+        rt_linked._symbol = symbol
+
+        # create top history view chart above the "main rt chart".
+        hist_linked = godwidget.hist_linked
+        hist_linked._symbol = symbol
+        hist_chart = hist_linked.plot_ohlc_main(
+            symbol,
+            hist_ohlcv,
+            # in the case of history chart we explicitly set `False`
+            # to avoid internal pane creation.
+            # sidepane=False,
+            sidepane=godwidget.search,
+        )
+        # don't show when not focussed
+        hist_linked.cursor.always_show_xlabel = False

         # generate order mode side-pane UI
         # A ``FieldsForm`` form to configure order entry
+        # and add as next-to-y-axis singleton pane
         pp_pane: FieldsForm = mk_order_pane_layout(godwidget)

-        # add as next-to-y-axis singleton pane
         godwidget.pp_pane = pp_pane

         # create main OHLC chart
-        chart = linked.plot_ohlc_main(
+        ohlc_chart = rt_linked.plot_ohlc_main(
             symbol,
             ohlcv,
+            # in the case of history chart we explicitly set `False`
+            # to avoid internal pane creation.
             sidepane=pp_pane,
         )
-        chart.default_view()
-        chart._feeds[symbol.key] = feed
-        chart.setFocus()

-        # plot historical vwap if available
-        wap_in_history = False
+        ohlc_chart._feeds[symbol.key] = feed
+        ohlc_chart.setFocus()

         # XXX: FOR SOME REASON THIS IS CAUSING HANGZ!?!
-        # if brokermod._show_wap_in_history:
-
-        #     if 'bar_wap' in bars.dtype.fields:
-        #         wap_in_history = True
-        #         chart.draw_curve(
-        #             name='bar_wap',
-        #             shm=ohlcv,
-        #             color='default_light',
-        #             add_label=False,
-        #         )
-
-        # size view to data once at outset
-        chart.cv._set_yrange()
+        # plot historical vwap if available
+        wap_in_history = False
+        # if (
+        #     brokermod._show_wap_in_history
+        #     and 'bar_wap' in bars.dtype.fields
+        # ):
+        #     wap_in_history = True
+        #     ohlc_chart.draw_curve(
+        #         name='bar_wap',
+        #         shm=ohlcv,
+        #         color='default_light',
+        #         add_label=False,
+        #     )

         # NOTE: we must immediately tell Qt to show the OHLC chart
         # to avoid a race where the subplots get added/shown to
         # the linked set *before* the main price chart!
-        linked.show()
-        linked.focus()
+        rt_linked.show()
+        rt_linked.focus()
         await trio.sleep(0)

+        # NOTE: here we insert the slow-history chart set into
+        # the fast chart's splitter -> so it's a splitter of charts
+        # inside the first widget slot of a splitter of charts XD
+        rt_linked.splitter.insertWidget(0, hist_linked)
+        # XXX: if we wanted it at the bottom?
+        # rt_linked.splitter.addWidget(hist_linked)
+        rt_linked.focus()
+
+        godwidget.resize_all()
+
         vlm_chart: Optional[ChartPlotWidget] = None
         async with trio.open_nursery() as ln:

             # if available load volume related built-in display(s)
-            if has_vlm(ohlcv):
+            if (
+                not symbol.broker_info[provider].get('no_vlm', False)
+                and has_vlm(ohlcv)
+            ):
                 vlm_chart = await ln.start(
                     open_vlm_displays,
-                    linked,
+                    rt_linked,
                     ohlcv,
                 )
@@ -819,7 +1095,7 @@ async def display_symbol_data(
             # from an input config.
             ln.start_soon(
                 start_fsp_displays,
-                linked,
+                rt_linked,
                 ohlcv,
                 loading_sym_key,
                 loglevel,
@@ -828,36 +1104,79 @@ async def display_symbol_data(
             # start graphics update loop after receiving first live quote
             ln.start_soon(
                 graphics_update_loop,
-                linked,
-                feed.stream,
-                ohlcv,
+                ln,
+                godwidget,
+                flume,
                 wap_in_history,
                 vlm_chart,
             )

+            await trio.sleep(0)
+
+            # size view to data prior to order mode init
+            ohlc_chart.default_view()
+            rt_linked.graphics_cycle()
+            await trio.sleep(0)
+
+            hist_chart.default_view(
+                bars_from_y=int(len(hist_ohlcv.array)),  # size to data
+                y_offset=6116*2,  # push it a little away from the y-axis
+            )
+            hist_linked.graphics_cycle()
+            await trio.sleep(0)
+
+            godwidget.resize_all()
+
+            await link_views_with_region(
+                ohlc_chart,
+                hist_chart,
+                flume,
+            )
+
+            mode: OrderMode
             async with (
                 open_order_mode(
                     feed,
-                    chart,
+                    godwidget,
                     fqsn,
                     order_mode_started
-                )
+                ) as mode
             ):
+                if not vlm_chart:
+                    # trigger another view reset if no sub-chart
+                    ohlc_chart.default_view()
+
+                rt_linked.mode = mode
+
                 # let Qt run to render all widgets and make sure the
                 # sidepanes line up vertically.
                 await trio.sleep(0)
-                linked.resize_sidepanes()
+
+                # dynamic resize steps
+                godwidget.resize_all()
+
+                # TODO: look into this because not sure why it was
+                # commented out / we ever needed it XD
                 # NOTE: we pop the volume chart from the subplots set so
                 # that it isn't double rendered in the display loop
                 # above since we do a maxmin calc on the volume data to
                 # determine if auto-range adjustements should be made.
-                # linked.subplots.pop('volume', None)
+                # rt_linked.subplots.pop('volume', None)

                 # TODO: make this not so shit XD
                 # close group status
                 sbar._status_groups[loading_sym_key][1]()

+                hist_linked.graphics_cycle()
+                await trio.sleep(0)
+
+                bars_in_mem = int(len(hist_ohlcv.array))
+                hist_chart.default_view(
+                    bars_from_y=bars_in_mem,  # size to data
+                    # push it 1/16th away from the y-axis
+                    y_offset=round(bars_in_mem / 16),
+                )
+                godwidget.resize_all()
+
                 # let the app run.. bby
-                # linked.graphics_cycle()
                 await trio.sleep_forever()
@@ -18,11 +18,27 @@
 Higher level annotation editors.

 """
-from dataclasses import dataclass, field
-from typing import Optional
+from __future__ import annotations
+from collections import defaultdict
+from typing import (
+    Optional,
+    TYPE_CHECKING
+)

 import pyqtgraph as pg
-from pyqtgraph import ViewBox, Point, QtCore, QtGui
+from pyqtgraph import (
+    ViewBox,
+    Point,
+    QtCore,
+    QtWidgets,
+)
+from PyQt5.QtGui import (
+    QColor,
+)
+from PyQt5.QtWidgets import (
+    QLabel,
+)

 from pyqtgraph import functions as fn
 from PyQt5.QtCore import QPointF
 import numpy as np
@@ -30,28 +46,34 @@ import numpy as np
 from ._style import hcolor, _font
 from ._lines import LevelLine
 from ..log import get_logger
+from ..data.types import Struct
+
+if TYPE_CHECKING:
+    from ._chart import GodWidget


 log = get_logger(__name__)


-@dataclass
-class ArrowEditor:
+class ArrowEditor(Struct):

-    chart: 'ChartPlotWidget'  # noqa
-    _arrows: field(default_factory=dict)
+    godw: GodWidget = None  # type: ignore # noqa
+    _arrows: dict[str, list[pg.ArrowItem]] = {}

     def add(
         self,
+        plot: pg.PlotItem,
         uid: str,
         x: float,
         y: float,
         color='default',
         pointing: Optional[str] = None,
-    ) -> pg.ArrowItem:
-        """Add an arrow graphic to view at given (x, y).

-        """
+    ) -> pg.ArrowItem:
+        '''
+        Add an arrow graphic to view at given (x, y).
+
+        '''
         angle = {
             'up': 90,
             'down': -90,
@@ -74,25 +96,25 @@ class ArrowEditor:
             brush=pg.mkBrush(hcolor(color)),
         )
         arrow.setPos(x, y)
-        self._arrows[uid] = arrow
+        self._arrows.setdefault(uid, []).append(arrow)

         # render to view
-        self.chart.plotItem.addItem(arrow)
+        plot.addItem(arrow)

         return arrow

     def remove(self, arrow) -> bool:
-        self.chart.plotItem.removeItem(arrow)
+        for linked in self.godw.iter_linked():
+            linked.chart.plotItem.removeItem(arrow)


-@dataclass
-class LineEditor:
-    '''The great editor of linez.
+class LineEditor(Struct):
+    '''
+    The great editor of linez.

     '''
-    chart: 'ChartPlotWidget' = None  # type: ignore # noqa
-    _order_lines: dict[str, LevelLine] = field(default_factory=dict)
+    godw: GodWidget = None  # type: ignore # noqa
+    _order_lines: defaultdict[str, LevelLine] = defaultdict(list)
     _active_staged_line: LevelLine = None

     def stage_line(
@@ -100,11 +122,11 @@ class LineEditor:
         line: LevelLine,

     ) -> LevelLine:
-        """Stage a line at the current chart's cursor position
+        '''
+        Stage a line at the current chart's cursor position
         and return it.

-        """
+        '''

         # add a "staged" cursor-tracking line to view
         # and cash it in a a var
         if self._active_staged_line:
@@ -115,17 +137,25 @@ class LineEditor:
         return line

     def unstage_line(self) -> LevelLine:
-        """Inverse of ``.stage_line()``.
+        '''
+        Inverse of ``.stage_line()``.

-        """
-        # chart = self.chart._cursor.active_plot
-        # # chart.setCursor(QtCore.Qt.ArrowCursor)
-        cursor = self.chart.linked.cursor
+        '''
+        cursor = self.godw.get_cursor()
+        if not cursor:
+            return None

         # delete "staged" cursor tracking line from view
         line = self._active_staged_line
         if line:
-            cursor._trackers.remove(line)
+            try:
+                cursor._trackers.remove(line)
+            except KeyError:
+                # when the current cursor doesn't have said line
+                # registered (probably means that user held order mode
+                # key while panning to another view) then we just
+                # ignore the remove error.
+                pass
             line.delete()

         self._active_staged_line = None
@@ -133,55 +163,58 @@ class LineEditor:
         # show the crosshair y line and label
         cursor.show_xhair()

-    def submit_line(
+    def submit_lines(
         self,
-        line: LevelLine,
+        lines: list[LevelLine],
         uuid: str,

     ) -> LevelLine:

-        staged_line = self._active_staged_line
-        if not staged_line:
-            raise RuntimeError("No line is currently staged!?")
+        # staged_line = self._active_staged_line
+        # if not staged_line:
+        #     raise RuntimeError("No line is currently staged!?")

         # for now, until submission reponse arrives
-        line.hide_labels()
+        for line in lines:
+            line.hide_labels()

         # register for later lookup/deletion
-        self._order_lines[uuid] = line
+        self._order_lines[uuid] += lines

-        return line
+        return lines

-    def commit_line(self, uuid: str) -> LevelLine:
-        """Commit a "staged line" to view.
+    def commit_line(self, uuid: str) -> list[LevelLine]:
+        '''
+        Commit a "staged line" to view.

         Submits the line graphic under the cursor as a (new) permanent
         graphic in view.

-        """
-        try:
-            line = self._order_lines[uuid]
-        except KeyError:
-            log.warning(f'No line for {uuid} could be found?')
-            return
-        else:
-            line.show_labels()
+        '''
+        lines = self._order_lines[uuid]
+        if lines:
+            for line in lines:
+                line.show_labels()
+                line.hide_markers()
+                log.debug(f'Level active for level: {line.value()}')
+                # TODO: other flashy things to indicate the order is active

-            # TODO: other flashy things to indicate the order is active
-
-            log.debug(f'Level active for level: {line.value()}')
-
-            return line
+        return lines

     def lines_under_cursor(self) -> list[LevelLine]:
-        """Get the line(s) under the cursor position.
+        '''
+        Get the line(s) under the cursor position.

-        """
+        '''
         # Delete any hoverable under the cursor
-        return self.chart.linked.cursor._hovered
+        return self.godw.get_cursor()._hovered

-    def all_lines(self) -> tuple[LevelLine]:
-        return tuple(self._order_lines.values())
+    def all_lines(self) -> list[LevelLine]:
+        all_lines = []
+        for lines in list(self._order_lines.values()):
+            all_lines.extend(lines)
+
+        return all_lines

     def remove_line(
         self,
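The editor hunks above change the order-line registry from one `LevelLine` per uuid to a list per uuid via `defaultdict(list)`, so a single order id can own a line on each linked chart (eg. fast and history). A small sketch of why the `+=` registration and the new `all_lines()` flattening work (`Line` is a stand-in for `LevelLine`):

```python
from collections import defaultdict

class Line:  # stand-in for LevelLine, sketch only
    def __init__(self, name: str):
        self.name = name

registry: defaultdict[str, list[Line]] = defaultdict(list)

# `+=` on a missing key transparently creates the empty list first,
# which is what makes `self._order_lines[uuid] += lines` safe:
registry['oid-1'] += [Line('rt'), Line('hist')]
registry['oid-1'] += [Line('vlm')]
assert [l.name for l in registry['oid-1']] == ['rt', 'hist', 'vlm']

# flattening mirrors the new `all_lines()`:
all_lines = []
for lines in registry.values():
    all_lines.extend(lines)
assert len(all_lines) == 3
```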
@ -196,29 +229,30 @@ class LineEditor:
|
||||||
|
|
||||||
'''
|
'''
|
||||||
# try to look up line from our registry
|
# try to look up line from our registry
|
||||||
line = self._order_lines.pop(uuid, line)
|
lines = self._order_lines.pop(uuid, None)
|
||||||
if line:
|
if lines:
|
||||||
|
cursor = self.godw.get_cursor()
|
||||||
|
if cursor:
|
||||||
|
for line in lines:
|
||||||
|
# if hovered remove from cursor set
|
||||||
|
hovered = cursor._hovered
|
||||||
|
if line in hovered:
|
||||||
|
hovered.remove(line)
|
||||||
|
|
||||||
# if hovered remove from cursor set
|
log.debug(f'deleting {line} with oid: {uuid}')
|
||||||
cursor = self.chart.linked.cursor
|
line.delete()
|
||||||
hovered = cursor._hovered
|
|
||||||
if line in hovered:
|
|
||||||
hovered.remove(line)
|
|
||||||
|
|
||||||
# make sure the xhair doesn't get left off
|
# make sure the xhair doesn't get left off
|
||||||
# just because we never got a un-hover event
|
# just because we never got a un-hover event
|
||||||
cursor.show_xhair()
|
cursor.show_xhair()
|
||||||
|
|
||||||
log.debug(f'deleting {line} with oid: {uuid}')
|
|
||||||
line.delete()
|
|
||||||
|
|
||||||
else:
|
else:
|
||||||
log.warning(f'Could not find line for {line}')
|
log.warning(f'Could not find line for {line}')
|
||||||
|
|
||||||
return line
|
return lines
|
||||||
|
|
||||||
|
|
||||||
-class SelectRect(QtGui.QGraphicsRectItem):
+class SelectRect(QtWidgets.QGraphicsRectItem):

     def __init__(
         self,
@@ -227,12 +261,12 @@ class SelectRect(QtGui.QGraphicsRectItem):
     ) -> None:
         super().__init__(0, 0, 1, 1)

-        # self.rbScaleBox = QtGui.QGraphicsRectItem(0, 0, 1, 1)
+        # self.rbScaleBox = QGraphicsRectItem(0, 0, 1, 1)
         self.vb = viewbox
         self._chart: 'ChartPlotWidget' = None  # noqa

         # override selection box color
-        color = QtGui.QColor(hcolor(color))
+        color = QColor(hcolor(color))
         self.setPen(fn.mkPen(color, width=1))
         color.setAlpha(66)
         self.setBrush(fn.mkBrush(color))
@@ -240,7 +274,7 @@ class SelectRect(QtGui.QGraphicsRectItem):
         self.hide()
         self._label = None

-        label = self._label = QtGui.QLabel()
+        label = self._label = QLabel()
         label.setTextFormat(0)  # markdown
         label.setFont(_font.font)
         label.setMargin(0)
@@ -277,8 +311,8 @@ class SelectRect(QtGui.QGraphicsRectItem):
         # TODO: get bg color working
         palette.setColor(
             self._label.backgroundRole(),
-            # QtGui.QColor(chart.backgroundBrush()),
-            QtGui.QColor(hcolor('papas_special')),
+            # QColor(chart.backgroundBrush()),
+            QColor(hcolor('papas_special')),
         )

     def update_on_resize(self, vr, r):
@@ -326,7 +360,7 @@ class SelectRect(QtGui.QGraphicsRectItem):

         self.setPos(r.topLeft())
         self.resetTransform()
-        self.scale(r.width(), r.height())
+        self.setRect(r)
         self.show()

         y1, y2 = start_pos.y(), end_pos.y()

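The `SelectRect` hunks are part of a mechanical Qt namespace cleanup: in Qt5 the widget and graphics-item classes live in `QtWidgets`, while `QtGui` keeps only painting primitives like `QColor` and `QPainterPath`. A sketch of the import style the new side of the diff uses:

from PyQt5 import QtWidgets
from PyQt5.QtGui import QColor  # painting/primitive types stay in QtGui
from PyQt5.QtWidgets import QLabel, QGraphicsRectItem


class SelectionBox(QtWidgets.QGraphicsRectItem):  # was QtGui.QGraphicsRectItem
    # subclassing from the correct Qt5 module; behavior is unchanged
    pass
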
@@ -18,11 +18,11 @@
 Qt event proxying and processing using ``trio`` mem chans.

 """
-from contextlib import asynccontextmanager, AsyncExitStack
+from contextlib import asynccontextmanager as acm
 from typing import Callable

-from pydantic import BaseModel
 import trio
+from tractor.trionics import gather_contexts
 from PyQt5 import QtCore
 from PyQt5.QtCore import QEvent, pyqtBoundSignal
 from PyQt5.QtWidgets import QWidget
@@ -30,6 +30,8 @@ from PyQt5.QtWidgets import (
     QGraphicsSceneMouseEvent as gs_mouse,
 )

+from ..data.types import Struct


 MOUSE_EVENTS = {
     gs_mouse.GraphicsSceneMousePress,
@@ -43,13 +45,10 @@ MOUSE_EVENTS = {
 # TODO: maybe consider some constrained ints down the road?
 # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types

-class KeyboardMsg(BaseModel):
+class KeyboardMsg(Struct):
     '''Unpacked Qt keyboard event data.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-
     event: QEvent
     etype: int
     key: int
@@ -57,16 +56,13 @@ class KeyboardMsg(BaseModel):
     txt: str

     def to_tuple(self) -> tuple:
-        return tuple(self.dict().values())
+        return tuple(self.to_dict().values())


-class MouseMsg(BaseModel):
+class MouseMsg(Struct):
     '''Unpacked Qt keyboard event data.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-
     event: QEvent
     etype: int
     button: int

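The `KeyboardMsg`/`MouseMsg` hunks swap pydantic's `BaseModel` for a `msgspec.Struct` subclass, which holds arbitrary (unvalidated) types such as `QEvent` without needing pydantic's `Config.arbitrary_types_allowed` knob. A hedged, self-contained sketch of the migration, where the `to_dict()` helper is an assumption standing in for piker's `piker.data.types.Struct` wrapper:

import msgspec


class Struct(msgspec.Struct):
    # assumed helper mirroring the ``to_dict()`` call used in the diff
    def to_dict(self) -> dict:
        return {f: getattr(self, f) for f in self.__struct_fields__}


class KeyboardMsg(Struct):
    etype: int
    key: int
    txt: str

    def to_tuple(self) -> tuple:
        return tuple(self.to_dict().values())


msg = KeyboardMsg(etype=6, key=81, txt='q')
assert msg.to_tuple() == (6, 81, 'q')
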
@@ -160,7 +156,7 @@ class EventRelay(QtCore.QObject):
         return False


-@asynccontextmanager
+@acm
 async def open_event_stream(

     source_widget: QWidget,
@@ -186,7 +182,7 @@ async def open_event_stream(
         source_widget.removeEventFilter(kc)


-@asynccontextmanager
+@acm
 async def open_signal_handler(

     signal: pyqtBoundSignal,
@@ -211,7 +207,7 @@ async def open_signal_handler(
         yield


-@asynccontextmanager
+@acm
 async def open_handlers(

     source_widgets: list[QWidget],
@@ -220,16 +216,14 @@ async def open_handlers(
     **kwargs,

 ) -> None:

     async with (
         trio.open_nursery() as n,
-        AsyncExitStack() as stack,
+        gather_contexts([
+            open_event_stream(widget, event_types, **kwargs)
+            for widget in source_widgets
+        ]) as streams,
     ):
-        for widget in source_widgets:
-
-            event_recv_stream = await stack.enter_async_context(
-                open_event_stream(widget, event_types, **kwargs)
-            )
+        for widget, event_recv_stream in zip(source_widgets, streams):
             n.start_soon(async_handler, widget, event_recv_stream)

         yield

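The `open_handlers()` rewrite replaces serial `AsyncExitStack` entry with `tractor.trionics.gather_contexts`, which enters all the event-stream context managers concurrently and yields their values in input order, so they zip back onto the source widgets. A runnable sketch of the same shape:

from contextlib import asynccontextmanager as acm

import trio
from tractor.trionics import gather_contexts


@acm
async def open_stream(widget):
    # stand-in for ``open_event_stream()``: yields one value per widget
    yield f'stream-for-{widget}'


async def main():
    widgets = ['chart', 'search', 'orders']
    async with gather_contexts([
        open_stream(w) for w in widgets
    ]) as streams:
        # values come back in the same order as the input managers
        for widget, stream in zip(widgets, streams):
            print(widget, '->', stream)


trio.run(main)
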
@@ -20,16 +20,24 @@ Trio - Qt integration

 Run ``trio`` in guest mode on top of the Qt event loop.
 All global Qt runtime settings are mostly defined here.
 """
-from typing import Tuple, Callable, Dict, Any
+from __future__ import annotations
+from typing import (
+    Callable,
+    Any,
+    Type,
+    TYPE_CHECKING,
+)
 import platform
 import traceback

 # Qt specific
 import PyQt5  # noqa
-import pyqtgraph as pg
-from pyqtgraph import QtGui
+from PyQt5.QtWidgets import (
+    QWidget,
+    QMainWindow,
+    QApplication,
+)
 from PyQt5 import QtCore
-# from PyQt5.QtGui import QLabel, QStatusBar
 from PyQt5.QtCore import (
     pyqtRemoveInputHook,
     Qt,
@@ -37,15 +45,19 @@ from PyQt5.QtCore import (
 )
 import qdarkstyle
 from qdarkstyle import DarkPalette
-# import qdarkgraystyle
+# import qdarkgraystyle  # TODO: play with it
 import trio
 from outcome import Error

-from .._daemon import maybe_open_pikerd, _tractor_kwargs
+from .._daemon import (
+    maybe_open_pikerd,
+    get_tractor_runtime_kwargs,
+)
 from ..log import get_logger
 from ._pg_overrides import _do_overrides
 from . import _style


 log = get_logger(__name__)

 # pyqtgraph global config
@@ -72,17 +84,18 @@ if platform.system() == "Windows":

 def run_qtractor(
     func: Callable,
-    args: Tuple,
-    main_widget: QtGui.QWidget,
-    tractor_kwargs: Dict[str, Any] = {},
-    window_type: QtGui.QMainWindow = None,
+    args: tuple,
+    main_widget_type: Type[QWidget],
+    tractor_kwargs: dict[str, Any] = {},
+    window_type: QMainWindow = None,

 ) -> None:
     # avoids annoying message when entering debugger from qt loop
     pyqtRemoveInputHook()

-    app = QtGui.QApplication.instance()
+    app = QApplication.instance()
     if app is None:
-        app = PyQt5.QtWidgets.QApplication([])
+        app = QApplication([])

     # TODO: we might not need this if it's desired
     # to cancel the tractor machinery on Qt loop
@@ -156,11 +169,11 @@ def run_qtractor(
     # hook into app focus change events
     app.focusChanged.connect(window.on_focus_change)

-    instance = main_widget()
+    instance = main_widget_type()
     instance.window = window

     # override tractor's defaults
-    tractor_kwargs.update(_tractor_kwargs)
+    tractor_kwargs.update(get_tractor_runtime_kwargs())

     # define tractor entrypoint
     async def main():
@@ -178,7 +191,7 @@ def run_qtractor(
         # restrict_keyboard_interrupt_to_checkpoints=True,
     )

-    window.main_widget = main_widget
+    window.godwidget: GodWidget = instance
     window.setCentralWidget(instance)
     if is_windows:
         window.configure_to_desktop()

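For context on the module docstring above: "guest mode" means trio's scheduler ticks get posted back onto the already-running Qt event loop. A minimal sketch of that wiring, assuming a custom re-enter `QEvent` (the names here are illustrative, not piker's actual ones):

import trio
from PyQt5 import QtCore
from PyQt5.QtWidgets import QApplication

REENTER_EVENT = QtCore.QEvent.Type(QtCore.QEvent.registerEventType())


class Reenter(QtCore.QEvent):
    # carries one trio scheduler "tick" callback onto the Qt loop
    def __init__(self, fn):
        super().__init__(REENTER_EVENT)
        self.fn = fn


class Runner(QtCore.QObject):
    def event(self, ev):
        if ev.type() == REENTER_EVENT:
            ev.fn()
            return True
        return super().event(ev)


def main() -> None:
    app = QApplication([])
    runner = Runner()

    def run_sync_soon_threadsafe(fn):
        # queue the trio tick; Qt delivers it via Runner.event()
        app.postEvent(runner, Reenter(fn))

    async def trio_main():
        await trio.sleep(1)
        app.quit()

    trio.lowlevel.start_guest_run(
        trio_main,
        run_sync_soon_threadsafe=run_sync_soon_threadsafe,
        done_callback=lambda result: result.unwrap(),
    )
    app.exec_()


if __name__ == '__main__':
    main()
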
@@ -25,13 +25,10 @@ incremental update.
 from __future__ import annotations
 from typing import (
     Optional,
-    Callable,
-    Union,
 )

 import msgspec
 import numpy as np
-from numpy.lib import recfunctions as rfn
 import pyqtgraph as pg
 from PyQt5.QtGui import QPainterPath
 from PyQt5.QtCore import QLineF
@@ -44,9 +41,10 @@ from .._profile import (
     # ms_slower_then,
 )
 from ._pathops import (
-    gen_ohlc_qpath,
-    ohlc_to_line,
-    to_step_format,
+    IncrementalFormatter,
+    OHLCBarsFmtr,  # Plain OHLC renderer
+    OHLCBarsAsCurveFmtr,  # OHLC converted to line
+    StepCurveFmtr,  # "step" curve (like for vlm)
     xy_downsample,
 )
 from ._ohlc import (
@@ -59,70 +57,12 @@ from ._curve import (
     FlattenedOHLC,
 )
 from ..log import get_logger
+from .._profile import Profiler


 log = get_logger(__name__)


-# class FlowsTable(msgspec.Struct):
-#     '''
-#     Data-AGGRegate: high level API onto multiple (categorized)
-#     ``Flow``s with high level processing routines for
-#     multi-graphics computations and display.
-
-#     '''
-#     flows: dict[str, np.ndarray] = {}


-def update_ohlc_to_line(
-    src_shm: ShmArray,
-    array_key: str,
-    src_update: np.ndarray,
-    slc: slice,
-    ln: int,
-    first: int,
-    last: int,
-    is_append: bool,
-
-) -> np.ndarray:
-
-    fields = ['open', 'high', 'low', 'close']
-    return (
-        rfn.structured_to_unstructured(src_update[fields]),
-        slc,
-    )
-
-
-def ohlc_flat_to_xy(
-    r: Renderer,
-    array: np.ndarray,
-    array_key: str,
-    vr: tuple[int, int],
-
-) -> tuple[
-    np.ndarray,
-    np.nd.array,
-    str,
-]:
-    # TODO: in the case of an existing ``.update_xy()``
-    # should we be passing in array as an xy arrays tuple?
-
-    # 2 more datum-indexes to capture zero at end
-    x_flat = r.x_data[r._xy_first:r._xy_last]
-    y_flat = r.y_data[r._xy_first:r._xy_last]
-
-    # slice to view
-    ivl, ivr = vr
-    x_iv_flat = x_flat[ivl:ivr]
-    y_iv_flat = y_flat[ivl:ivr]
-
-    # reshape to 1d for graphics rendering
-    y_iv = y_iv_flat.reshape(-1)
-    x_iv = x_iv_flat.reshape(-1)
-
-    return x_iv, y_iv, 'all'

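The helpers deleted above were free functions wired onto `Renderer` as callbacks (`allocate_xy`/`update_xy`/`format_xy`); per the new imports they are folded into `IncrementalFormatter` subclasses. A rough sketch of that polymorphic shape (method names here are illustrative, not the exact `._pathops` API):

import numpy as np


class IncrementalFormatter:
    # default: plain index column plus the named data column
    def format_xy(self, array: np.ndarray, key: str):
        return array['index'], array[key], 'all'


class OHLCBarsAsCurveFmtr(IncrementalFormatter):
    # flatten the 4 OHLC columns into a single 1d curve
    def format_xy(self, array: np.ndarray, key: str):
        fields = ['open', 'high', 'low', 'close']
        y = np.stack([array[f] for f in fields], axis=1).reshape(-1)
        x = np.repeat(array['index'], 4)  # 4 y samples per x index
        return x, y, 'all'
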
 def render_baritems(
     flow: Flow,
     graphics: BarItems,
@@ -130,7 +70,7 @@ def render_baritems(
         int, int, np.ndarray,
         int, int, np.ndarray,
     ],
-    profiler: pg.debug.Profiler,
+    profiler: Profiler,
     **kwargs,

 ) -> None:
@@ -154,21 +94,24 @@ def render_baritems(
     r = self._src_r
     if not r:
         show_bars = True

         # OHLC bars path renderer
         r = self._src_r = Renderer(
             flow=self,
-            format_xy=gen_ohlc_qpath,
-            last_read=read,
+            fmtr=OHLCBarsFmtr(
+                shm=flow.shm,
+                flow=flow,
+                _last_read=read,
+            ),
         )

         ds_curve_r = Renderer(
             flow=self,
-            last_read=read,
-
-            # incr update routines
-            allocate_xy=ohlc_to_line,
-            update_xy=update_ohlc_to_line,
-            format_xy=ohlc_flat_to_xy,
+            fmtr=OHLCBarsAsCurveFmtr(
+                shm=flow.shm,
+                flow=flow,
+                _last_read=read,
+            ),
         )

         curve = FlattenedOHLC(
@@ -252,77 +195,6 @@ def render_baritems(
     )


-def update_step_xy(
-    src_shm: ShmArray,
-    array_key: str,
-    y_update: np.ndarray,
-    slc: slice,
-    ln: int,
-    first: int,
-    last: int,
-    is_append: bool,
-
-) -> np.ndarray:
-
-    # for a step curve we slice from one datum prior
-    # to the current "update slice" to get the previous
-    # "level".
-    if is_append:
-        start = max(last - 1, 0)
-        end = src_shm._last.value
-        new_y = src_shm._array[start:end][array_key]
-        slc = slice(start, end)
-
-    else:
-        new_y = y_update
-
-    return (
-        np.broadcast_to(
-            new_y[:, None], (new_y.size, 2),
-        ),
-        slc,
-    )
-
-
-def step_to_xy(
-    r: Renderer,
-    array: np.ndarray,
-    array_key: str,
-    vr: tuple[int, int],
-
-) -> tuple[
-    np.ndarray,
-    np.nd.array,
-    str,
-]:
-
-    # 2 more datum-indexes to capture zero at end
-    x_step = r.x_data[r._xy_first:r._xy_last+2]
-    y_step = r.y_data[r._xy_first:r._xy_last+2]
-
-    lasts = array[['index', array_key]]
-    last = lasts[array_key][-1]
-    y_step[-1] = last
-
-    # slice out in-view data
-    ivl, ivr = vr
-    ys_iv = y_step[ivl:ivr+1]
-    xs_iv = x_step[ivl:ivr+1]
-
-    # flatten to 1d
-    y_iv = ys_iv.reshape(ys_iv.size)
-    x_iv = xs_iv.reshape(xs_iv.size)
-
-    # print(
-    #     f'ys_iv : {ys_iv[-s:]}\n'
-    #     f'y_iv: {y_iv[-s:]}\n'
-    #     f'xs_iv: {xs_iv[-s:]}\n'
-    #     f'x_iv: {x_iv[-s:]}\n'
-    # )
-
-    return x_iv, y_iv, 'all'

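The removed `update_step_xy`/`step_to_xy` pair implements the usual step-curve trick: duplicate each y sample into an `(n, 2)` pair so that flattening yields the flat-then-jump segments. A tiny standalone illustration:

import numpy as np

y = np.array([1.0, 3.0, 2.0])
# each level repeated twice, matching the broadcast in the deleted helper
y2 = np.broadcast_to(y[:, None], (y.size, 2))
y_flat = y2.reshape(-1)
# -> [1., 1., 3., 3., 2., 2.]: pairs of points at the same level
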
 class Flow(msgspec.Struct):  # , frozen=True):
     '''
     (Financial Signal-)Flow compound type which wraps a real-time
@@ -336,15 +208,15 @@ class Flow(msgspec.Struct):  # , frozen=True):
     '''
     name: str
     plot: pg.PlotItem
-    graphics: Union[Curve, BarItems]
+    graphics: Curve | BarItems
+    _shm: ShmArray
+    yrange: tuple[float, float] = None

     # in some cases a flow may want to change its
     # graphical "type" or, "form" when downsampling,
     # normally this is just a plain line.
     ds_graphics: Optional[Curve] = None

-    _shm: ShmArray
-
     is_ohlc: bool = False
     render: bool = True  # toggle for display loop

@@ -386,10 +258,11 @@ class Flow(msgspec.Struct):  # , frozen=True):
         lbar: int,
         rbar: int,

-    ) -> tuple[float, float]:
+    ) -> Optional[tuple[float, float]]:
         '''
         Compute the cached max and min y-range values for a given
-        x-range determined by ``lbar`` and ``rbar``.
+        x-range determined by ``lbar`` and ``rbar`` or ``None``
+        if no range can be determined (yet).

         '''
         rkey = (lbar, rbar)
@@ -399,40 +272,44 @@ class Flow(msgspec.Struct):  # , frozen=True):

         shm = self.shm
         if shm is None:
-            mxmn = None
+            return None

-        else:  # new block for profiling?..
-            arr = shm.array
+        arr = shm.array

         # build relative indexes into shm array
         # TODO: should we just add/use a method
         # on the shm to do this?
         ifirst = arr[0]['index']
         slice_view = arr[
             lbar - ifirst:
             (rbar - ifirst) + 1
         ]

         if not slice_view.size:
-            mxmn = None
+            return None

+        elif self.yrange:
+            mxmn = self.yrange
+            # print(f'{self.name} M4 maxmin: {mxmn}')
+
+        else:
+            if self.is_ohlc:
+                ylow = np.min(slice_view['low'])
+                yhigh = np.max(slice_view['high'])
+
             else:
-                if self.is_ohlc:
-                    ylow = np.min(slice_view['low'])
-                    yhigh = np.max(slice_view['high'])
-
-                else:
-                    view = slice_view[self.name]
-                    ylow = np.min(view)
-                    yhigh = np.max(view)
+                view = slice_view[self.name]
+                ylow = np.min(view)
+                yhigh = np.max(view)

-                mxmn = ylow, yhigh
+            mxmn = ylow, yhigh
+            # print(f'{self.name} MANUAL maxmin: {mxmin}')

-        if mxmn is not None:
-            # cache new mxmn result
-            self._mxmns[rkey] = mxmn
-
-        return mxmn
+        # cache result for input range
+        assert mxmn
+        self._mxmns[rkey] = mxmn
+        return mxmn

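The reworked `maxmin()` above now returns `None` early when no data is available and always caches a computed range under its `(lbar, rbar)` key. The memoization shape in isolation, as a hedged sketch (`compute_range` is a hypothetical stand-in for the ohlc/column min-max logic):

from __future__ import annotations

_mxmns: dict[tuple[int, int], tuple[float, float]] = {}


def cached_maxmin(lbar: int, rbar: int):
    rkey = (lbar, rbar)
    cached = _mxmns.get(rkey)
    if cached is not None:
        return cached

    # hypothetical helper; may return None when no data is in range yet
    mxmn = compute_range(lbar, rbar)
    if mxmn is not None:
        _mxmns[rkey] = mxmn
    return mxmn
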
     def view_range(self) -> tuple[int, int]:
         '''
@@ -511,7 +388,7 @@ class Flow(msgspec.Struct):  # , frozen=True):
         render: bool = True,
         array_key: Optional[str] = None,

-        profiler: Optional[pg.debug.Profiler] = None,
+        profiler: Optional[Profiler] = None,
         do_append: bool = True,

         **kwargs,
@@ -522,7 +399,7 @@ class Flow(msgspec.Struct):  # , frozen=True):
         render to graphics.

         '''
-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
             msg=f'Flow.update_graphics() for {self.name}',
             disabled=not pg_profile_enabled(),
             ms_threshold=4,
@@ -547,9 +424,14 @@ class Flow(msgspec.Struct):  # , frozen=True):

         slice_to_head: int = -1
         should_redraw: bool = False
+        should_line: bool = False
         rkwargs = {}

-        should_line = False
+        # TODO: probably specialize ``Renderer`` types instead of
+        # these logic checks?
+        # - put these blocks into a `.load_renderer()` meth?
+        # - consider a OHLCRenderer, StepCurveRenderer, Renderer?
+        r = self._src_r
         if isinstance(graphics, BarItems):
             # XXX: special case where we change out graphics
             # to a line after a certain uppx threshold.
@@ -569,16 +451,36 @@ class Flow(msgspec.Struct):  # , frozen=True):
             should_redraw = changed_to_line or not should_line
             self._in_ds = should_line

-        else:
-            r = self._src_r
-            if not r:
-                # just using for ``.diff()`` atm..
-                r = self._src_r = Renderer(
-                    flow=self,
-                    # TODO: rename this to something with ohlc
-                    last_read=read,
-                )
+        elif not r:
+            if isinstance(graphics, StepCurve):
+
+                r = self._src_r = Renderer(
+                    flow=self,
+                    fmtr=StepCurveFmtr(
+                        shm=self.shm,
+                        flow=self,
+                        _last_read=read,
+                    ),
+                )
+
+                # TODO: append logic inside ``.render()`` isn't
+                # correct yet for step curves.. remove this to see it.
+                should_redraw = True
+                slice_to_head = -2
+
+            else:
+                r = self._src_r
+                if not r:
+                    # just using for ``.diff()`` atm..
+                    r = self._src_r = Renderer(
+                        flow=self,
+                        fmtr=IncrementalFormatter(
+                            shm=self.shm,
+                            flow=self,
+                            _last_read=read,
+                        ),
+                    )

         # ``Curve`` derivative case(s):
         array_key = array_key or self.name
         # print(array_key)
@@ -588,19 +490,6 @@ class Flow(msgspec.Struct):  # , frozen=True):
         should_ds: bool = r._in_ds
         showing_src_data: bool = not r._in_ds

-        # step_mode = getattr(graphics, '_step_mode', False)
-        step_mode = isinstance(graphics, StepCurve)
-        if step_mode:
-
-            r.allocate_xy = to_step_format
-            r.update_xy = update_step_xy
-            r.format_xy = step_to_xy
-
-            # TODO: append logic inside ``.render()`` isn't
-            # correct yet for step curves.. remove this to see it.
-            should_redraw = True
-            slice_to_head = -2
-
         # downsampling incremental state checking
         # check for and set std m4 downsample conditions
         uppx = graphics.x_uppx()
@@ -628,10 +517,13 @@ class Flow(msgspec.Struct):  # , frozen=True):
             # source data so we clear our path data in prep
             # to generate a new one from original source data.
             new_sample_rate = True
-            showing_src_data = True
             should_ds = False
             should_redraw = True

+            showing_src_data = True
+            # reset yrange to be computed from source data
+            self.yrange = None
+
         # MAIN RENDER LOGIC:
         # - determine in view data and redraw on range change
         # - determine downsampling ops if needed
@@ -657,6 +549,10 @@ class Flow(msgspec.Struct):  # , frozen=True):

             **rkwargs,
         )
+        if showing_src_data:
+            # print(f"{self.name} SHOWING SOURCE")
+            # reset yrange to be computed from source data
+            self.yrange = None

         if not out:
             log.warning(f'{self.name} failed to render!?')
@@ -664,28 +560,32 @@ class Flow(msgspec.Struct):  # , frozen=True):

         path, data, reset = out

+        # if self.yrange:
+        #     print(f'flow {self.name} yrange from m4: {self.yrange}')

         # XXX: SUPER UGGGHHH... without this we get stale cache
         # graphics that don't update until you downsampler again..
-        if reset:
-            with graphics.reset_cache():
-                # assign output paths to graphicis obj
-                graphics.path = r.path
-                graphics.fast_path = r.fast_path
+        # reset = False
+        # if reset:
+        #     with graphics.reset_cache():
+        #         # assign output paths to graphicis obj
+        #         graphics.path = r.path
+        #         graphics.fast_path = r.fast_path

-                # XXX: we don't need this right?
-                # graphics.draw_last_datum(
-                #     path,
-                #     src_array,
-                #     data,
-                #     reset,
-                #     array_key,
-                # )
-                # graphics.update()
-                # profiler('.update()')
-        else:
-            # assign output paths to graphicis obj
-            graphics.path = r.path
-            graphics.fast_path = r.fast_path
+        #         # XXX: we don't need this right?
+        #         # graphics.draw_last_datum(
+        #         #     path,
+        #         #     src_array,
+        #         #     data,
+        #         #     reset,
+        #         #     array_key,
+        #         # )
+        #         # graphics.update()
+        #         # profiler('.update()')
+        # else:
+        # assign output paths to graphicis obj
+        graphics.path = r.path
+        graphics.fast_path = r.fast_path

         graphics.draw_last_datum(
             path,
@@ -769,51 +669,10 @@ class Flow(msgspec.Struct):  # , frozen=True):
             g.update()


-def by_index_and_key(
-    renderer: Renderer,
-    array: np.ndarray,
-    array_key: str,
-    vr: tuple[int, int],
-
-) -> tuple[
-    np.ndarray,
-    np.ndarray,
-    np.ndarray,
-]:
-    return array['index'], array[array_key], 'all'
-
-
 class Renderer(msgspec.Struct):

     flow: Flow
-    # last array view read
-    last_read: Optional[tuple] = None
-
-    # default just returns index, and named array from data
-    format_xy: Callable[
-        [np.ndarray, str],
-        tuple[np.ndarray]
-    ] = by_index_and_key
-
-    # optional pre-graphics xy formatted data which
-    # is incrementally updated in sync with the source data.
-    allocate_xy: Optional[Callable[
-        [int, slice],
-        tuple[np.ndarray, np.nd.array]
-    ]] = None
-
-    update_xy: Optional[Callable[
-        [int, slice], None]
-    ] = None
-
-    x_data: Optional[np.ndarray] = None
-    y_data: Optional[np.ndarray] = None
-
-    # indexes which slice into the above arrays (which are allocated
-    # based on source data shm input size) and allow retrieving
-    # incrementally updated data.
-    _xy_first: int = 0
-    _xy_last: int = 0
+    fmtr: IncrementalFormatter

     # output graphics rendering, the main object
     # processed in ``QGraphicsObject.paint()``
@@ -835,58 +694,11 @@ class Renderer(msgspec.Struct):
     _last_uppx: float = 0
     _in_ds: bool = False

-    # incremental update state(s)
-    _last_vr: Optional[tuple[float, float]] = None
-    _last_ivr: Optional[tuple[float, float]] = None
-
-    def diff(
-        self,
-        new_read: tuple[np.ndarray],
-
-    ) -> tuple[
-        np.ndarray,
-        np.ndarray,
-    ]:
-        (
-            last_xfirst,
-            last_xlast,
-            last_array,
-            last_ivl,
-            last_ivr,
-            last_in_view,
-        ) = self.last_read
-
-        # TODO: can the renderer just call ``Flow.read()`` directly?
-        # unpack latest source data read
-        (
-            xfirst,
-            xlast,
-            array,
-            ivl,
-            ivr,
-            in_view,
-        ) = new_read
-
-        # compute the length diffs between the first/last index entry in
-        # the input data and the last indexes we have on record from the
-        # last time we updated the curve index.
-        prepend_length = int(last_xfirst - xfirst)
-        append_length = int(xlast - last_xlast)
-
-        # blah blah blah
-        # do diffing for prepend, append and last entry
-        return (
-            slice(xfirst, last_xfirst),
-            prepend_length,
-            append_length,
-            slice(last_xlast, xlast),
-        )
-
     def draw_path(
         self,
         x: np.ndarray,
         y: np.ndarray,
-        connect: Union[str, np.ndarray] = 'all',
+        connect: str | np.ndarray = 'all',
         path: Optional[QPainterPath] = None,
         redraw: bool = False,

@@ -932,7 +744,7 @@ class Renderer(msgspec.Struct):

         new_read,
         array_key: str,
-        profiler: pg.debug.Profiler,
+        profiler: Profiler,
         uppx: float = 1,

         # redraw and ds flags
@@ -964,165 +776,54 @@ class Renderer(msgspec.Struct):
         '''
         # TODO: can the renderer just call ``Flow.read()`` directly?
         # unpack latest source data read
+        fmtr = self.fmtr
+
         (
-            xfirst,
-            xlast,
+            _,
+            _,
             array,
             ivl,
             ivr,
             in_view,
         ) = new_read

-        (
-            pre_slice,
-            prepend_length,
-            append_length,
-            post_slice,
-        ) = self.diff(new_read)
-
-        if self.update_xy:
-
-            shm = self.flow.shm
-
-            if self.y_data is None:
-                # we first need to allocate xy data arrays
-                # from the source data.
-                assert self.allocate_xy
-                self.x_data, self.y_data = self.allocate_xy(
-                    shm,
-                    array_key,
-                )
-                self._xy_first = shm._first.value
-                self._xy_last = shm._last.value
-                profiler('allocated xy history')
-
-            if prepend_length:
-                y_prepend = shm._array[pre_slice]
-
-                if read_from_key:
-                    y_prepend = y_prepend[array_key]
-
-                xy_data, xy_slice = self.update_xy(
-                    shm,
-                    array_key,
-
-                    # this is the pre-sliced, "normally expected"
-                    # new data that an updater would normally be
-                    # expected to process, however in some cases (like
-                    # step curves) the updater routine may want to do
-                    # the source history-data reading itself, so we pass
-                    # both here.
-                    y_prepend,
-
-                    pre_slice,
-                    prepend_length,
-                    self._xy_first,
-                    self._xy_last,
-                    is_append=False,
-                )
-                self.y_data[xy_slice] = xy_data
-                self._xy_first = shm._first.value
-                profiler('prepended xy history: {prepend_length}')
-
-            if append_length:
-                y_append = shm._array[post_slice]
-
-                if read_from_key:
-                    y_append = y_append[array_key]
-
-                xy_data, xy_slice = self.update_xy(
-                    shm,
-                    array_key,
-
-                    y_append,
-                    post_slice,
-                    append_length,
-
-                    self._xy_first,
-                    self._xy_last,
-                    is_append=True,
-                )
-                # self.y_data[post_slice] = xy_data
-                # self.y_data[xy_slice or post_slice] = xy_data
-                self.y_data[xy_slice] = xy_data
-                self._xy_last = shm._last.value
-                profiler('appened xy history: {append_length}')
-
-        if use_vr:
-            array = in_view
-        # else:
-        #     ivl, ivr = xfirst, xlast
-
-        hist = array[:slice_to_head]
-
         # xy-path data transform: convert source data to a format
         # able to be passed to a `QPainterPath` rendering routine.
-        if not len(hist):
-            return
-
-        x_out, y_out, connect = self.format_xy(
-            self,
-            # TODO: hist here should be the pre-sliced
-            # x/y_data in the case where allocate_xy is
-            # defined?
-            hist,
+        fmt_out = fmtr.format_to_1d(
+            new_read,
             array_key,
-            (ivl, ivr),
+            profiler,
+
+            slice_to_head=slice_to_head,
+            read_src_from_key=read_from_key,
+            slice_to_inview=use_vr,
         )

-        profiler('sliced input arrays')
+        # no history in view case
+        if not fmt_out:
+            # XXX: this might be why the profiler only has exits?
+            return

-        if (
-            use_vr
-        ):
-            # if a view range is passed, plan to draw the
-            # source ouput that's "in view" of the chart.
-            view_range = (ivl, ivr)
-            # print(f'{self._name} vr: {view_range}')
-
-            profiler(f'view range slice {view_range}')
-
-            vl, vr = view_range
-
-            zoom_or_append = False
-            last_vr = self._last_vr
-            last_ivr = self._last_ivr or vl, vr
-
-            # incremental in-view data update.
-            if last_vr:
-                # relative slice indices
-                lvl, lvr = last_vr
-                # abs slice indices
-                al, ar = last_ivr
-
-                # left_change = abs(x_iv[0] - al) >= 1
-                # right_change = abs(x_iv[-1] - ar) >= 1
-
-                if (
-                    # likely a zoom view change
-                    (vr - lvr) > 2 or vl < lvl
-                    # append / prepend update
-                    # we had an append update where the view range
-                    # didn't change but the data-viewed (shifted)
-                    # underneath, so we need to redraw.
-                    # or left_change and right_change and last_vr == view_range
-
-                    # not (left_change and right_change) and ivr
-                    # (
-                    # or abs(x_iv[ivr] - livr) > 1
-                ):
-                    zoom_or_append = True
-
-            self._last_vr = view_range
-            if len(x_out):
-                self._last_ivr = x_out[0], x_out[slice_to_head]
+        (
+            x_1d,
+            y_1d,
+            connect,
+            prepend_length,
+            append_length,
+            view_changed,
+            # append_tres,
+
+        ) = fmt_out

         # redraw conditions
         if (
             prepend_length > 0
             or new_sample_rate
+            or view_changed

+            # NOTE: comment this to try and make "append paths"
+            # work below..
             or append_length > 0
-            or zoom_or_append
         ):
             should_redraw = True

@@ -1144,18 +845,21 @@ class Renderer(msgspec.Struct):

         elif should_ds and uppx > 1:

-            x_out, y_out = xy_downsample(
-                x_out,
-                y_out,
+            x_1d, y_1d, ymn, ymx = xy_downsample(
+                x_1d,
+                y_1d,
                 uppx,
             )
+            self.flow.yrange = ymn, ymx
+            # print(f'{self.flow.name} post ds: ymn, ymx: {ymn},{ymx}')

             reset = True
             profiler(f'FULL PATH downsample redraw={should_ds}')
             self._in_ds = True

         path = self.draw_path(
-            x=x_out,
-            y=y_out,
+            x=x_1d,
+            y=y_1d,
             connect=connect,
             path=path,
             redraw=True,
@@ -1170,7 +874,6 @@ class Renderer(msgspec.Struct):
        # TODO: get this piecewise prepend working - right now it's
        # giving heck on vwap...
        # elif prepend_length:
-        #     breakpoint()

        #     prepend_path = pg.functions.arrayToQPath(
        #         x[0:prepend_length],
@@ -1187,18 +890,22 @@ class Renderer(msgspec.Struct):
         elif (
             append_length > 0
             and do_append
-            and not should_redraw
         ):
-            # print(f'{array_key} append len: {append_length}')
-            new_x = x_out[-append_length - 2:]  # slice_to_head]
-            new_y = y_out[-append_length - 2:]  # slice_to_head]
+            print(f'{array_key} append len: {append_length}')
+            # new_x = x_1d[-append_length - 2:]  # slice_to_head]
+            # new_y = y_1d[-append_length - 2:]  # slice_to_head]
             profiler('sliced append path')
+            # (
+            #     x_1d,
+            #     y_1d,
+            #     connect,
+            # ) = append_tres

             profiler(
                 f'diffed array input, append_length={append_length}'
             )

-            # if should_ds:
+            # if should_ds and uppx > 1:
             #     new_x, new_y = xy_downsample(
             #         new_x,
             #         new_y,
@@ -1207,14 +914,15 @@ class Renderer(msgspec.Struct):
             #     profiler(f'fast path downsample redraw={should_ds}')

             append_path = self.draw_path(
-                x=new_x,
-                y=new_y,
+                x=x_1d,
+                y=y_1d,
                 connect=connect,
                 path=fast_path,
             )
             profiler('generated append qpath')

             if use_fpath:
+                # print(f'{self.flow.name}: FAST PATH')
                 # an attempt at trying to make append-updates faster..
                 if fast_path is None:
                     fast_path = append_path
@@ -1224,7 +932,12 @@ class Renderer(msgspec.Struct):
                     size = fast_path.capacity()
                     profiler(f'connected fast path w size: {size}')

-                    # print(f"append_path br: {append_path.boundingRect()}")
+                    print(
+                        f"append_path br: {append_path.boundingRect()}\n"
+                        f"path size: {size}\n"
+                        f"append_path len: {append_path.length()}\n"
+                        f"fast_path len: {fast_path.length()}\n"
+                    )
                     # graphics.path.moveTo(new_x[0], new_y[0])
                     # path.connectPath(append_path)

@@ -1238,10 +951,4 @@ class Renderer(msgspec.Struct):
         self.path = path
         self.fast_path = fast_path

-        # TODO: eventually maybe we can implement some kind of
-        # transform on the ``QPainterPath`` that will more or less
-        # detect the diff in "elements" terms?
-        # update diff state since we've now rendered paths.
-        self.last_read = new_read
-
         return self.path, array, reset

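Note the new `xy_downsample()` call shape in the `render()` hunks above: it now also returns the y-range (`ymn, ymx`) found while binning, which the render loop stores on the flow (`self.flow.yrange`). The underlying M4 idea keeps (first, min, max, last) per pixel bin; a hedged numpy sketch of that scheme, which likewise reports the global min/max (this is an illustration of the technique, not piker's actual implementation):

import numpy as np


def m4_downsample(x, y, bins: int):
    # split the series into roughly equal pixel bins
    idx = np.linspace(0, len(y), bins + 1, dtype=int)
    xs, ys = [], []
    for lo, hi in zip(idx[:-1], idx[1:]):
        if lo == hi:
            continue
        seg = y[lo:hi]
        # keep first, min, max and last samples of each bin
        for i in (0, int(np.argmin(seg)), int(np.argmax(seg)), len(seg) - 1):
            xs.append(x[lo + i])
            ys.append(seg[i])
    ys = np.asarray(ys)
    # also hand back the global y-range computed during binning
    return np.asarray(xs), ys, float(ys.min()), float(ys.max())
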
@@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
             # color: #19232D;
             # width: 10px;

-        self.setRange(0, slots)
+        self.setRange(0, int(slots))
         self.setValue(value)


@@ -644,7 +644,7 @@ def mk_fill_status_bar(

     # TODO: calc this height from the ``ChartnPane``
     chart_h = round(parent_pane.height() * 5/8)
-    bar_h = chart_h * 0.375
+    bar_h = chart_h * 0.375*0.9

     # TODO: once things are sized to screen
     bar_label_font_size = label_font_size or _font.px_size - 2

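The `int(slots)` cast is likely defensive against newer PyQt5 releases, which stopped implicitly coercing floats for C++ `int` parameters and raise `TypeError` instead. A minimal illustration:

from PyQt5.QtWidgets import QApplication, QProgressBar

app = QApplication([])
bar = QProgressBar()
slots = 4.0  # may arrive as a float from upstream sizing math
bar.setRange(0, int(slots))  # newer PyQt5 rejects float args here
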
@@ -27,12 +27,13 @@ from itertools import cycle
 from typing import Optional, AsyncGenerator, Any

 import numpy as np
-from pydantic import create_model
+import msgspec
 import tractor
 import pyqtgraph as pg
 import trio
 from trio_typing import TaskStatus

+from piker.data.types import Struct
 from ._axes import PriceAxis
 from .._cacheables import maybe_open_context
 from ..calc import humanize
@@ -50,14 +51,18 @@ from ._forms import (
     mk_form,
     open_form_input_handling,
 )
-from ..fsp._api import maybe_mk_fsp_shm, Fsp
+from ..fsp._api import (
+    maybe_mk_fsp_shm,
+    Fsp,
+)
 from ..fsp import cascade
 from ..fsp._volume import (
-    tina_vwap,
+    # tina_vwap,
     dolla_vlm,
     flow_rates,
 )
 from ..log import get_logger
+from .._profile import Profiler

 log = get_logger(__name__)

@@ -153,12 +158,13 @@ async def open_fsp_sidepane(
     )

     # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
-    FspConfig = create_model(
-        'FspConfig',
-        name=name,
-        **params,
+    FspConfig = msgspec.defstruct(
+        "Point",
+        [('name', name)] + list(params.items()),
+        bases=(Struct,),
     )
-    sidepane.model = FspConfig()
+    model = FspConfig(name=name, **params)
+    sidepane.model = model

     # just a logger for now until we get fsp configs up and running.
     async def settings_change(

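`msgspec.defstruct` builds a `Struct` type at runtime much like pydantic's `create_model` did; fields are given as names or `(name, type[, default])` tuples. A standalone sketch of the API (the literal `"Point"` type name in the diff mirrors the msgspec docs' example; the field values below are assumptions for illustration):

import msgspec

params = {'period': int, 'smooth': float}

FspConfig = msgspec.defstruct(
    'FspConfig',
    # (field name, type) pairs, leading with a required ``name`` field
    [('name', str)] + [(k, t) for k, t in params.items()],
)

cfg = FspConfig(name='rsi', period=14, smooth=0.5)
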
@@ -188,7 +194,7 @@ async def open_fsp_actor_cluster(

     from tractor._clustering import open_actor_cluster

-    # profiler = pg.debug.Profiler(
+    # profiler = Profiler(
     #     delayed=False,
     #     disabled=False
     # )
@@ -210,7 +216,7 @@ async def run_fsp_ui(
     target: Fsp,
     conf: dict[str, dict],
     loglevel: str,
-    # profiler: pg.debug.Profiler,
+    # profiler: Profiler,
     # _quote_throttle_rate: int = 58,

 ) -> None:
@@ -440,7 +446,9 @@ class FspAdmin:
         # if the chart isn't hidden try to update
         # the data on screen.
         if not self.linked.isHidden():
-            log.debug(f'Re-syncing graphics for fsp: {ns_path}')
+            log.debug(
+                f'Re-syncing graphics for fsp: {ns_path}'
+            )
             self.linked.graphics_cycle(
                 trigger_all=True,
                 prepend_update_index=info['first'],
@@ -469,9 +477,10 @@ class FspAdmin:
             target=target,
             readonly=True,
         )
-        self._flow_registry[
-            (self.src_shm._token, target.name)
-        ] = dst_shm._token
+        self._flow_registry[(
+            self.src_shm._token,
+            target.name
+        )] = dst_shm._token

         # if not opened:
         #     raise RuntimeError(
@@ -618,6 +627,8 @@ async def open_vlm_displays(
     # built-in vlm which we plot ASAP since it's
     # usually data provided directly with OHLC history.
     shm = ohlcv
+    ohlc_chart = linked.chart

     chart = linked.add_plot(
         name='volume',
         shm=shm,
@@ -633,26 +644,34 @@ async def open_vlm_displays(
         # the curve item internals are pretty convoluted.
         style='step',
     )
+    ohlc_chart.view.enable_auto_yrange(
+        src_vb=chart.view,
+    )

     # force 0 to always be in view
     def multi_maxmin(
         names: list[str],

     ) -> tuple[float, float]:
+        '''
+        Flows "group" maxmin loop; assumes all named flows
+        are in the same co-domain and thus can be sorted
+        as one set.
+
+        Iterates all the named flows and calls the chart
+        api to find their range values and return.
+
+        TODO: really we should probably have a more built-in API
+        for this?
+
+        '''
         mx = 0
         for name in names:
-            mxmn = chart.maxmin(name=name)
-            if mxmn:
-                ymax = mxmn[1]
-                if ymax > mx:
-                    mx = ymax
+            ymn, ymx = chart.maxmin(name=name)
+            mx = max(mx, ymx)

         return 0, mx

-    chart.view.maxmin = partial(multi_maxmin, names=['volume'])

     # TODO: fix the x-axis label issue where if you put
     # the axis on the left it's totally not lined up...
     # show volume units value on LHS (for dinkus)
@@ -736,6 +755,8 @@ async def open_vlm_displays(
         },
     )

+    dvlm_pi.hideAxis('left')
+    dvlm_pi.hideAxis('bottom')
     # all to be overlayed curve names
     fields = [
         'dolla_vlm',
@@ -776,6 +797,7 @@ async def open_vlm_displays(

     ) -> None:
         for name in names:
+
             if 'dark' in name:
                 color = dark_vlm_color
             elif 'rate' in name:
@@ -867,6 +889,7 @@ async def open_vlm_displays(
         # keep both regular and dark vlm in view
         names=trade_rate_fields,
     )
+    tr_pi.hideAxis('bottom')

     chart_curves(
         trade_rate_fields,
@@ -940,7 +963,7 @@ async def start_fsp_displays(
     #     },
     # },
     }
-    profiler = pg.debug.Profiler(
+    profiler = Profiler(
         delayed=False,
         disabled=False
     )

@@ -33,6 +33,7 @@ import numpy as np
 import trio

 from ..log import get_logger
+from .._profile import Profiler
 from .._profile import pg_profile_enabled, ms_slower_then
 # from ._style import _min_points_to_show
 from ._editors import SelectRect
@@ -141,13 +142,16 @@ async def handle_viewmode_kb_inputs(
             Qt.Key_Space,
         }
     ):
-        view._chart.linked.godwidget.search.focus()
+        godw = view._chart.linked.godwidget
+        godw.hist_linked.resize_sidepanes(from_linked=godw.rt_linked)
+        godw.search.focus()

     # esc and ctrl-c
     if key == Qt.Key_Escape or (ctrl and key == Qt.Key_C):
         # ctrl-c as cancel
         # https://forum.qt.io/topic/532/how-to-catch-ctrl-c-on-a-widget/9
         view.select_box.clear()
+        view.linked.focus()

     # cancel order or clear graphics
     if key == Qt.Key_C or key == Qt.Key_Delete:
@@ -178,17 +182,17 @@ async def handle_viewmode_kb_inputs(
     if key in pressed:
         pressed.remove(key)

-    # QUERY/QUOTE MODE #
+    # QUERY/QUOTE MODE
+    # ----------------
     if {Qt.Key_Q}.intersection(pressed):

-        view.linkedsplits.cursor.in_query_mode = True
+        view.linked.cursor.in_query_mode = True

     else:
-        view.linkedsplits.cursor.in_query_mode = False
+        view.linked.cursor.in_query_mode = False

     # SELECTION MODE
     # --------------

     if shift:
         if view.state['mouseMode'] == ViewBox.PanMode:
             view.setMouseMode(ViewBox.RectMode)
@@ -209,18 +213,27 @@ async def handle_viewmode_kb_inputs(

     # ORDER MODE
     # ----------

     # live vs. dark trigger + an action {buy, sell, alert}
     order_keys_pressed = ORDER_MODE.intersection(pressed)

     if order_keys_pressed:

-        # show the pp size label
-        order_mode.current_pp.show()
+        # TODO: it seems like maybe the composition should be
+        # reversed here? Like, maybe we should have the nav have
+        # access to the pos state and then make encapsulated logic
+        # that shows the right stuff on screen instead or order mode
+        # and position-related abstractions doing this?
+
+        # show the pp size label only if there is
+        # a non-zero pos existing
+        tracker = order_mode.current_pp
+        if tracker.live_pp.size:
+            tracker.nav.show()

         # TODO: show pp config mini-params in status bar widget
         # mode.pp_config.show()

+        trigger_type: str = 'dark'
         if (
             # 's' for "submit" to activate "live" order
             Qt.Key_S in pressed or
@@ -228,9 +241,6 @@ async def handle_viewmode_kb_inputs(
         ):
             trigger_type: str = 'live'

-        else:
-            trigger_type: str = 'dark'
-
         # order mode trigger "actions"
         if Qt.Key_D in pressed:  # for "damp eet"
             action = 'sell'
@@ -259,8 +269,8 @@ async def handle_viewmode_kb_inputs(
             Qt.Key_S in pressed or
             order_keys_pressed or
             Qt.Key_O in pressed
-        ) and
-        key in NUMBER_LINE
+        )
+        and key in NUMBER_LINE
     ):
         # hot key to set order slots size.
         # change edit field to current number line value,
@@ -278,7 +288,7 @@ async def handle_viewmode_kb_inputs(
     else:  # none active

         # hide pp label
-        order_mode.current_pp.hide_info()
+        order_mode.current_pp.nav.hide_info()

         # if none are pressed, remove "staged" level
         # line under cursor position
@@ -319,7 +329,6 @@ async def handle_viewmode_mouse(
     ):
         # when in order mode, submit execution
         # msg.event.accept()
-        # breakpoint()
         view.order_mode.submit_order()


@@ -336,16 +345,6 @@ class ChartView(ViewBox):
     '''
     mode_name: str = 'view'

-    # "relay events" for making overlaid views work.
-    # NOTE: these MUST be defined here (and can't be monkey patched
-    # on later) due to signal construction requiring refs to be
-    # in place during the run of meta-class machinery.
-    mouseDragEventRelay = QtCore.Signal(object, object, object)
-    wheelEventRelay = QtCore.Signal(object, object, object)
-
-    event_relay_source: 'Optional[ViewBox]' = None
-    relays: dict[str, QtCore.Signal] = {}

     def __init__(
         self,

|
@ -375,7 +374,7 @@ class ChartView(ViewBox):
|
||||||
y=True,
|
y=True,
|
||||||
)
|
)
|
||||||
|
|
||||||
self.linkedsplits = None
|
self.linked = None
|
||||||
self._chart: 'ChartPlotWidget' = None # noqa
|
self._chart: 'ChartPlotWidget' = None # noqa
|
||||||
|
|
||||||
# add our selection box annotator
|
# add our selection box annotator
|
||||||
|
@ -397,8 +396,11 @@ class ChartView(ViewBox):
|
||||||
|
|
||||||
'''
|
'''
|
||||||
if self._ic is None:
|
if self._ic is None:
|
||||||
self.chart.pause_all_feeds()
|
try:
|
||||||
self._ic = trio.Event()
|
self.chart.pause_all_feeds()
|
||||||
|
self._ic = trio.Event()
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
|
|
||||||
def signal_ic(
|
def signal_ic(
|
||||||
self,
|
self,
|
||||||
|
@ -411,9 +413,12 @@ class ChartView(ViewBox):
|
||||||
|
|
||||||
'''
|
'''
|
||||||
if self._ic:
|
if self._ic:
|
||||||
self._ic.set()
|
try:
|
||||||
self._ic = None
|
self._ic.set()
|
||||||
self.chart.resume_all_feeds()
|
self._ic = None
|
||||||
|
self.chart.resume_all_feeds()
|
||||||
|
except RuntimeError:
|
||||||
|
pass
|
||||||
|
|
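The `try`/`except RuntimeError` guards added to `start_ic()`/`signal_ic()` above appear to exist because Qt can keep delivering input events after the `trio` guest run has torn down, at which point the feed pause/resume calls and `trio.Event` signalling raise `RuntimeError`. A minimal sketch of the pattern (the standalone `InteractionState` class is hypothetical; the `_ic`, `pause_all_feeds()` and `resume_all_feeds()` names come from the diff):

```python
from __future__ import annotations
import trio

class InteractionState:
    '''
    Sketch: guard trio-backed state changes that Qt may trigger
    after the async runtime has already exited.
    '''
    def __init__(self, chart) -> None:
        self.chart = chart
        self._ic: trio.Event | None = None

    def start_ic(self) -> None:
        if self._ic is None:
            try:
                self.chart.pause_all_feeds()
                self._ic = trio.Event()
            except RuntimeError:
                # the trio runtime is gone (app teardown); drop it
                pass

    def signal_ic(self) -> None:
        if self._ic:
            try:
                self._ic.set()
                self._ic = None
                self.chart.resume_all_feeds()
            except RuntimeError:
                pass
```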
    @asynccontextmanager
    async def open_async_input_handler(
@@ -463,7 +468,7 @@ class ChartView(ViewBox):
        self,
        ev,
        axis=None,
-        relayed_from: ChartView = None,
+        # relayed_from: ChartView = None,
    ):
        '''
        Override "center-point" location for scrolling.
@@ -474,13 +479,20 @@ class ChartView(ViewBox):
        TODO: PR a method into ``pyqtgraph`` to make this configurable

        '''
+        linked = self.linked
+        if (
+            not linked
+        ):
+            # print(f'{self.name} not linked but relay from {relayed_from.name}')
+            return

        if axis in (0, 1):
            mask = [False, False]
            mask[axis] = self.state['mouseEnabled'][axis]
        else:
            mask = self.state['mouseEnabled'][:]

-        chart = self.linkedsplits.chart
+        chart = self.linked.chart

        # don't zoom more then the min points setting
        l, lbar, rbar, r = chart.bars_range()
@@ -593,9 +605,20 @@ class ChartView(ViewBox):
        self,
        ev,
        axis: Optional[int] = None,
-        relayed_from: ChartView = None,
+        # relayed_from: ChartView = None,

    ) -> None:
+        # if relayed_from:
+        #     print(f'PAN: {self.name} -> RELAYED FROM: {relayed_from.name}')

+        # NOTE since in the overlay case axes are already
+        # "linked" any x-range change will already be mirrored
+        # in all overlaid ``PlotItems``, so we need to simply
+        # ignore the signal here since otherwise we get N-calls
+        # from N-overlays resulting in an "accelerated" feeling
+        # panning motion instead of the expect linear shift.
+        # if relayed_from:
+        #     return

        pos = ev.pos()
        lastPos = ev.lastPos()
@@ -669,7 +692,10 @@ class ChartView(ViewBox):
            # XXX: WHY
            ev.accept()

-            self.start_ic()
+            try:
+                self.start_ic()
+            except RuntimeError:
+                pass
            # if self._ic is None:
            #     self.chart.pause_all_feeds()
            #     self._ic = trio.Event()
@@ -761,7 +787,7 @@ class ChartView(ViewBox):
        '''
        name = self.name
        # print(f'YRANGE ON {name}')
-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
            msg=f'`ChartView._set_yrange()`: `{name}`',
            disabled=not pg_profile_enabled(),
            ms_threshold=ms_slower_then,
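Several call sites in this changeset swap `pg.debug.Profiler` for the project's own `Profiler` (imported from `.._profile` elsewhere in the diff); the call shape stays the same. A usage sketch, assuming the relative `.._profile` import resolves to `piker._profile` and with an illustrative checkpoint label:

```python
from piker._profile import (
    Profiler,
    pg_profile_enabled,
    ms_slower_then,
)

profiler = Profiler(
    msg='`ChartView._set_yrange()`: `<view-name>`',
    disabled=not pg_profile_enabled(),  # no-op unless profiling is on
    ms_threshold=ms_slower_then,  # only report checkpoints slower than this
)
# ... range computations ...
profiler('computed default view y-range')  # mark a timing checkpoint
```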
@@ -830,33 +856,37 @@ class ChartView(ViewBox):

    ) -> None:
        '''
-        Assign callback for rescaling y-axis automatically
-        based on data contents and ``ViewBox`` state.
+        Assign callbacks for rescaling and resampling y-axis data
+        automatically based on data contents and ``ViewBox`` state.

        '''
        if src_vb is None:
            src_vb = self

-        # splitter(s) resizing
+        # widget-UIs/splitter(s) resizing
        src_vb.sigResized.connect(self._set_yrange)

+        # re-sampling trigger:
        # TODO: a smarter way to avoid calling this needlessly?
        # 2 things i can think of:
        # - register downsample-able graphics specially and only
        #   iterate those.
-        # - only register this when certain downsampleable graphics are
+        # - only register this when certain downsample-able graphics are
        #   "added to scene".
        src_vb.sigRangeChangedManually.connect(
            self.maybe_downsample_graphics
        )

        # mouse wheel doesn't emit XRangeChanged
        src_vb.sigRangeChangedManually.connect(self._set_yrange)

-        # src_vb.sigXRangeChanged.connect(self._set_yrange)
-        # src_vb.sigXRangeChanged.connect(
-        #     self.maybe_downsample_graphics
-        # )
+        # XXX: enabling these will cause "jittery"-ness
+        # on zoom where sharp diffs in the y-range will
+        # not re-size right away until a new sample update?
+        # if src_vb is not self:
+        #     src_vb.sigXRangeChanged.connect(self._set_yrange)
+        #     src_vb.sigXRangeChanged.connect(
+        #         self.maybe_downsample_graphics
+        #     )

    def disable_auto_yrange(self) -> None:

@@ -897,8 +927,7 @@ class ChartView(ViewBox):
        self,
        autoscale_overlays: bool = True,
    ):
-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
            msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
            disabled=not pg_profile_enabled(),

@@ -912,8 +941,12 @@ class ChartView(ViewBox):

        # TODO: a faster single-loop-iterator way of doing this XD
        chart = self._chart
-        linked = self.linkedsplits
-        plots = linked.subplots | {chart.name: chart}
+        plots = {chart.name: chart}
+
+        linked = self.linked
+        if linked:
+            plots |= linked.subplots

        for chart_name, chart in plots.items():
            for name, flow in chart._flows.items():

@@ -923,6 +956,7 @@ class ChartView(ViewBox):
                # XXX: super important to be aware of this.
                # or not flow.graphics.isVisible()
                ):
+                    # print(f'skipping {flow.name}')
                    continue

                # pass in no array which will read and render from the last

@@ -18,9 +18,14 @@
Lines for orders, alerts, L2.

"""
+from __future__ import annotations
from functools import partial
from math import floor
-from typing import Optional, Callable
+from typing import (
+    Optional,
+    Callable,
+    TYPE_CHECKING,
+)

import pyqtgraph as pg
from pyqtgraph import Point, functions as fn
@@ -37,6 +42,9 @@ from ..calc import humanize
from ._label import Label
from ._style import hcolor, _font

+if TYPE_CHECKING:
+    from ._cursor import Cursor


# TODO: probably worth investigating if we can
# make .boundingRect() faster:
@@ -84,7 +92,7 @@ class LevelLine(pg.InfiniteLine):

        self._marker = None
        self.only_show_markers_on_hover = only_show_markers_on_hover
-        self.show_markers: bool = True  # presuming the line is hovered at init
+        self.track_marker_pos: bool = False

        # should line go all the way to far end or leave a "margin"
        # space for other graphics (eg. L1 book)
@@ -122,6 +130,9 @@ class LevelLine(pg.InfiniteLine):
        self._y_incr_mult = 1 / chart.linked.symbol.tick_size
        self._right_end_sc: float = 0

+        # use px caching
+        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

    def txt_offsets(self) -> tuple[int, int]:
        return 0, 0

@@ -216,20 +227,23 @@ class LevelLine(pg.InfiniteLine):
        y: float

    ) -> None:
-        '''Chart coordinates cursor tracking callback.
+        '''
+        Chart coordinates cursor tracking callback.

        this is called by our ``Cursor`` type once this line is set to
        track the cursor: for every movement this callback is invoked to
        reposition the line with the current view coordinates.

        '''
        self.movable = True
        self.set_level(y)  # implictly calls reposition handler

    def mouseDragEvent(self, ev):
-        """Override the ``InfiniteLine`` handler since we need more
+        '''
+        Override the ``InfiniteLine`` handler since we need more
        detailed control and start end signalling.

-        """
+        '''
        cursor = self._chart.linked.cursor

        # hide y-crosshair
@@ -281,10 +295,20 @@ class LevelLine(pg.InfiniteLine):
        # show y-crosshair again
        cursor.show_xhair()

-    def delete(self) -> None:
-        """Remove this line from containing chart/view/scene.
+    def get_cursor(self) -> Optional[Cursor]:

-        """
+        chart = self._chart
+        cur = chart.linked.cursor
+        if self in cur._hovered:
+            return cur
+
+        return None
+
+    def delete(self) -> None:
+        '''
+        Remove this line from containing chart/view/scene.
+
+        '''
        scene = self.scene()
        if scene:
            for label in self._labels:
@@ -298,9 +322,8 @@ class LevelLine(pg.InfiniteLine):

        # remove from chart/cursor states
        chart = self._chart
-        cur = chart.linked.cursor
-        if self in cur._hovered:
+        cur = self.get_cursor()
+        if cur:
            cur._hovered.remove(self)

        chart.plotItem.removeItem(self)
@@ -308,8 +331,8 @@ class LevelLine(pg.InfiniteLine):
    def mouseDoubleClickEvent(
        self,
        ev: QtGui.QMouseEvent,
-    ) -> None:

+    ) -> None:
        # TODO: enter labels edit mode
        print(f'double click {ev}')

@@ -334,30 +357,22 @@ class LevelLine(pg.InfiniteLine):

        line_end, marker_right, r_axis_x = self._chart.marker_right_points()

-        if self.show_markers and self.markers:
-
-            p.setPen(self.pen)
-            qgo_draw_markers(
-                self.markers,
-                self.pen.color(),
-                p,
-                vb_left,
-                vb_right,
-                marker_right,
-            )
-            # marker_size = self.markers[0][2]
-            self._maxMarkerSize = max([m[2] / 2. for m in self.markers])
-
-        # this seems slower when moving around
-        # order lines.. not sure wtf is up with that.
-        # for now we're just using it on the position line.
-        elif self._marker:
+        # (legacy) NOTE: at one point this seemed slower when moving around
+        # order lines.. not sure if that's still true or why but we've
+        # dropped the original hacky `.pain()` transform stuff for inf
+        # line markers now - check the git history if it needs to be
+        # reverted.
+        if self._marker:
+            if self.track_marker_pos:
+                # make the line end at the marker's x pos
+                line_end = marker_right = self._marker.pos().x()

            # TODO: make this label update part of a scene-aware-marker
            # composed annotation
            self._marker.setPos(
                QPointF(marker_right, self.scene_y())
            )

            if hasattr(self._marker, 'label'):
                self._marker.label.update()

@@ -379,16 +394,14 @@ class LevelLine(pg.InfiniteLine):

    def hide(self) -> None:
        super().hide()
-        if self._marker:
-            self._marker.hide()
-            # needed for ``order_line()`` lines currently
-            self._marker.label.hide()
+        mkr = self._marker
+        if mkr:
+            mkr.hide()

    def show(self) -> None:
        super().show()
        if self._marker:
            self._marker.show()
-            # self._marker.label.show()

    def scene_y(self) -> float:
        return self.getViewBox().mapFromView(
@@ -421,6 +434,10 @@ class LevelLine(pg.InfiniteLine):

        return path

+    @property
+    def marker(self) -> LevelMarker:
+        return self._marker

    def hoverEvent(self, ev):
        '''
        Mouse hover callback.
@@ -429,17 +446,16 @@ class LevelLine(pg.InfiniteLine):
        cur = self._chart.linked.cursor

        # hovered
-        if (not ev.isExit()) and ev.acceptDrags(QtCore.Qt.LeftButton):
+        if (
+            not ev.isExit()
+            and ev.acceptDrags(QtCore.Qt.LeftButton)
+        ):
            # if already hovered we don't need to run again
            if self.mouseHovering is True:
                return

            if self.only_show_markers_on_hover:
-                self.show_markers = True
-
-                if self._marker:
-                    self._marker.show()
+                self.show_markers()

            # highlight if so configured
            if self.highlight_on_hover:
@@ -482,11 +498,7 @@ class LevelLine(pg.InfiniteLine):
                cur._hovered.remove(self)

            if self.only_show_markers_on_hover:
-                self.show_markers = False
-
-                if self._marker:
-                    self._marker.hide()
-                    self._marker.label.hide()
+                self.hide_markers()

            if self not in cur._trackers:
                cur.show_xhair(y_label_level=self.value())
@@ -498,6 +510,15 @@ class LevelLine(pg.InfiniteLine):

        self.update()

+    def hide_markers(self) -> None:
+        if self._marker:
+            self._marker.hide()
+            self._marker.label.hide()
+
+    def show_markers(self) -> None:
+        if self._marker:
+            self._marker.show()

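The hover handlers above move from toggling a `show_markers` boolean to calling the new `hide_markers()`/`show_markers()` methods, which also take care of the marker's label. A usage sketch (the `on_hover_change` helper and `line` instance are hypothetical):

```python
def on_hover_change(line, entered: bool) -> None:
    # sketch: mirror the hoverEvent() logic above using the
    # encapsulated marker-visibility API
    if line.only_show_markers_on_hover:
        if entered:
            line.show_markers()  # shows ._marker if one is attached
        else:
            line.hide_markers()  # hides ._marker and its label
```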
|
||||||
def level_line(
|
def level_line(
|
||||||
|
|
||||||
|
@ -518,9 +539,10 @@ def level_line(
|
||||||
**kwargs,
|
**kwargs,
|
||||||
|
|
||||||
) -> LevelLine:
|
) -> LevelLine:
|
||||||
"""Convenience routine to add a styled horizontal line to a plot.
|
'''
|
||||||
|
Convenience routine to add a styled horizontal line to a plot.
|
||||||
|
|
||||||
"""
|
'''
|
||||||
hl_color = color + '_light' if highlight_on_hover else color
|
hl_color = color + '_light' if highlight_on_hover else color
|
||||||
|
|
||||||
line = LevelLine(
|
line = LevelLine(
|
||||||
|
@ -702,7 +724,7 @@ def order_line(
|
||||||
marker = LevelMarker(
|
marker = LevelMarker(
|
||||||
chart=chart,
|
chart=chart,
|
||||||
style=marker_style,
|
style=marker_style,
|
||||||
get_level=line.value,
|
get_level=line.value, # callback
|
||||||
size=marker_size,
|
size=marker_size,
|
||||||
keep_in_view=False,
|
keep_in_view=False,
|
||||||
)
|
)
|
||||||
|
@ -711,7 +733,8 @@ def order_line(
|
||||||
marker = line.add_marker(marker)
|
marker = line.add_marker(marker)
|
||||||
|
|
||||||
# XXX: DON'T COMMENT THIS!
|
# XXX: DON'T COMMENT THIS!
|
||||||
# this fixes it the artifact issue! .. of course, bounding rect stuff
|
# this fixes it the artifact issue!
|
||||||
|
# .. of course, bounding rect stuff
|
||||||
line._maxMarkerSize = marker_size
|
line._maxMarkerSize = marker_size
|
||||||
|
|
||||||
assert line._marker is marker
|
assert line._marker is marker
|
||||||
|
@ -732,7 +755,8 @@ def order_line(
|
||||||
|
|
||||||
if action != 'alert':
|
if action != 'alert':
|
||||||
|
|
||||||
# add a partial position label if we also added a level marker
|
# add a partial position label if we also added a level
|
||||||
|
# marker
|
||||||
pp_size_label = Label(
|
pp_size_label = Label(
|
||||||
view=view,
|
view=view,
|
||||||
color=line.color,
|
color=line.color,
|
||||||
|
@ -766,9 +790,9 @@ def order_line(
|
||||||
# XXX: without this the pp proportion label next the marker
|
# XXX: without this the pp proportion label next the marker
|
||||||
# seems to lag? this is the same issue we had with position
|
# seems to lag? this is the same issue we had with position
|
||||||
# lines which we handle with ``.update_graphcis()``.
|
# lines which we handle with ``.update_graphcis()``.
|
||||||
# marker._on_paint=lambda marker: pp_size_label.update()
|
|
||||||
marker._on_paint = lambda marker: pp_size_label.update()
|
marker._on_paint = lambda marker: pp_size_label.update()
|
||||||
|
|
||||||
|
# XXX: THIS IS AN UNTYPED MONKEY PATCH!?!?!
|
||||||
marker.label = label
|
marker.label = label
|
||||||
|
|
||||||
# sanity check
|
# sanity check
|
||||||
|
|
|
@ -0,0 +1,104 @@
|
||||||
|
# piker: trading gear for hackers
|
||||||
|
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
||||||
|
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
"""
|
||||||
|
Notifications utils.
|
||||||
|
|
||||||
|
"""
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import subprocess
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import trio
|
||||||
|
|
||||||
|
from ..log import get_logger
|
||||||
|
from ..clearing._messages import (
|
||||||
|
Status,
|
||||||
|
)
|
||||||
|
|
||||||
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
_dbus_uid: Optional[str] = ''
|
||||||
|
|
||||||
|
|
||||||
|
async def notify_from_ems_status_msg(
|
||||||
|
msg: Status,
|
||||||
|
duration: int = 3000,
|
||||||
|
is_subproc: bool = False,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Send a linux desktop notification.
|
||||||
|
|
||||||
|
Handle subprocesses by discovering the dbus user id
|
||||||
|
on first call.
|
||||||
|
|
||||||
|
'''
|
||||||
|
if platform.system() != "Linux":
|
||||||
|
return
|
||||||
|
|
||||||
|
# TODO: this in another task?
|
||||||
|
# not sure if this will ever be a bottleneck,
|
||||||
|
# we probably could do graphics stuff first tho?
|
||||||
|
|
||||||
|
if is_subproc:
|
||||||
|
global _dbus_uid
|
||||||
|
su = os.environ.get('SUDO_USER')
|
||||||
|
if (
|
||||||
|
not _dbus_uid
|
||||||
|
and su
|
||||||
|
):
|
||||||
|
|
||||||
|
# TODO: use `trio` but we need to use nursery.start()
|
||||||
|
# to use pipes?
|
||||||
|
# result = await trio.run_process(
|
||||||
|
result = subprocess.run(
|
||||||
|
[
|
||||||
|
'id',
|
||||||
|
'-u',
|
||||||
|
su,
|
||||||
|
],
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE,
|
||||||
|
# check=True
|
||||||
|
)
|
||||||
|
_dbus_uid = result.stdout.decode("utf-8").replace('\n', '')
|
||||||
|
|
||||||
|
os.environ['DBUS_SESSION_BUS_ADDRESS'] = (
|
||||||
|
f'unix:path=/run/user/{_dbus_uid}/bus'
|
||||||
|
)
|
||||||
|
|
||||||
|
result = await trio.run_process(
|
||||||
|
[
|
||||||
|
'notify-send',
|
||||||
|
'-u', 'normal',
|
||||||
|
'-t', f'{duration}',
|
||||||
|
'piker',
|
||||||
|
|
||||||
|
# TODO: add in standard fill/exec info that maybe we
|
||||||
|
# pack in a broker independent way?
|
||||||
|
f"'{msg.pformat()}'",
|
||||||
|
],
|
||||||
|
capture_stdout=True,
|
||||||
|
capture_stderr=True,
|
||||||
|
check=False,
|
||||||
|
)
|
||||||
|
if result.returncode != 0:
|
||||||
|
log.warn(f'No notification daemon installed stderr: {result.stderr}')
|
||||||
|
|
||||||
|
log.runtime(result)
|
|
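A usage sketch for the new notifications module (the `piker.ui.notify` import path is an assumption about where the file lands; the `Status` message type comes from `piker.clearing._messages` per the imports above):

```python
import trio
# assumed module location for the new file above:
from piker.ui.notify import notify_from_ems_status_msg

async def on_fill(status_msg) -> None:
    # fire a desktop popup for an order status update;
    # silently no-ops on non-Linux platforms per the guard above.
    await notify_from_ems_status_msg(
        status_msg,
        duration=5000,  # popup lifetime in ms
        is_subproc=True,  # resolve the dbus uid when run under sudo
    )

# trio.run(on_fill, some_status_msg)
```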
@@ -25,13 +25,21 @@ from typing import (

import numpy as np
import pyqtgraph as pg
-from PyQt5 import QtCore, QtGui, QtWidgets
-from PyQt5.QtCore import QLineF, QPointF
+from PyQt5 import (
+    QtGui,
+    QtWidgets,
+)
+from PyQt5.QtCore import (
+    QLineF,
+    QRectF,
+)

from PyQt5.QtGui import QPainterPath

from .._profile import pg_profile_enabled, ms_slower_then
from ._style import hcolor
from ..log import get_logger
+from .._profile import Profiler

if TYPE_CHECKING:
    from ._chart import LinkedSplits
@@ -113,8 +121,13 @@ class BarItems(pg.GraphicsObject):
        # we expect the downsample curve report this.
        return 0

+    # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
    def boundingRect(self):
-        # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
+        # profiler = Profiler(
+        #     msg=f'BarItems.boundingRect(): `{self._name}`',
+        #     disabled=not pg_profile_enabled(),
+        #     ms_threshold=ms_slower_then,
+        # )

        # TODO: Can we do rect caching to make this faster
        # like `pg.PlotCurveItem` does? In theory it's just
@@ -134,32 +147,37 @@ class BarItems(pg.GraphicsObject):
            hb.topLeft(),
            hb.bottomRight(),
        )
+        mn_y = hb_tl.y()
+        mx_y = hb_br.y()
+        most_left = hb_tl.x()
+        most_right = hb_br.x()
+        # profiler('calc path vertices')

        # need to include last bar height or BR will be off
-        mx_y = hb_br.y()
-        mn_y = hb_tl.y()
-
-        last_lines = self._last_bar_lines
+        # OHLC line segments: [hl, o, c]
+        last_lines: tuple[QLineF] | None = self._last_bar_lines
        if last_lines:
-            body_line = self._last_bar_lines[0]
-            if body_line:
-                mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))
-                mn_y = min(mn_y, min(body_line.y1(), body_line.y2()))
-
-        return QtCore.QRectF(
-
-            # top left
-            QPointF(
-                hb_tl.x(),
-                mn_y,
-            ),
-
-            # bottom right
-            QPointF(
-                hb_br.x() + 1,
-                mx_y,
-            )
-
-        )
+            (
+                hl,
+                o,
+                c,
+            ) = last_lines
+            most_right = c.x2() + 1
+            ymx = ymn = c.y2()
+
+            if hl:
+                y1, y2 = hl.y1(), hl.y2()
+                ymn = min(y1, y2)
+                ymx = max(y1, y2)
+                mx_y = max(ymx, mx_y)
+                mn_y = min(ymn, mn_y)
+                # profiler('calc last bar vertices')
+
+        return QRectF(
+            most_left,
+            mn_y,
+            most_right - most_left + 1,
+            mx_y - mn_y,
+        )

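The rewritten `boundingRect()` folds the "last bar" line segments into the hull rect rather than only checking the first segment. A standalone sketch of the same min/max fold (pure Qt; the `last_bar_bounds` helper name is hypothetical, the `[hl, o, c]` segment layout comes from the diff):

```python
from __future__ import annotations
from PyQt5.QtCore import QLineF, QRectF

def last_bar_bounds(
    hull: QRectF,
    last_lines: tuple[QLineF, QLineF, QLineF] | None,
) -> QRectF:
    # start from the cached path's hull corners
    tl, br = hull.topLeft(), hull.bottomRight()
    mn_y, mx_y = tl.y(), br.y()
    most_left, most_right = tl.x(), br.x()

    if last_lines:
        hl, o, c = last_lines  # [high-low, open, close] segments
        most_right = c.x2() + 1
        if hl:
            y1, y2 = hl.y1(), hl.y2()
            mn_y = min(mn_y, y1, y2)
            mx_y = max(mx_y, y1, y2)

    # QRectF(x, y, w, h)
    return QRectF(
        most_left,
        mn_y,
        most_right - most_left + 1,
        mx_y - mn_y,
    )
```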
    def paint(
@@ -170,7 +188,7 @@ class BarItems(pg.GraphicsObject):

    ) -> None:

-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
            disabled=not pg_profile_enabled(),
            ms_threshold=ms_slower_then,
        )
@@ -212,11 +230,15 @@ class BarItems(pg.GraphicsObject):

        # relevant fields
        ohlc = src_data[fields]
-        last_row = ohlc[-1:]
+        # last_row = ohlc[-1:]

        # individual values
        last_row = i, o, h, l, last = ohlc[-1]

+        # times = src_data['time']
+        # if times[-1] - times[-2]:
+        #     breakpoint()

        # generate new lines objects for updatable "current bar"
        self._last_bar_lines = bar_from_ohlc_row(last_row)

@@ -247,4 +269,5 @@ class BarItems(pg.GraphicsObject):
        # date / from some previous sample. It's weird though
        # because i've seen it do this to bars i - 3 back?

+        # return ohlc['time'], ohlc['close']
        return ohlc['index'], ohlc['close']

@@ -22,12 +22,9 @@ from __future__ import annotations
from typing import (
    Optional, Generic,
    TypeVar, Callable,
-    Literal,
)
-import enum
-import sys

-from pydantic import BaseModel, validator
+# from pydantic import BaseModel, validator
from pydantic.generics import GenericModel
from PyQt5.QtWidgets import (
    QWidget,
@@ -38,6 +35,7 @@ from ._forms import (
    # FontScaledDelegate,
    Edit,
)
+from ..data.types import Struct


DataType = TypeVar('DataType')
@@ -62,7 +60,7 @@ class Selection(Field[DataType], Generic[DataType]):
    options: dict[str, DataType]
    # value: DataType = None

-    @validator('value')  # , always=True)
+    # @validator('value')  # , always=True)
    def set_value_first(
        cls,

@@ -100,7 +98,7 @@ class Edit(Field[DataType], Generic[DataType]):
    widget_factory = Edit


-class AllocatorPane(BaseModel):
+class AllocatorPane(Struct):

    account = Selection[str](
        options=dict.fromkeys(

@@ -18,23 +18,27 @@
Charting overlay helpers.

'''
-from typing import Callable, Optional
-
-from pyqtgraph.Qt.QtCore import (
-    # QObject,
-    # Signal,
-    Qt,
-    # QEvent,
+from collections import defaultdict
+from functools import partial
+from typing import (
+    Callable,
+    Optional,
)

from pyqtgraph.graphicsItems.AxisItem import AxisItem
from pyqtgraph.graphicsItems.ViewBox import ViewBox
-from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
+# from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
from pyqtgraph.graphicsItems.PlotItem.PlotItem import PlotItem
-from pyqtgraph.Qt.QtCore import QObject, Signal, QEvent
-from pyqtgraph.Qt.QtWidgets import QGraphicsGridLayout, QGraphicsLinearLayout
-
-from ._interaction import ChartView
+from pyqtgraph.Qt.QtCore import (
+    QObject,
+    Signal,
+    QEvent,
+    Qt,
+)
+from pyqtgraph.Qt.QtWidgets import (
+    # QGraphicsGridLayout,
+    QGraphicsLinearLayout,
+)

__all__ = ["PlotItemOverlay"]

@@ -80,8 +84,8 @@ class ComposedGridLayout:
    ``<axis_name>i`` in the layout.

    The ``item: PlotItem`` passed to the constructor's grid layout is
-    used verbatim as the "main plot" who's view box is give precedence
-    for input handling. The main plot's axes are removed from it's
+    used verbatim as the "main plot" who's view box is given precedence
+    for input handling. The main plot's axes are removed from its
    layout and placed in the surrounding exterior layouts to allow for
    re-ordering if desired.

@@ -89,16 +93,11 @@ class ComposedGridLayout:
    def __init__(
        self,
        item: PlotItem,
-        grid: QGraphicsGridLayout,
-        reverse: bool = False,  # insert items to the "center"

    ) -> None:
-        self.items: list[PlotItem] = []
-        # self.grid = grid
-        self.reverse = reverse
-
-        # TODO: use a ``bidict`` here?
-        self._pi2axes: dict[
+        self.items: list[PlotItem] = []
+        self._pi2axes: dict[  # TODO: use a ``bidict`` here?
            int,
            dict[str, AxisItem],
        ] = {}
@@ -120,12 +119,13 @@ class ComposedGridLayout:

        if name in ('top', 'bottom'):
            orient = Qt.Vertical

        elif name in ('left', 'right'):
            orient = Qt.Horizontal

        layout.setOrientation(orient)

-        self.insert(0, item)
+        self.insert_plotitem(0, item)

        # insert surrounding linear layouts into the parent pi's layout
        # such that additional axes can be appended arbitrarily without
@@ -159,7 +159,7 @@ class ComposedGridLayout:
        # enter plot into list for index tracking
        self.items.insert(index, plotitem)

-    def insert(
+    def insert_plotitem(
        self,
        index: int,
        plotitem: PlotItem,
@@ -171,7 +171,9 @@ class ComposedGridLayout:

        '''
        if index < 0:
-            raise ValueError('`insert()` only supports an index >= 0')
+            raise ValueError(
+                '`.insert_plotitem()` only supports an index >= 0'
+            )

        # add plot's axes in sequence to the embedded linear layouts
        # for each "side" thus avoiding graphics collisions.
@@ -220,7 +222,7 @@ class ComposedGridLayout:

        return index

-    def append(
+    def append_plotitem(
        self,
        item: PlotItem,

@@ -232,7 +234,7 @@ class ComposedGridLayout:
        '''
        # for left and bottom axes we have to first remove
        # items and re-insert to maintain a list-order.
-        return self.insert(len(self.items), item)
+        return self.insert_plotitem(len(self.items), item)

    def get_axis(
        self,
@@ -249,16 +251,16 @@ class ComposedGridLayout:
        named = self._pi2axes[name]
        return named.get(index)

-    def pop(
-        self,
-        item: PlotItem,
-
-    ) -> PlotItem:
-        '''
-        Remove item and restack all axes in list-order.
-
-        '''
-        raise NotImplementedError
+    # def pop(
+    #     self,
+    #     item: PlotItem,
+
+    # ) -> PlotItem:
+    #     '''
+    #     Remove item and restack all axes in list-order.
+
+    #     '''
+    #     raise NotImplementedError


# Unimplemented features TODO:
@@ -279,194 +281,6 @@ class ComposedGridLayout:
# axis?


-# TODO: we might want to enabled some kind of manual flag to disable
-# this method wrapping during type creation? As example a user could
-# definitively decide **not** to enable broadcasting support by
-# setting something like ``ViewBox.disable_relays = True``?
-def mk_relay_method(
-
-    signame: str,
-    slot: Callable[
-        [ViewBox,
-         'QEvent',
-         Optional[AxisItem]],
-        None,
-    ],
-
-) -> Callable[
-    [
-        ViewBox,
-        # lol, there isn't really a generic type thanks
-        # to the rewrite of Qt's event system XD
-        'QEvent',
-
-        'Optional[AxisItem]',
-        'Optional[ViewBox]',  # the ``relayed_from`` arg we provide
-    ],
-    None,
-]:
-
-    def maybe_broadcast(
-        vb: 'ViewBox',
-        ev: 'QEvent',
-        axis: 'Optional[int]' = None,
-        relayed_from: 'ViewBox' = None,
-
-    ) -> None:
-        '''
-        (soon to be) Decorator which makes an event handler
-        "broadcastable" to overlayed ``GraphicsWidget``s.
-
-        Adds relay signals based on the decorated handler's name
-        and conducts a signal broadcast of the relay signal if there
-        are consumers registered.
-
-        '''
-        # When no relay source has been set just bypass all
-        # the broadcast machinery.
-        if vb.event_relay_source is None:
-            ev.accept()
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        if relayed_from:
-            assert axis is None
-
-            # this is a relayed event and should be ignored (so it does not
-            # halt/short circuit the graphicscene loop). Further the
-            # surrounding handler for this signal must be allowed to execute
-            # and get processed by **this consumer**.
-            # print(f'{vb.name} rx relayed from {relayed_from.name}')
-            ev.ignore()
-
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        if axis is not None:
-            # print(f'{vb.name} handling axis event:\n{str(ev)}')
-            ev.accept()
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        elif (
-            relayed_from is None
-            and vb.event_relay_source is vb  # we are the broadcaster
-            and axis is None
-        ):
-            # Broadcast case: this is a source event which will be
-            # relayed to attached consumers and accepted after all
-            # consumers complete their own handling followed by this
-            # routine's processing. Sequence is,
-            # - pre-relay to all consumers *first* - ``.emit()`` blocks
-            #   until all downstream relay handlers have run.
-            # - run the source handler for **this** event and accept
-            #   the event
-
-            # Access the "bound signal" that is created
-            # on the widget type as part of instantiation.
-            signal = getattr(vb, signame)
-            # print(f'{vb.name} emitting {signame}')
-
-            # TODO/NOTE: we could also just bypass a "relay" signal
-            # entirely and instead call the handlers manually in
-            # a loop? This probably is a lot simpler and also doesn't
-            # have any downside, and allows not touching target widget
-            # internals.
-            signal.emit(
-                ev,
-                axis,
-                # passing this demarks a broadcasted/relayed event
-                vb,
-            )
-            # accept event so no more relays are fired.
-            ev.accept()
-
-            # call underlying wrapped method with an extra
-            # ``relayed_from`` value to denote that this is a relayed
-            # event handling case.
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-    return maybe_broadcast
-
-
-# XXX: :( can't define signals **after** class compile time
-# so this is not really useful.
-# def mk_relay_signal(
-#     func,
-#     name: str = None,
-
-# ) -> Signal:
-#     (
-#         args,
-#         varargs,
-#         varkw,
-#         defaults,
-#         kwonlyargs,
-#         kwonlydefaults,
-#         annotations
-#     ) = inspect.getfullargspec(func)
-
-#     # XXX: generate a relay signal with 1 extra
-#     # argument for a ``relayed_from`` kwarg. Since
-#     # ``'self'`` is already ignored by signals we just need
-#     # to count the arguments since we're adding only 1 (and
-#     # ``args`` will capture that).
-#     numargs = len(args + list(defaults))
-#     signal = Signal(*tuple(numargs * [object]))
-#     signame = name or func.__name__ + 'Relay'
-#     return signame, signal
-
-
-def enable_relays(
-    widget: GraphicsWidget,
-    handler_names: list[str],
-
-) -> list[Signal]:
-    '''
-    Method override helper which enables relay of a particular
-    ``Signal`` from some chosen broadcaster widget to a set of
-    consumer widgets which should operate their event handlers normally
-    but instead of signals "relayed" from the broadcaster.
-
-    Mostly useful for overlaying widgets that handle user input
-    that you want to overlay graphically. The target ``widget`` type must
-    define ``QtCore.Signal``s each with a `'Relay'` suffix for each
-    name provided in ``handler_names: list[str]``.
-
-    '''
-    signals = []
-    for name in handler_names:
-        handler = getattr(widget, name)
-        signame = name + 'Relay'
-        # ensure the target widget defines a relay signal
-        relay = getattr(widget, signame)
-        widget.relays[signame] = name
-        signals.append(relay)
-        method = mk_relay_method(signame, handler)
-        setattr(widget, name, method)
-
-    return signals
-
-
-enable_relays(
-    ChartView,
-    ['wheelEvent', 'mouseDragEvent']
-)


class PlotItemOverlay:
    '''
    A composite for managing overlaid ``PlotItem`` instances such that
@@ -482,16 +296,18 @@ class PlotItemOverlay:
    ) -> None:

        self.root_plotitem: PlotItem = root_plotitem
+        self.relay_handlers: defaultdict[
+            str,
+            list[Callable],
+        ] = defaultdict(list)

-        vb = root_plotitem.vb
-        vb.event_relay_source = vb  # TODO: maybe change name?
-        vb.setZValue(1000)  # XXX: critical for scene layering/relaying
+        # NOTE: required for scene layering/relaying; this guarantees
+        # the "root" plot receives priority for interaction
+        # events/signals.
+        root_plotitem.vb.setZValue(10)

        self.overlays: list[PlotItem] = []
-        self.layout = ComposedGridLayout(
-            root_plotitem,
-            root_plotitem.layout,
-        )
+        self.layout = ComposedGridLayout(root_plotitem)
        self._relays: dict[str, Signal] = {}

    def add_plotitem(
@@ -499,8 +315,10 @@ class PlotItemOverlay:
        plotitem: PlotItem,
        index: Optional[int] = None,

-        # TODO: we could also put the ``ViewBox.XAxis``
-        # style enum here?
+        # event/signal names which will be broadcasted to all added
+        # (relayee) ``PlotItem``s (eg. ``ViewBox.mouseDragEvent``).
+        relay_events: list[str] = [],

        # (0,),  # link x
        # (1,),  # link y
        # (0, 1),  # link both
@@ -510,58 +328,155 @@ class PlotItemOverlay:

        index = index or len(self.overlays)
        root = self.root_plotitem
-        # layout: QGraphicsGridLayout = root.layout
        self.overlays.insert(index, plotitem)
        vb: ViewBox = plotitem.vb

-        # mark this consumer overlay as ready to expect relayed events
-        # from the root plotitem.
-        vb.event_relay_source = root.vb
-
        # TODO: some sane way to allow menu event broadcast XD
        # vb.setMenuEnabled(False)

-        # TODO: inside the `maybe_broadcast()` (soon to be) decorator
-        # we need have checks that consumers have been attached to
-        # these relay signals.
-        if link_axes != (0, 1):
-
-            # wire up relay signals
-            for relay_signal_name, handler_name in vb.relays.items():
-                # print(handler_name)
-                # XXX: Signal class attrs are bound after instantiation
-                # of the defining type, so we need to access that bound
-                # version here.
-                signal = getattr(root.vb, relay_signal_name)
-                handler = getattr(vb, handler_name)
-                signal.connect(handler)
+        # wire up any relay signal(s) from the source plot to added
+        # "overlays". We use a plain loop instead of mucking with
+        # re-connecting signal/slots which tends to be more invasive and
+        # harder to implement and provides no measurable performance
+        # gain.
+        if relay_events:
+            for ev_name in relay_events:
+                relayee_handler: Callable[
+                    [
+                        ViewBox,
+                        # lol, there isn't really a generic type thanks
+                        # to the rewrite of Qt's event system XD
+                        QEvent,

+                        AxisItem | None,
+                    ],
+                    None,
+                ] = getattr(vb, ev_name)
+
+                sub_handlers: list[Callable] = self.relay_handlers[ev_name]
+
+                # on the first registry of a relayed event we pop the
+                # root's handler and override it to a custom broadcaster
+                # routine.
+                if not sub_handlers:
+
+                    src_handler = getattr(
+                        root.vb,
+                        ev_name,
+                    )
+
+                    def broadcast(
+                        ev: 'QEvent',
+
+                        # TODO: drop this viewbox specific input and
+                        # allow a predicate to be passed in by user.
+                        axis: 'Optional[int]' = None,
+
+                        *,
+
+                        # these are bound in by the ``partial`` below
+                        # and ensure a unique broadcaster per event.
+                        ev_name: str = None,
+                        src_handler: Callable = None,
+                        relayed_from: 'ViewBox' = None,
+
+                        # remaining inputs the source handler expects
+                        **kwargs,
+
+                    ) -> None:
+                        '''
+                        Broadcast signal or event: this is a source
+                        event which will be relayed to attached
+                        "relayee" plot item consumers.
+
+                        The event is accepted halting any further
+                        handlers from being triggered.
+
+                        Sequence is,
+                        - pre-relay to all consumers *first* - exactly
+                          like how a ``Signal.emit()`` blocks until all
+                          downstream relay handlers have run.
+                        - run the event's source handler event
+
+                        '''
+                        ev.accept()
+
+                        # broadcast first to relayees *first*. trigger
+                        # relay of event to all consumers **before**
+                        # processing/consumption in the source handler.
+                        relayed_handlers = self.relay_handlers[ev_name]
+
+                        assert getattr(vb, ev_name).__name__ == ev_name
+
+                        # TODO: generalize as an input predicate
+                        if axis is None:
+                            for handler in relayed_handlers:
+                                handler(
+                                    ev,
+                                    axis=axis,
+                                    **kwargs,
+                                )
+
+                        # run "source" widget's handler last
+                        src_handler(
+                            ev,
+                            axis=axis,
+                        )
+
+                    # dynamic handler override on the publisher plot
+                    setattr(
+                        root.vb,
+                        ev_name,
+                        partial(
+                            broadcast,
+                            ev_name=ev_name,
+                            src_handler=src_handler
+                        ),
+                    )
+
+                else:
+                    assert getattr(root.vb, ev_name)
+                    assert relayee_handler not in sub_handlers
+
+                # append relayed-to widget's handler to relay table
+                sub_handlers.append(relayee_handler)

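The relay rewrite above drops the class-level Qt `Signal` machinery in favor of a plain method override: on first registration the root view box's handler is replaced, via `setattr`, with a `partial`-bound broadcaster that runs every relayee handler before the original. A stripped-down sketch of that pattern, using generic objects in place of the real `ViewBox` API:

```python
from collections import defaultdict
from functools import partial
from typing import Any, Callable

relay_handlers: defaultdict[str, list[Callable]] = defaultdict(list)

def broadcast(
    ev: Any,
    *,
    ev_name: str,
    src_handler: Callable,
    **kwargs,
) -> None:
    # fan out to relayees *first*, then run the source handler
    for handler in relay_handlers[ev_name]:
        handler(ev, **kwargs)
    src_handler(ev, **kwargs)

def enable_relay(root: Any, relayee: Any, ev_name: str) -> None:
    subs = relay_handlers[ev_name]
    if not subs:
        # first registration: swap the root's bound handler for
        # a broadcaster closed over the original.
        src_handler = getattr(root, ev_name)
        setattr(
            root,
            ev_name,
            partial(broadcast, ev_name=ev_name, src_handler=src_handler),
        )
    subs.append(getattr(relayee, ev_name))
```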
        # link dim-axes to root if requested by user.
-        # TODO: solve more-then-wanted scaled panning on click drag
-        # which seems to be due to broadcast. So we probably need to
-        # disable broadcast when axes are linked in a particular
-        # dimension?
        for dim in link_axes:
            # link x and y axes to new view box such that the top level
            # viewbox propagates to the root (and whatever other
            # plotitem overlays that have been added).
            vb.linkView(dim, root.vb)

-        # make overlaid viewbox impossible to focus since the top
-        # level should handle all input and relay to overlays.
-        # NOTE: this was solved with the `setZValue()` above!
+        # => NOTE: in order to prevent "more-then-linear" scaled
+        # panning moves on (for eg. click-drag) certain range change
+        # signals (i.e. ``.sigXRangeChanged``), the user needs to be
+        # careful that any broadcasted ``relay_events`` are are short
+        # circuited in sub-handlers (aka relayee's) implementations. As
+        # an example if a ``ViewBox.mouseDragEvent`` is broadcasted, the
+        # overlayed implementations need to be sure they either don't
+        # also link the x-axes (by not providing ``link_axes=(0,)``
+        # above) or that the relayee ``.mouseDragEvent()`` handlers are
+        # ready to "``return`` early" in the case that
+        # ``.sigXRangeChanged`` is emitted as part of linked axes.
+        # For more details on such signalling mechanics peek in
+        # ``ViewBox.linkView()``.

-        # TODO: we will probably want to add a "focus" api such that
-        # a new "top level" ``PlotItem`` can be selected dynamically
-        # (and presumably the axes dynamically sorted to match).
+        # make overlaid viewbox impossible to focus since the top level
+        # should handle all input and relay to overlays. Note that the
+        # "root" plot item gettingn interaction priority is configured
+        # with the ``.setZValue()`` during init.
        vb.setFlag(
            vb.GraphicsItemFlag.ItemIsFocusable,
            False
        )
        vb.setFocusPolicy(Qt.NoFocus)

+        # => TODO: add a "focus" api for switching the "top level"
+        # ``PlotItem`` dynamically.

        # append-compose into the layout all axes from this plot
-        self.layout.insert(index, plotitem)
+        self.layout.insert_plotitem(index, plotitem)

        plotitem.setGeometry(root.vb.sceneBoundingRect())

@@ -579,24 +494,7 @@ class PlotItemOverlay:
        root.vb.setFocus()
        assert root.vb.focusWidget()

-    # XXX: do we need this? Why would you build then destroy?
-    def remove_plotitem(self, plotItem: PlotItem) -> None:
-        '''
-        Remove this ``PlotItem`` from the overlayed set making not shown
-        and unable to accept input.
-
-        '''
-        ...
-
-    # TODO: i think this would be super hot B)
-    def focus_item(self, plotitem: PlotItem) -> PlotItem:
-        '''
-        Apply focus to a contained PlotItem thus making it the "top level"
-        item in the overlay able to accept peripheral's input from the user
-        and responsible for zoom and panning control via its ``ViewBox``.
-
-        '''
-        ...
+        vb.setZValue(100)

    def get_axis(
        self,
@@ -630,8 +528,9 @@ class PlotItemOverlay:

        return axes

-    # TODO: i guess we need this if you want to detach existing plots
-    # dynamically? XXX: untested as of now.
+    # XXX: untested as of now.
+    # TODO: need this as part of selecting a different root/source
+    # plot to rewire interaction event broadcast dynamically.
    def _disconnect_all(
        self,
        plotitem: PlotItem,
@@ -646,3 +545,22 @@ class PlotItemOverlay:
            disconnected.append(sig)

        return disconnected
+
+    # XXX: do we need this? Why would you build then destroy?
+    # def remove_plotitem(self, plotItem: PlotItem) -> None:
+    #     '''
+    #     Remove this ``PlotItem`` from the overlayed set making not shown
+    #     and unable to accept input.
+
+    #     '''
+    #     ...
+
+    # TODO: i think this would be super hot B)
+    # def focus_plotitem(self, plotitem: PlotItem) -> PlotItem:
+    #     '''
+    #     Apply focus to a contained PlotItem thus making it the "top level"
+    #     item in the overlay able to accept peripheral's input from the user
+    #     and responsible for zoom and panning control via its ``ViewBox``.
+
+    #     '''
+    #     ...

@@ -19,15 +19,16 @@ Super fast ``QPainterPath`` generation related operator routines.
 """
 from __future__ import annotations
 from typing import (
-    # Optional,
+    Optional,
     TYPE_CHECKING,
 )

+import msgspec
 import numpy as np
 from numpy.lib import recfunctions as rfn
 from numba import njit, float64, int64  # , optional
 # import pyqtgraph as pg
-from PyQt5 import QtGui
+# from PyQt5 import QtGui
 # from PyQt5.QtCore import QLineF, QPointF

 from ..data._sharedmem import (
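The newly added ``import msgspec`` backs the ``IncrementalFormatter(msgspec.Struct)`` type introduced in the next hunk. A minimal sketch of what subclassing ``msgspec.Struct`` buys you, namely cheap, typed, fixed-field instances (the ``Reader`` type here is hypothetical):

import msgspec

class Reader(msgspec.Struct):
    name: str
    start: int = 0

r = Reader('ohlc', start=5)
assert (r.name, r.start) == ('ohlc', 5)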
@@ -39,7 +40,778 @@ from ._compression import (
 )

 if TYPE_CHECKING:
-    from ._flows import Renderer
+    from ._flows import (
+        Renderer,
+        Flow,
+    )
+    from .._profile import Profiler
+
+
+def by_index_and_key(
+    renderer: Renderer,
+    array: np.ndarray,
+    array_key: str,
+    vr: tuple[int, int],
+
+) -> tuple[
+    np.ndarray,
+    np.ndarray,
+    np.ndarray,
+]:
+    return array['index'], array[array_key], 'all'
+
+
+class IncrementalFormatter(msgspec.Struct):
+    '''
+    Incrementally updating, pre-path-graphics tracking, formatter.
+
+    Allows tracking source data state in an updateable pre-graphics
+    ``np.ndarray`` format (in local process memory) as well as
+    incrementally rendering from that format **to** 1d x/y for path
+    generation using ``pg.functions.arrayToQPath()``.
+
+    '''
+    shm: ShmArray
+    flow: Flow
+
+    # last read from shm (usually due to an update call)
+    _last_read: tuple[
+        int,
+        int,
+        np.ndarray
+
+    ]
+
+    @property
+    def last_read(self) -> tuple | None:
+        return self._last_read
+
+    def __repr__(self) -> str:
+        msg = (
+            f'{type(self)}: ->\n\n'
+            f'fqsn={self.flow.name}\n'
+            f'shm_name={self.shm.token["shm_name"]}\n\n'
+
+            f'last_vr={self._last_vr}\n'
+            f'last_ivdr={self._last_ivdr}\n\n'
+
+            f'xy_nd_start={self.xy_nd_start}\n'
+            f'xy_nd_stop={self.xy_nd_stop}\n\n'
+        )
+
+        x_nd_len = 0
+        y_nd_len = 0
+        if self.x_nd is not None:
+            x_nd_len = len(self.x_nd)
+            y_nd_len = len(self.y_nd)
+
+        msg += (
+            f'x_nd_len={x_nd_len}\n'
+            f'y_nd_len={y_nd_len}\n'
+        )
+
+        return msg
+
+    def diff(
+        self,
+        new_read: tuple[np.ndarray],
+
+    ) -> tuple[
+        np.ndarray,
+        np.ndarray,
+    ]:
+        (
+            last_xfirst,
+            last_xlast,
+            last_array,
+            last_ivl,
+            last_ivr,
+            last_in_view,
+        ) = self.last_read
+
+        # TODO: can the renderer just call ``Flow.read()`` directly?
+        # unpack latest source data read
+        (
+            xfirst,
+            xlast,
+            array,
+            ivl,
+            ivr,
+            in_view,
+        ) = new_read
+
+        # compute the length diffs between the first/last index entry in
+        # the input data and the last indexes we have on record from the
+        # last time we updated the curve index.
+        prepend_length = int(last_xfirst - xfirst)
+        append_length = int(xlast - last_xlast)
+
+        # blah blah blah
+        # do diffing for prepend, append and last entry
+        return (
+            slice(xfirst, last_xfirst),
+            prepend_length,
+            append_length,
+            slice(last_xlast, xlast),
+        )
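Note that ``.diff()`` above works purely on first/last *index* values, never on array contents. A hypothetical worked example of the prepend/append arithmetic:

# state from the previous read vs. the latest shm read (made-up values)
last_xfirst, last_xlast = 100, 200
xfirst, xlast = 90, 205

prepend_length = int(last_xfirst - xfirst)   # 10 new rows on the left
append_length = int(xlast - last_xlast)      # 5 new rows on the right

pre_slice = slice(xfirst, last_xfirst)       # slice(90, 100)
post_slice = slice(last_xlast, xlast)        # slice(200, 205)
assert (prepend_length, append_length) == (10, 5)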
+    # Incrementally updated xy ndarray formatted data, a pre-1d
+    # format which is updated and cached independently of the final
+    # pre-graphics-path 1d format.
+    x_nd: Optional[np.ndarray] = None
+    y_nd: Optional[np.ndarray] = None
+
+    # indexes which slice into the above arrays (which are allocated
+    # based on source data shm input size) and allow retrieving
+    # incrementally updated data.
+    xy_nd_start: int = 0
+    xy_nd_stop: int = 0
+
+    # TODO: eventually incrementally update 1d-pre-graphics path data?
+    # x_1d: Optional[np.ndarray] = None
+    # y_1d: Optional[np.ndarray] = None
+
+    # incremental view-change state(s) tracking
+    _last_vr: tuple[float, float] | None = None
+    _last_ivdr: tuple[float, float] | None = None
+
+    def _track_inview_range(
+        self,
+        view_range: tuple[int, int],
+
+    ) -> bool:
+        # if a view range is passed, plan to draw the
+        # source ouput that's "in view" of the chart.
+        vl, vr = view_range
+        zoom_or_append = False
+        last_vr = self._last_vr
+
+        # incremental in-view data update.
+        if last_vr:
+            lvl, lvr = last_vr  # relative slice indices
+
+            # TODO: detecting more specifically the interaction changes
+            # last_ivr = self._last_ivdr or (vl, vr)
+            # al, ar = last_ivr  # abs slice indices
+            # left_change = abs(x_iv[0] - al) >= 1
+            # right_change = abs(x_iv[-1] - ar) >= 1
+
+            # likely a zoom/pan view change or data append update
+            if (
+                (vr - lvr) > 2
+                or vl < lvl
+
+                # append / prepend update
+                # we had an append update where the view range
+                # didn't change but the data-viewed (shifted)
+                # underneath, so we need to redraw.
+                # or left_change and right_change and last_vr == view_range
+
+                # not (left_change and right_change) and ivr
+                # (
+                # or abs(x_iv[ivr] - livr) > 1
+            ):
+                zoom_or_append = True
+
+        self._last_vr = view_range
+
+        return zoom_or_append
+
+    def format_to_1d(
+        self,
+        new_read: tuple,
+        array_key: str,
+        profiler: Profiler,
+
+        slice_to_head: int = -1,
+        read_src_from_key: bool = True,
+        slice_to_inview: bool = True,
+
+    ) -> tuple[
+        np.ndarray,
+        np.ndarray,
+    ]:
+        shm = self.shm
+
+        (
+            _,
+            _,
+            array,
+            ivl,
+            ivr,
+            in_view,
+
+        ) = new_read
+
+        (
+            pre_slice,
+            prepend_len,
+            append_len,
+            post_slice,
+        ) = self.diff(new_read)
+
+        if self.y_nd is None:
+            # we first need to allocate xy data arrays
+            # from the source data.
+            self.x_nd, self.y_nd = self.allocate_xy_nd(
+                shm,
+                array_key,
+            )
+            self.xy_nd_start = shm._first.value
+            self.xy_nd_stop = shm._last.value
+            profiler('allocated xy history')
+
+        if prepend_len:
+            y_prepend = shm._array[pre_slice]
+            if read_src_from_key:
+                y_prepend = y_prepend[array_key]
+
+            (
+                new_y_nd,
+                y_nd_slc,
+
+            ) = self.incr_update_xy_nd(
+                shm,
+                array_key,
+
+                # this is the pre-sliced, "normally expected"
+                # new data that an updater would normally be
+                # expected to process, however in some cases (like
+                # step curves) the updater routine may want to do
+                # the source history-data reading itself, so we pass
+                # both here.
+                y_prepend,
+                pre_slice,
+                prepend_len,
+
+                self.xy_nd_start,
+                self.xy_nd_stop,
+                is_append=False,
+            )
+
+            # y_nd_view = self.y_nd[y_nd_slc]
+            self.y_nd[y_nd_slc] = new_y_nd
+            # if read_src_from_key:
+            #     y_nd_view[:][array_key] = new_y_nd
+            # else:
+            #     y_nd_view[:] = new_y_nd
+
+            self.xy_nd_start = shm._first.value
+            profiler('prepended xy history: {prepend_length}')
+
+        if append_len:
+            y_append = shm._array[post_slice]
+            if read_src_from_key:
+                y_append = y_append[array_key]
+
+            (
+                new_y_nd,
+                y_nd_slc,
+
+            ) = self.incr_update_xy_nd(
+                shm,
+                array_key,
+
+                y_append,
+                post_slice,
+                append_len,
+
+                self.xy_nd_start,
+                self.xy_nd_stop,
+                is_append=True,
+            )
+            # self.y_nd[post_slice] = new_y_nd
+            # self.y_nd[xy_slice or post_slice] = xy_data
+            self.y_nd[y_nd_slc] = new_y_nd
+            # if read_src_from_key:
+            #     y_nd_view[:][array_key] = new_y_nd
+            # else:
+            #     y_nd_view[:] = new_y_nd
+
+            self.xy_nd_stop = shm._last.value
+            profiler('appened xy history: {append_length}')
+
+        view_changed: bool = False
+        view_range: tuple[int, int] = (ivl, ivr)
+        if slice_to_inview:
+            view_changed = self._track_inview_range(view_range)
+            array = in_view
+            profiler(f'{self.flow.name} view range slice {view_range}')
+
+        hist = array[:slice_to_head]
+
+        # xy-path data transform: convert source data to a format
+        # able to be passed to a `QPainterPath` rendering routine.
+        if not len(hist):
+            # XXX: this might be why the profiler only has exits?
+            return
+
+        # TODO: hist here should be the pre-sliced
+        # x/y_data in the case where allocate_xy is
+        # defined?
+        x_1d, y_1d, connect = self.format_xy_nd_to_1d(
+            hist,
+            array_key,
+            view_range,
+        )
+
+        # app_tres = None
+        # if append_len:
+        #     appended = array[-append_len-1:slice_to_head]
+        #     app_tres = self.format_xy_nd_to_1d(
+        #         appended,
+        #         array_key,
+        #         (
+        #             view_range[1] - append_len + slice_to_head,
+        #             view_range[1]
+        #         ),
+        #     )
+        #     # assert (len(appended) - 1) == append_len
+        #     # assert len(appended) == append_len
+        #     print(
+        #         f'{self.flow.name} APPEND LEN: {append_len}\n'
+        #         f'{self.flow.name} APPENDED: {appended}\n'
+        #         f'{self.flow.name} app_tres: {app_tres}\n'
+        #     )
+
+        # update the last "in view data range"
+        if len(x_1d):
+            self._last_ivdr = x_1d[0], x_1d[slice_to_head]
+
+        # TODO: eventually maybe we can implement some kind of
+        # transform on the ``QPainterPath`` that will more or less
+        # detect the diff in "elements" terms?
+        # update diff state since we've now rendered paths.
+        self._last_read = new_read
+
+        profiler('.format_to_1d()')
+        return (
+            x_1d,
+            y_1d,
+            connect,
+            prepend_len,
+            append_len,
+            view_changed,
+            # app_tres,
+        )
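The core of ``format_to_1d()`` reduces to writing freshly read rows into pre-allocated nd arrays by slice, then bumping the valid-window pointers. A toy reduction of that update step (all values hypothetical):

import numpy as np

y_nd = np.zeros(10)
xy_nd_start, xy_nd_stop = 3, 7          # currently valid window
new_rows = np.array([1., 2.])           # "appended" source data

post_slice = slice(xy_nd_stop, xy_nd_stop + len(new_rows))
y_nd[post_slice] = new_rows             # incremental write, no realloc
xy_nd_stop = post_slice.stop

assert list(y_nd[3:9]) == [0., 0., 0., 0., 1., 2.]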
+    ###############################
+    # Sub-type override interface #
+    ###############################
+
+    # optional pre-graphics xy formatted data which
+    # is incrementally updated in sync with the source data.
+    # XXX: was ``.allocate_xy()``
+    def allocate_xy_nd(
+        self,
+        src_shm: ShmArray,
+        data_field: str,
+        index_field: str = 'index',
+
+    ) -> tuple[
+        np.ndarray,  # x
+        np.nd.array  # y
+    ]:
+        '''
+        Convert the structured-array ``src_shm`` format to
+        a equivalently shaped (and field-less) ``np.ndarray``.
+
+        Eg. a 4 field x N struct-array => (N, 4)
+
+        '''
+        y_nd = src_shm._array[data_field].copy()
+        x_nd = src_shm._array[index_field].copy()
+        return x_nd, y_nd
+
+    # XXX: was ``.update_xy()``
+    def incr_update_xy_nd(
+        self,
+
+        src_shm: ShmArray,
+        data_field: str,
+
+        new_from_src: np.ndarray,  # portion of source that was updated
+
+        read_slc: slice,
+        ln: int,  # len of updated
+
+        nd_start: int,
+        nd_stop: int,
+
+        is_append: bool,
+        index_field: str = 'index',
+
+    ) -> tuple[
+        np.ndarray,
+        slice,
+    ]:
+        # write pushed data to flattened copy
+        new_y_nd = new_from_src
+
+        # XXX
+        # TODO: this should be returned and written by caller!
+        # XXX
+        # generate same-valued-per-row x support based on y shape
+        if index_field != 'index':
+            self.x_nd[read_slc, :] = new_from_src[index_field]
+
+        return new_y_nd, read_slc
+
+    # XXX: was ``.format_xy()``
+    def format_xy_nd_to_1d(
+        self,
+
+        array: np.ndarray,
+        array_key: str,
+        vr: tuple[int, int],
+
+    ) -> tuple[
+        np.ndarray,  # 1d x
+        np.ndarray,  # 1d y
+        np.ndarray | str,  # connection array/style
+    ]:
+        '''
+        Default xy-nd array to 1d pre-graphics-path render routine.
+
+        Return single field column data verbatim
+
+        '''
+        return (
+            array['index'],
+            array[array_key],
+
+            # 1d connection array or style-key to
+            # ``pg.functions.arrayToQPath()``
+            'all',
+        )
+class OHLCBarsFmtr(IncrementalFormatter):
+
+    fields: list[str] = ['open', 'high', 'low', 'close']
+
+    def allocate_xy_nd(
+        self,
+
+        ohlc_shm: ShmArray,
+        data_field: str,
+
+    ) -> tuple[
+        np.ndarray,  # x
+        np.nd.array  # y
+    ]:
+        '''
+        Convert an input struct-array holding OHLC samples into a pair of
+        flattened x, y arrays with the same size (datums wise) as the source
+        data.
+
+        '''
+        y_nd = ohlc_shm.ustruct(self.fields)
+
+        # generate an flat-interpolated x-domain
+        x_nd = (
+            np.broadcast_to(
+                ohlc_shm._array['index'][:, None],
+                (
+                    ohlc_shm._array.size,
+                    # 4,  # only ohlc
+                    y_nd.shape[1],
+                ),
+            ) + np.array([-0.5, 0, 0, 0.5])
+        )
+        assert y_nd.any()
+
+        # write pushed data to flattened copy
+        return (
+            x_nd,
+            y_nd,
+        )
+
+    @staticmethod
+    @njit(
+        # TODO: for now need to construct this manually for readonly
+        # arrays, see https://github.com/numba/numba/issues/4511
+        # ntypes.tuple((float64[:], float64[:], float64[:]))(
+        #     numba_ohlc_dtype[::1],  # contiguous
+        #     int64,
+        #     optional(float64),
+        # ),
+        nogil=True
+    )
+    def path_arrays_from_ohlc(
+        data: np.ndarray,
+        start: int64,
+        bar_gap: float64 = 0.43,
+
+    ) -> tuple[
+        np.ndarray,
+        np.ndarray,
+        np.ndarray,
+    ]:
+        '''
+        Generate an array of lines objects from input ohlc data.
+
+        '''
+        size = int(data.shape[0] * 6)
+
+        x = np.zeros(
+            # data,
+            shape=size,
+            dtype=float64,
+        )
+        y, c = x.copy(), x.copy()
+
+        # TODO: report bug for assert @
+        # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
+        for i, q in enumerate(data[start:], start):
+
+            # TODO: ask numba why this doesn't work..
+            # open, high, low, close, index = q[
+            #     ['open', 'high', 'low', 'close', 'index']]
+
+            open = q['open']
+            high = q['high']
+            low = q['low']
+            close = q['close']
+            index = float64(q['index'])
+
+            istart = i * 6
+            istop = istart + 6
+
+            # x,y detail the 6 points which connect all vertexes of a ohlc bar
+            x[istart:istop] = (
+                index - bar_gap,
+                index,
+                index,
+                index,
+                index,
+                index + bar_gap,
+            )
+            y[istart:istop] = (
+                open,
+                open,
+                low,
+                high,
+                close,
+                close,
+            )
+
+            # specifies that the first edge is never connected to the
+            # prior bars last edge thus providing a small "gap"/"space"
+            # between bars determined by ``bar_gap``.
+            c[istart:istop] = (1, 1, 1, 1, 1, 0)
+
+        return x, y, c
+
+    # TODO: can we drop this frame and just use the above?
+    def format_xy_nd_to_1d(
+        self,
+
+        array: np.ndarray,
+        array_key: str,
+        vr: tuple[int, int],
+
+        start: int = 0,  # XXX: do we need this?
+        # 0.5 is no overlap between arms, 1.0 is full overlap
+        w: float = 0.43,
+
+    ) -> tuple[
+        np.ndarray,
+        np.ndarray,
+        np.ndarray,
+    ]:
+        '''
+        More or less direct proxy to the ``numba``-fied
+        ``path_arrays_from_ohlc()`` (above) but with closed in kwargs
+        for line spacing.
+
+        '''
+        x, y, c = self.path_arrays_from_ohlc(
+            array,
+            start,
+            bar_gap=w,
+        )
+        return x, y, c
+
+    def incr_update_xy_nd(
+        self,
+
+        src_shm: ShmArray,
+        data_field: str,
+
+        new_from_src: np.ndarray,  # portion of source that was updated
+
+        read_slc: slice,
+        ln: int,  # len of updated
+
+        nd_start: int,
+        nd_stop: int,
+
+        is_append: bool,
+        index_field: str = 'index',
+
+    ) -> tuple[
+        np.ndarray,
+        slice,
+    ]:
+        # write newly pushed data to flattened copy
+        # a struct-arr is always passed in.
+        new_y_nd = rfn.structured_to_unstructured(
+            new_from_src[self.fields]
+        )
+
+        # XXX
+        # TODO: this should be returned and written by caller!
+        # XXX
+        # generate same-valued-per-row x support based on y shape
+        if index_field != 'index':
+            self.x_nd[read_slc, :] = new_from_src[index_field]
+
+        return new_y_nd, read_slc
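The ``incr_update_xy_nd()`` override above leans on ``rfn.structured_to_unstructured()``; shown standalone under an assumed OHLC dtype (sample data hypothetical):

import numpy as np
from numpy.lib import recfunctions as rfn

dt = np.dtype([('open', 'f8'), ('high', 'f8'), ('low', 'f8'), ('close', 'f8')])
rows = np.array([(1., 2., 0.5, 1.5), (1.5, 3., 1., 2.)], dtype=dt)

# struct-array of OHLC fields -> plain (N, 4) float array
flat = rfn.structured_to_unstructured(rows[['open', 'high', 'low', 'close']])
assert flat.shape == (2, 4)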
+class OHLCBarsAsCurveFmtr(OHLCBarsFmtr):
+
+    def format_xy_nd_to_1d(
+        self,
+
+        array: np.ndarray,
+        array_key: str,
+        vr: tuple[int, int],
+
+    ) -> tuple[
+        np.ndarray,
+        np.ndarray,
+        str,
+    ]:
+        # TODO: in the case of an existing ``.update_xy()``
+        # should we be passing in array as an xy arrays tuple?
+
+        # 2 more datum-indexes to capture zero at end
+        x_flat = self.x_nd[self.xy_nd_start:self.xy_nd_stop]
+        y_flat = self.y_nd[self.xy_nd_start:self.xy_nd_stop]
+
+        # slice to view
+        ivl, ivr = vr
+        x_iv_flat = x_flat[ivl:ivr]
+        y_iv_flat = y_flat[ivl:ivr]
+
+        # reshape to 1d for graphics rendering
+        y_iv = y_iv_flat.reshape(-1)
+        x_iv = x_iv_flat.reshape(-1)
+
+        return x_iv, y_iv, 'all'
+class StepCurveFmtr(IncrementalFormatter):
+
+    def allocate_xy_nd(
+        self,
+
+        shm: ShmArray,
+        data_field: str,
+
+        index_field: str = 'index',
+
+    ) -> tuple[
+        np.ndarray,  # x
+        np.nd.array  # y
+    ]:
+        '''
+        Convert an input 1d shm array to a "step array" format
+        for use by path graphics generation.
+
+        '''
+        i = shm._array['index'].copy()
+        out = shm._array[data_field].copy()
+
+        x_out = np.broadcast_to(
+            i[:, None],
+            (i.size, 2),
+        ) + np.array([-0.5, 0.5])
+
+        y_out = np.empty((len(out), 2), dtype=out.dtype)
+        y_out[:] = out[:, np.newaxis]
+
+        # start y at origin level
+        y_out[0, 0] = 0
+        return x_out, y_out
+
+    def incr_update_xy_nd(
+        self,
+
+        src_shm: ShmArray,
+        array_key: str,
+
+        src_update: np.ndarray,  # portion of source that was updated
+        slc: slice,
+        ln: int,  # len of updated
+
+        first: int,
+        last: int,
+
+        is_append: bool,
+
+    ) -> tuple[
+        np.ndarray,
+        slice,
+    ]:
+        # for a step curve we slice from one datum prior
+        # to the current "update slice" to get the previous
+        # "level".
+        if is_append:
+            start = max(last - 1, 0)
+            end = src_shm._last.value
+            new_y = src_shm._array[start:end][array_key]
+            slc = slice(start, end)
+
+        else:
+            new_y = src_update
+
+        return (
+            np.broadcast_to(
+                new_y[:, None], (new_y.size, 2),
+            ),
+            slc,
+        )
+
+    def format_xy_nd_to_1d(
+        self,
+
+        array: np.ndarray,
+        array_key: str,
+        vr: tuple[int, int],
+
+    ) -> tuple[
+        np.ndarray,
+        np.ndarray,
+        str,
+    ]:
+        lasts = array[['index', array_key]]
+        last = lasts[array_key][-1]
+
+        # 2 more datum-indexes to capture zero at end
+        x_step = self.x_nd[self.xy_nd_start:self.xy_nd_stop+2]
+        y_step = self.y_nd[self.xy_nd_start:self.xy_nd_stop+2]
+        y_step[-1] = last
+
+        # slice out in-view data
+        ivl, ivr = vr
+        ys_iv = y_step[ivl:ivr+1]
+        xs_iv = x_step[ivl:ivr+1]
+
+        # flatten to 1d
+        y_iv = ys_iv.reshape(ys_iv.size)
+        x_iv = xs_iv.reshape(xs_iv.size)
+
+        # print(
+        #     f'ys_iv : {ys_iv[-s:]}\n'
+        #     f'y_iv: {y_iv[-s:]}\n'
+        #     f'xs_iv: {xs_iv[-s:]}\n'
+        #     f'x_iv: {x_iv[-s:]}\n'
+        # )
+
+        return x_iv, y_iv, 'all'
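``StepCurveFmtr.allocate_xy_nd()`` builds its "step array" with a broadcast trick: each scalar level is duplicated into an (N, 2) pair while x gets -0.5/+0.5 offsets, so the flattened arrays trace flat-topped steps. In isolation (sample values hypothetical):

import numpy as np

i = np.arange(3)                      # datum indexes
y = np.array([10., 12., 11.])         # one level per datum

x_out = np.broadcast_to(i[:, None], (i.size, 2)) + np.array([-0.5, 0.5])
y_out = np.empty((len(y), 2), dtype=y.dtype)
y_out[:] = y[:, np.newaxis]

assert x_out.shape == y_out.shape == (3, 2)
# flattened: x = [-0.5, 0.5, 0.5, 1.5, 1.5, 2.5], y = [10,10, 12,12, 11,11]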
@@ -49,12 +821,21 @@ def xy_downsample(
     x_spacer: float = 0.5,

-) -> tuple[np.ndarray, np.ndarray]:
+) -> tuple[
+    np.ndarray,
+    np.ndarray,
+    float,
+    float,
+]:
+    '''
+    Downsample 1D (flat ``numpy.ndarray``) arrays using M4 given an input
+    ``uppx`` (units-per-pixel) and add space between discreet datums.
+
+    '''
     # downsample whenever more then 1 pixels per datum can be shown.
     # always refresh data bounds until we get diffing
     # working properly, see above..
-    bins, x, y = ds_m4(
+    bins, x, y, ymn, ymx = ds_m4(
         x,
         y,
         uppx,
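``ds_m4()`` is the project's M4 downsampler (now also returning the ``ymn``/``ymx`` bounds, per the hunk above). Setting its exact signature aside, the underlying M4 idea reduces to keeping only the first, min, max and last samples per x-pixel bin, which preserves the drawn line's visual shape. A rough sketch of that idea, not the real implementation:

import numpy as np

def m4_bin(y: np.ndarray, nbins: int) -> np.ndarray:
    # -> (nbins, 4) array of (first, min, max, last) per bin
    chunks = np.array_split(y, nbins)
    return np.array([
        (c[0], c.min(), c.max(), c[-1]) for c in chunks
    ])

y = np.sin(np.linspace(0, 10, 10_000))
out = m4_bin(y, nbins=100)
assert out.shape == (100, 4)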
@@ -67,170 +848,4 @@ def xy_downsample(
     )).flatten()
     y = y.flatten()

-    return x, y
+    return x, y, ymn, ymx
-
-
-@njit(
-    # TODO: for now need to construct this manually for readonly arrays, see
-    # https://github.com/numba/numba/issues/4511
-    # ntypes.tuple((float64[:], float64[:], float64[:]))(
-    #     numba_ohlc_dtype[::1],  # contiguous
-    #     int64,
-    #     optional(float64),
-    # ),
-    nogil=True
-)
-def path_arrays_from_ohlc(
-    data: np.ndarray,
-    start: int64,
-    bar_gap: float64 = 0.43,
-
-) -> np.ndarray:
-    '''
-    Generate an array of lines objects from input ohlc data.
-
-    '''
-    size = int(data.shape[0] * 6)
-
-    x = np.zeros(
-        # data,
-        shape=size,
-        dtype=float64,
-    )
-    y, c = x.copy(), x.copy()
-
-    # TODO: report bug for assert @
-    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
-    for i, q in enumerate(data[start:], start):
-
-        # TODO: ask numba why this doesn't work..
-        # open, high, low, close, index = q[
-        #     ['open', 'high', 'low', 'close', 'index']]
-
-        open = q['open']
-        high = q['high']
-        low = q['low']
-        close = q['close']
-        index = float64(q['index'])
-
-        istart = i * 6
-        istop = istart + 6
-
-        # x,y detail the 6 points which connect all vertexes of a ohlc bar
-        x[istart:istop] = (
-            index - bar_gap,
-            index,
-            index,
-            index,
-            index,
-            index + bar_gap,
-        )
-        y[istart:istop] = (
-            open,
-            open,
-            low,
-            high,
-            close,
-            close,
-        )
-
-        # specifies that the first edge is never connected to the
-        # prior bars last edge thus providing a small "gap"/"space"
-        # between bars determined by ``bar_gap``.
-        c[istart:istop] = (1, 1, 1, 1, 1, 0)
-
-    return x, y, c
-
-
-def gen_ohlc_qpath(
-    r: Renderer,
-    data: np.ndarray,
-    array_key: str,  # we ignore this
-    vr: tuple[int, int],
-
-    start: int = 0,  # XXX: do we need this?
-    # 0.5 is no overlap between arms, 1.0 is full overlap
-    w: float = 0.43,
-
-) -> QtGui.QPainterPath:
-    '''
-    More or less direct proxy to ``path_arrays_from_ohlc()``
-    but with closed in kwargs for line spacing.
-
-    '''
-    x, y, c = path_arrays_from_ohlc(
-        data,
-        start,
-        bar_gap=w,
-    )
-    return x, y, c
-
-
-def ohlc_to_line(
-    ohlc_shm: ShmArray,
-    data_field: str,
-    fields: list[str] = ['open', 'high', 'low', 'close']
-
-) -> tuple[
-    np.ndarray,
-    np.ndarray,
-]:
-    '''
-    Convert an input struct-array holding OHLC samples into a pair of
-    flattened x, y arrays with the same size (datums wise) as the source
-    data.
-
-    '''
-    y_out = ohlc_shm.ustruct(fields)
-    first = ohlc_shm._first.value
-    last = ohlc_shm._last.value
-
-    # write pushed data to flattened copy
-    y_out[first:last] = rfn.structured_to_unstructured(
-        ohlc_shm.array[fields]
-    )
-
-    # generate an flat-interpolated x-domain
-    x_out = (
-        np.broadcast_to(
-            ohlc_shm._array['index'][:, None],
-            (
-                ohlc_shm._array.size,
-                # 4,  # only ohlc
-                y_out.shape[1],
-            ),
-        ) + np.array([-0.5, 0, 0, 0.5])
-    )
-    assert y_out.any()
-
-    return (
-        x_out,
-        y_out,
-    )
-
-
-def to_step_format(
-    shm: ShmArray,
-    data_field: str,
-    index_field: str = 'index',
-
-) -> tuple[int, np.ndarray, np.ndarray]:
-    '''
-    Convert an input 1d shm array to a "step array" format
-    for use by path graphics generation.
-
-    '''
-    i = shm._array['index'].copy()
-    out = shm._array[data_field].copy()
-
-    x_out = np.broadcast_to(
-        i[:, None],
-        (i.size, 2),
-    ) + np.array([-0.5, 0.5])
-
-    y_out = np.empty((len(out), 2), dtype=out.dtype)
-    y_out[:] = out[:, np.newaxis]
-
-    # start y at origin level
-    y_out[0, 0] = 0
-    return x_out, y_out
@@ -15,11 +15,15 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 """
-Customization of ``pyqtgraph`` core routines to speed up our use mostly
-based on not requiring "scentific precision" for pixel perfect view
-transforms.
+Customization of ``pyqtgraph`` core routines and various types normally
+for speedups.
+
+Generally, our does not require "scentific precision" for pixel perfect
+view transforms.

 """
+from typing import Optional

 import pyqtgraph as pg
@@ -46,3 +50,211 @@ def _do_overrides() -> None:
     """
     # we don't care about potential fp issues inside Qt
     pg.functions.invertQTransform = invertQTransform
+    pg.PlotItem = PlotItem
+
+
+# NOTE: the below customized type contains all our changes on a method
+# by method basis as per the diff:
+# https://github.com/pyqtgraph/pyqtgraph/commit/8e60bc14234b6bec1369ff4192dbfb82f8682920#diff-a2b5865955d2ba703dbc4c35ff01aa761aa28d2aeaac5e68d24e338bc82fb5b1R500
+
+class PlotItem(pg.PlotItem):
+    '''
+    Overrides for the core plot object mostly pertaining to overlayed
+    multi-view management as it relates to multi-axis managment.
+
+    '''
+    def __init__(
+        self,
+        parent=None,
+        name=None,
+        labels=None,
+        title=None,
+        viewBox=None,
+        axisItems=None,
+        default_axes=['left', 'bottom'],
+        enableMenu=True,
+        **kargs
+    ):
+        super().__init__(
+            parent=parent,
+            name=name,
+            labels=labels,
+            title=title,
+            viewBox=viewBox,
+            axisItems=axisItems,
+            # default_axes=default_axes,
+            enableMenu=enableMenu,
+            kargs=kargs,
+        )
+        # self.setAxisItems(
+        #     axisItems,
+        #     default_axes=default_axes,
+        # )
+
+    # NOTE: this is an entirely new method not in upstream.
+    def removeAxis(
+        self,
+        name: str,
+        unlink: bool = True,
+
+    ) -> Optional[pg.AxisItem]:
+        """
+        Remove an axis from the contained axis items
+        by ```name: str```.
+
+        This means the axis graphics object will be removed
+        from the ``.layout: QGraphicsGridLayout`` as well as unlinked
+        from the underlying associated ``ViewBox``.
+
+        If the ``unlink: bool`` is set to ``False`` then the axis will
+        stay linked to its view and will only be removed from the
+        layoutonly be removed from the layout.
+
+        If no axis with ``name: str`` is found then this is a noop.
+
+        Return the axis instance that was removed.
+
+        """
+        entry = self.axes.pop(name, None)
+
+        if not entry:
+            return
+
+        axis = entry['item']
+        self.layout.removeItem(axis)
+        axis.scene().removeItem(axis)
+        if unlink:
+            axis.unlinkFromView()
+
+        self.update()
+
+        return axis
+
+    # Why do we need to always have all axes created?
+    #
+    # I don't understand this at all.
+    #
+    # Everything seems to work if you just always apply the
+    # set passed to this method **EXCEPT** for some super weird reason
+    # the view box geometry still computes as though the space for the
+    # `'bottom'` axis is always there **UNLESS** you always add that
+    # axis but hide it?
+    #
+    # Why in tf would this be the case!?!?
+    def setAxisItems(
+        self,
+        # XXX: yeah yeah, i know we can't use type annots like this yet.
+        axisItems: Optional[dict[str, pg.AxisItem]] = None,
+        add_to_layout: bool = True,
+        default_axes: list[str] = ['left', 'bottom'],
+    ):
+        """
+        Override axis item setting to only
+
+        """
+        axisItems = axisItems or {}
+
+        # XXX: wth is is this even saying?!?
+        # Array containing visible axis items
+        # Also containing potentially hidden axes, but they are not
+        # touched so it does not matter
+        # visibleAxes = ['left', 'bottom']
+        # Note that it does not matter that this adds
+        # some values to visibleAxes a second time
+
+        # XXX: uhhh wat^ ..?
+
+        visibleAxes = list(default_axes) + list(axisItems.keys())
+
+        # TODO: we should probably invert the loop here to not loop the
+        # predefined "axis name set" and instead loop the `axisItems`
+        # input and lookup indices from a predefined map.
+        for name, pos in (
+            ('top', (1, 1)),
+            ('bottom', (3, 1)),
+            ('left', (2, 0)),
+            ('right', (2, 2))
+        ):
+            if (
+                name in self.axes and
+                name in axisItems
+            ):
+                # we already have an axis entry for this name
+                # so remove the existing entry.
+                self.removeAxis(name)
+
+            # elif name not in axisItems:
+            #     # this axis entry is not provided in this call
+            #     # so remove any old/existing entry.
+            #     self.removeAxis(name)
+
+            # Create new axis
+            if name in axisItems:
+                axis = axisItems[name]
+                if axis.scene() is not None:
+                    if (
+                        name not in self.axes
+                        or axis != self.axes[name]["item"]
+                    ):
+                        raise RuntimeError(
+                            "Can't add an axis to multiple plots. Shared axes"
+                            " can be achieved with multiple AxisItem instances"
+                            " and set[X/Y]Link.")
+
+            else:
+                # Set up new axis
+
+                # XXX: ok but why do we want to add axes for all entries
+                # if not desired by the user? The only reason I can see
+                # adding this is without it there's some weird
+                # ``ViewBox`` geometry bug.. where a gap for the
+                # 'bottom' axis is somehow left in?
+                axis = pg.AxisItem(orientation=name, parent=self)
+
+            axis.linkToView(self.vb)
+
+            # XXX: shouldn't you already know the ``pos`` from the name?
+            # Oh right instead of a global map that would let you
+            # reasily look that up it's redefined over and over and over
+            # again in methods..
+            self.axes[name] = {'item': axis, 'pos': pos}
+
+            # NOTE: in the overlay case the axis may be added to some
+            # other layout and should not be added here.
+            if add_to_layout:
+                self.layout.addItem(axis, *pos)
+
+            # place axis above images at z=0, items that want to draw
+            # over the axes should be placed at z>=1:
+            axis.setZValue(0.5)
+            axis.setFlag(
+                axis.GraphicsItemFlag.ItemNegativeZStacksBehindParent
+            )
+            if name in visibleAxes:
+                self.showAxis(name, True)
+            else:
+                # why do we need to insert all axes to ``.axes`` and
+                # only hide the ones the user doesn't specify? It all
+                # seems to work fine without doing this except for this
+                # weird gap for the 'bottom' axis that always shows up
+                # in the view box geometry??
+                self.hideAxis(name)
+
+    def updateGrid(
+        self,
+        *args,
+    ):
+        alpha = self.ctrl.gridAlphaSlider.value()
+        x = alpha if self.ctrl.xGridCheck.isChecked() else False
+        y = alpha if self.ctrl.yGridCheck.isChecked() else False
+        for name, dim in (
+            ('top', x),
+            ('bottom', x),
+            ('left', y),
+            ('right', y)
+        ):
+            if name in self.axes:
+                self.getAxis(name).setGrid(dim)
+        # self.getAxis('bottom').setGrid(x)
+        # self.getAxis('left').setGrid(y)
+        # self.getAxis('right').setGrid(y)
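The ``_do_overrides()`` change above rebinds ``pg.PlotItem`` to the local subclass so downstream lookups pick up the customized type. That late-binding override pattern in miniature (module and class names here are hypothetical):

import types

mod = types.ModuleType('mod')

class Thing:
    def ping(self) -> str:
        return 'upstream'

mod.Thing = Thing

class PatchedThing(Thing):
    def ping(self) -> str:
        return 'patched'

def do_overrides() -> None:
    # late-bound swap, analogous to ``pg.PlotItem = PlotItem``
    mod.Thing = PatchedThing

do_overrides()
assert mod.Thing().ping() == 'patched'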
File diff suppressed because it is too large.
@@ -35,9 +35,13 @@ from collections import defaultdict
 from contextlib import asynccontextmanager
 from functools import partial
 from typing import (
-    Optional, Callable,
-    Awaitable, Sequence,
-    Any, AsyncIterator
+    Optional,
+    Callable,
+    Awaitable,
+    Sequence,
+    Any,
+    AsyncIterator,
+    Iterator,
 )
 import time
 # from pprint import pformat
@@ -119,7 +123,7 @@ class CompleterView(QTreeView):
         # TODO: size this based on DPI font
         self.setIndentation(_font.px_size)

-        # self.setUniformRowHeights(True)
+        self.setUniformRowHeights(True)
         # self.setColumnWidth(0, 3)
         # self.setVerticalBarPolicy(Qt.ScrollBarAlwaysOff)
         # self.setSizeAdjustPolicy(QAbstractScrollArea.AdjustIgnored)
@@ -138,13 +142,15 @@ class CompleterView(QTreeView):
         model.setHorizontalHeaderLabels(labels)

         self._font_size: int = 0  # pixels
+        self._init: bool = False

     async def on_pressed(self, idx: QModelIndex) -> None:
-        '''Mouse pressed on view handler.
+        '''
+        Mouse pressed on view handler.

         '''
         search = self.parent()
-        await search.chart_current_item(clear_to_cache=False)
+        await search.chart_current_item()
         search.focus()

     def set_font_size(self, size: int = 18):
@@ -156,56 +162,64 @@ class CompleterView(QTreeView):

         self.setStyleSheet(f"font: {size}px")

-    # def resizeEvent(self, event: 'QEvent') -> None:
-    #     event.accept()
-    #     super().resizeEvent(event)
-
-    def on_resize(self) -> None:
-        '''
-        Resize relay event from god.
-
-        '''
-        self.resize_to_results()
-
-    def resize_to_results(self):
+    def resize_to_results(
+        self,
+        w: Optional[float] = 0,
+        h: Optional[float] = None,
+
+    ) -> None:
         model = self.model()
         cols = model.columnCount()
-        # rows = model.rowCount()
+        cidx = self.selectionModel().currentIndex()
+        rows = model.rowCount()
+        self.expandAll()
+
+        # compute the approx height in pixels needed to include
+        # all result rows in view.
+        row_h = rows_h = self.rowHeight(cidx) * (rows + 1)
+        for idx, item in self.iter_df_rows():
+            row_h = self.rowHeight(idx)
+            rows_h += row_h
+            # print(f'row_h: {row_h}\nrows_h: {rows_h}')
+
+            # TODO: could we just break early here on detection
+            # of ``rows_h >= h``?

         col_w_tot = 0
         for i in range(cols):
+            # only slap in a rows's height's worth
+            # of padding once at startup.. no idea
+            if (
+                not self._init
+                and row_h
+            ):
+                col_w_tot = row_h
+                self._init = True
+
             self.resizeColumnToContents(i)
             col_w_tot += self.columnWidth(i)

-        win = self.window()
-        win_h = win.height()
-        edit_h = self.parent().bar.height()
-        sb_h = win.statusBar().height()
-
-        # TODO: probably make this more general / less hacky
-        # we should figure out the exact number of rows to allow
-        # inclusive of search bar and header "rows", in pixel terms.
-        # Eventually when we have an "info" widget below the results we
-        # will want space for it and likely terminating the results-view
-        # space **exactly on a row** would be ideal.
-        # if row_px > 0:
-        #     rows = ceil(window_h / row_px) - 4
-        # else:
-        #     rows = 16
-        # self.setFixedHeight(rows * row_px)
-        # self.resize(self.width(), rows * row_px)
-
-        # NOTE: if the heigh set here is **too large** then the resize
-        # event will perpetually trigger as the window causes some kind
-        # of recompute of callbacks.. so we have to ensure it's limited.
-        h = win_h - (edit_h + 1.666*sb_h)
-        assert h > 0
-        self.setFixedHeight(round(h))
-
-        # size to width of longest result seen thus far
-        # TODO: should we always dynamically scale to longest result?
-        if self.width() < col_w_tot:
-            self.setFixedWidth(col_w_tot)
+        # NOTE: if the heigh `h` set here is **too large** then the
+        # resize event will perpetually trigger as the window causes
+        # some kind of recompute of callbacks.. so we have to ensure
+        # it's limited.
+        if h:
+            h: int = round(h)
+            abs_mx = round(0.91 * h)
+            self.setMaximumHeight(abs_mx)
+
+            if rows_h <= abs_mx:
+                # self.setMinimumHeight(rows_h)
+                self.setMinimumHeight(rows_h)
+                # self.setFixedHeight(rows_h)
+
+            else:
+                self.setMinimumHeight(abs_mx)
+
+        # dyncamically size to width of longest result seen
+        curr_w = self.width()
+        if curr_w < col_w_tot:
+            self.setMinimumWidth(col_w_tot)

         self.update()
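The new height logic in ``resize_to_results()`` clamps the results view to a fraction of the available height rather than fixing it outright. The arithmetic in isolation (the 0.91 factor comes from the diff; the helper name is hypothetical):

def clamp_heights(rows_h: int, h: int) -> tuple[int, int]:
    # -> (minimum height, maximum height) for the results view
    abs_mx = round(0.91 * h)                       # hard cap on height
    min_h = rows_h if rows_h <= abs_mx else abs_mx
    return min_h, abs_mx

assert clamp_heights(rows_h=120, h=400) == (120, 364)  # content fits
assert clamp_heights(rows_h=500, h=400) == (364, 364)  # content capped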
@@ -331,6 +345,23 @@ class CompleterView(QTreeView):
             item = model.itemFromIndex(idx)
             yield idx, item

+    def iter_df_rows(
+        self,
+        iparent: QModelIndex = QModelIndex(),
+
+    ) -> Iterator[tuple[QModelIndex, QStandardItem]]:
+
+        model = self.model()
+        isections = model.rowCount(iparent)
+        for i in range(isections):
+            idx = model.index(i, 0, iparent)
+            item = model.itemFromIndex(idx)
+            yield idx, item
+
+            if model.hasChildren(idx):
+                # recursively yield child items depth-first
+                yield from self.iter_df_rows(idx)
+
     def find_section(
         self,
         section: str,
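``iter_df_rows()`` above is a depth-first generator over a ``QStandardItemModel``; the same recursion shape on a plain dict-based tree, for illustration only:

from typing import Iterator

def iter_df(tree: dict, prefix: str = '') -> Iterator[str]:
    for name, children in tree.items():
        path = f'{prefix}/{name}'
        yield path
        # recursively yield child items depth-first
        yield from iter_df(children, path)

tree = {'cache': {'btcusdt': {}, 'ethusdt': {}}, 'kraken': {'xbt': {}}}
assert list(iter_df(tree)) == [
    '/cache', '/cache/btcusdt', '/cache/ethusdt', '/kraken', '/kraken/xbt',
]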
@@ -354,7 +385,8 @@ class CompleterView(QTreeView):
         status_field: str = None,

     ) -> None:
-        '''Clear all result-rows from under the depth = 1 section.
+        '''
+        Clear all result-rows from under the depth = 1 section.

         '''
         idx = self.find_section(section)
@@ -375,8 +407,6 @@ class CompleterView(QTreeView):
             else:
                 model.setItem(idx.row(), 1, QStandardItem())

-            self.resize_to_results()
-
             return idx
         else:
             return None
@@ -444,9 +474,22 @@ class CompleterView(QTreeView):

         self.show_matches()

-    def show_matches(self) -> None:
+    def show_matches(
+        self,
+        wh: Optional[tuple[float, float]] = None,
+
+    ) -> None:
+
+        if wh:
+            self.resize_to_results(*wh)
+
+        else:
+            # case where it's just an update from results and *NOT*
+            # a resize of some higher level parent-container widget.
+            search = self.parent()
+            w, h = search.space_dims()
+            self.resize_to_results(w=w, h=h)
+
         self.show()
-        self.resize_to_results()


 class SearchBar(Edit):
@@ -466,18 +509,15 @@ class SearchBar(Edit):
         self.godwidget = godwidget
         super().__init__(parent, **kwargs)
         self.view: CompleterView = view
-        godwidget._widgets[view.mode_name] = view
-
-    def show(self) -> None:
-        super().show()
-        self.view.show_matches()

     def unfocus(self) -> None:
         self.parent().hide()
         self.clearFocus()

+    def hide(self) -> None:
         if self.view:
             self.view.hide()
+        super().hide()


 class SearchWidget(QtWidgets.QWidget):
@@ -496,15 +536,16 @@ class SearchWidget(QtWidgets.QWidget):
         parent=None,

     ) -> None:
-        super().__init__(parent or godwidget)
+        super().__init__(parent)

         # size it as we specify
         self.setSizePolicy(
             QtWidgets.QSizePolicy.Fixed,
-            QtWidgets.QSizePolicy.Expanding,
+            QtWidgets.QSizePolicy.Fixed,
         )

         self.godwidget = godwidget
+        godwidget.reg_for_resize(self)

         self.vbox = QtWidgets.QVBoxLayout(self)
         self.vbox.setContentsMargins(0, 4, 4, 0)
@@ -554,17 +595,22 @@ class SearchWidget(QtWidgets.QWidget):
         self.vbox.setAlignment(self.view, Qt.AlignTop | Qt.AlignLeft)

     def focus(self) -> None:
-
-        if self.view.model().rowCount(QModelIndex()) == 0:
-            # fill cache list if nothing existing
-            self.view.set_section_entries(
-                'cache',
-                list(reversed(self.godwidget._chart_cache)),
-                clear_all=True,
-            )
-
-        self.bar.focus()
         self.show()
+        self.bar.focus()
+
+    def show_only_cache_entries(self) -> None:
+        '''
+        Clear the search results view and show only cached (aka recently
+        loaded with active data) feeds in the results section.
+
+        '''
+        godw = self.godwidget
+        self.view.set_section_entries(
+            'cache',
+            list(reversed(godw._chart_cache)),
+            # remove all other completion results except for cache
+            clear_all=True,
+        )

     def get_current_item(self) -> Optional[tuple[str, str]]:
         '''Return the current completer tree selection as
@@ -603,7 +649,8 @@ class SearchWidget(QtWidgets.QWidget):
         clear_to_cache: bool = True,

     ) -> Optional[str]:
-        '''Attempt to load and switch the current selected
+        '''
+        Attempt to load and switch the current selected
         completion result to the affiliated chart app.

         Return any loaded symbol.
@@ -614,13 +661,13 @@ class SearchWidget(QtWidgets.QWidget):
             return None

         provider, symbol = value
-        chart = self.godwidget
+        godw = self.godwidget

         log.info(f'Requesting symbol: {symbol}.{provider}')

-        await chart.load_symbol(
+        await godw.load_symbols(
             provider,
-            symbol,
+            [symbol],
             'info',
         )
@@ -635,18 +682,46 @@ class SearchWidget(QtWidgets.QWidget):
         # Re-order the symbol cache on the chart to display in
         # LIFO order. this is normally only done internally by
         # the chart on new symbols being loaded into memory
-        chart.set_chart_symbol(fqsn, chart.linkedsplits)
-
-        self.view.set_section_entries(
-            'cache',
-            values=list(reversed(chart._chart_cache)),
-            # remove all other completion results except for cache
-            clear_all=True,
-        )
+        godw.set_chart_symbol(
+            fqsn, (
+                godw.hist_linked,
+                godw.rt_linked,
+            )
+        )
+        self.show_only_cache_entries()

+        self.bar.focus()
         return fqsn

+    def space_dims(self) -> tuple[float, float]:
+        '''
+        Compute and return the "available space dimentions" for this
+        search widget in terms of px space for results by return the
+        pair of width and height.
+
+        '''
+        # XXX: dun need dis rite?
+        # win = self.window()
+        # win_h = win.height()
+        # sb_h = win.statusBar().height()
+        godw = self.godwidget
+        hl = godw.hist_linked
+        edit_h = self.bar.height()
+        h = hl.height() - edit_h
+        w = hl.width()
+        return w, h
+
+    def on_resize(self) -> None:
+        '''
+        Resize relay event from god, resize all child widgets.
+
+        Right now this is just view to contents and/or the fast chart
+        height.
+
+        '''
+        w, h = self.space_dims()
+        self.bar.view.show_matches(wh=(w, h))
+

 _search_active: trio.Event = trio.Event()
 _search_enabled: bool = False
@@ -712,10 +787,11 @@ async def fill_results(
     max_pause_time: float = 6/16 + 0.001,

 ) -> None:
-    """Task to search through providers and fill in possible
+    '''
+    Task to search through providers and fill in possible
     completion results.

-    """
+    '''
     global _search_active, _search_enabled, _searcher_cache

     bar = search.bar
@@ -729,6 +805,10 @@ async def fill_results(
     matches = defaultdict(list)
     has_results: defaultdict[str, set[str]] = defaultdict(set)

+    # show cached feed list at startup
+    search.show_only_cache_entries()
+    search.on_resize()
+
     while True:
         await _search_active.wait()
         period = None
@@ -742,7 +822,7 @@ async def fill_results(
             pattern = await recv_chan.receive()

             period = time.time() - wait_start
-            print(f'{pattern} after {period}')
+            log.debug(f'{pattern} after {period}')

             # during fast multiple key inputs, wait until a pause
             # (in typing) to initiate search
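The surrounding loop in ``fill_results()`` debounces keystrokes: a search only fires once typing pauses longer than ``max_pause_time``. A reduced trio sketch of that wait-for-a-pause pattern (timings and names are illustrative, not the real task wiring):

import trio

async def debounced(recv: trio.MemoryReceiveChannel, max_pause_time: float):
    pattern = await recv.receive()
    while True:
        with trio.move_on_after(max_pause_time) as cs:
            pattern = await recv.receive()   # newer input: keep waiting
        if cs.cancelled_caught:
            return pattern                   # typing paused: fire search

async def main() -> None:
    send, recv = trio.open_memory_channel(16)
    async with trio.open_nursery() as n:
        n.start_soon(send.send, 'btc')
        print(await debounced(recv, max_pause_time=6 / 16))

trio.run(main)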
@@ -841,8 +921,7 @@ async def handle_keyboard_input(
     godwidget = search.godwidget
     view = bar.view
     view.set_font_size(bar.dpi_font.px_size)
-    send, recv = trio.open_memory_channel(16)
-
+    send, recv = trio.open_memory_channel(616)

     async with trio.open_nursery() as n:
@@ -857,6 +936,10 @@ async def handle_keyboard_input(
             )
         )

+    bar.focus()
+    search.show_only_cache_entries()
+    await trio.sleep(0)
+
     async for kbmsg in recv_chan:
         event, etype, key, mods, txt = kbmsg.to_tuple()
@@ -867,10 +950,11 @@ async def handle_keyboard_input(
             ctl = True

         if key in (Qt.Key_Enter, Qt.Key_Return):
-            await search.chart_current_item(clear_to_cache=True)
             _search_enabled = False
-            continue
+            await search.chart_current_item(clear_to_cache=True)
+            search.show_only_cache_entries()
+            view.show_matches()
+            search.focus()

         elif not ctl and not bar.text():
             # if nothing in search text show the cache
@@ -887,7 +971,7 @@ async def handle_keyboard_input(
             Qt.Key_Space,   # i feel like this is the "native" one
             Qt.Key_Alt,
         }:
-            search.bar.unfocus()
+            bar.unfocus()

             # kill the search and focus back on main chart
             if godwidget:
@@ -935,9 +1019,10 @@ async def handle_keyboard_input(
                 if item:
                     parent_item = item.parent()

+                    # if we're in the cache section and thus the next
+                    # selection is a cache item, switch and show it
+                    # immediately since it should be very fast.
                     if parent_item and parent_item.text() == 'cache':
-
-                        # if it's a cache item, switch and show it immediately
                         await search.chart_current_item(clear_to_cache=False)

                 elif not ctl:
@@ -21,15 +21,29 @@ Qt main window singletons and stuff.
 import os
 import signal
 import time
-from typing import Callable, Optional, Union
+from typing import (
+    Callable,
+    Optional,
+    Union,
+)
 import uuid

-from pyqtgraph import QtGui
 from PyQt5 import QtCore
-from PyQt5.QtWidgets import QLabel, QStatusBar
+from PyQt5.QtWidgets import (
+    QWidget,
+    QMainWindow,
+    QApplication,
+    QLabel,
+    QStatusBar,
+)
+from PyQt5.QtGui import (
+    QScreen,
+    QCloseEvent,
+)
 from ..log import get_logger
 from ._style import _font_small, hcolor
+from ._chart import GodWidget


 log = get_logger(__name__)
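
For context on this import churn: older pyqtgraph re-exported most widget classes through a single `QtGui` compat namespace, which this change drops in favor of the explicit PyQt5 modules. A minimal before/after sketch:

    # before: everything hangs off pyqtgraph's Qt shim,
    #
    #   from pyqtgraph import QtGui
    #   win = QtGui.QMainWindow()
    #
    # after: import the concrete classes from the real binding,
    # which keeps names explicit and plays nicer with type checkers:

    from PyQt5.QtWidgets import QApplication, QMainWindow

    app = QApplication([])
    win = QMainWindow()
    win.setWindowTitle('direct PyQt5 imports')
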
@@ -148,12 +162,13 @@ class MultiStatus:
        self.bar.clearMessage()


-class MainWindow(QtGui.QMainWindow):
+class MainWindow(QMainWindow):

    # XXX: for tiling wms this should scale
    # with the alloted window size.
    # TODO: detect for tiling and if untrue set some size?
-   size = (300, 500)
+   # size = (300, 500)
+   godwidget: GodWidget

    title = 'piker chart (ur symbol is loading bby)'

@@ -162,17 +177,20 @@ class MainWindow(QtGui.QMainWindow):
        # self.setMinimumSize(*self.size)
        self.setWindowTitle(self.title)

+       # set by runtime after `trio` is engaged.
+       self.godwidget: Optional[GodWidget] = None
+
        self._status_bar: QStatusBar = None
        self._status_label: QLabel = None
        self._size: Optional[tuple[int, int]] = None

    @property
-   def mode_label(self) -> QtGui.QLabel:
+   def mode_label(self) -> QLabel:

        # init mode label
        if not self._status_label:

-           self._status_label = label = QtGui.QLabel()
+           self._status_label = label = QLabel()
            label.setStyleSheet(
                f"""QLabel {{
                    color : {hcolor('gunmetal')};

@@ -194,8 +212,7 @@ class MainWindow(QtGui.QMainWindow):

    def closeEvent(
        self,
-       event: QtGui.QCloseEvent,
+       event: QCloseEvent,

    ) -> None:
        '''Cancel the root actor asap.

@@ -235,8 +252,8 @@ class MainWindow(QtGui.QMainWindow):
    def on_focus_change(
        self,

-       last: QtGui.QWidget,
-       current: QtGui.QWidget,
+       last: QWidget,
+       current: QWidget,

    ) -> None:

@@ -247,11 +264,12 @@ class MainWindow(QtGui.QMainWindow):
        name = getattr(current, 'mode_name', '')
        self.set_mode_name(name)

-   def current_screen(self) -> QtGui.QScreen:
-       """Get a frickin screen (if we can, gawd).
+   def current_screen(self) -> QScreen:
+       '''
+       Get a frickin screen (if we can, gawd).

-       """
-       app = QtGui.QApplication.instance()
+       '''
+       app = QApplication.instance()

        for _ in range(3):
            screen = app.screenAt(self.pos())

@@ -284,7 +302,7 @@ class MainWindow(QtGui.QMainWindow):
        '''
        # https://stackoverflow.com/a/18975846
        if not size and not self._size:
-           app = QtGui.QApplication.instance()
+           # app = QApplication.instance()
            geo = self.current_screen().geometry()
            h, w = geo.height(), geo.width()
            # use approx 1/3 of the area of the screen by default

@@ -292,9 +310,36 @@

        self.resize(*size or self._size)

+   def resizeEvent(self, event: QtCore.QEvent) -> None:
+       if (
+           # event.spontaneous()
+           event.oldSize().height == event.size().height
+       ):
+           event.ignore()
+           return
+
+       # XXX: uncomment for debugging..
+       # attrs = {}
+       # for key in dir(event):
+       #     if key == '__dir__':
+       #         continue
+       #     attr = getattr(event, key)
+       #     try:
+       #         attrs[key] = attr()
+       #     except TypeError:
+       #         attrs[key] = attr

+       # from pprint import pformat
+       # print(
+       #     f'{pformat(attrs)}\n'
+       #     f'WINDOW RESIZE: {self.size()}\n\n'
+       # )
+       self.godwidget.on_win_resize(event)
+       event.accept()


 # singleton app per actor
-_qt_win: QtGui.QMainWindow = None
+_qt_win: QMainWindow = None


 def main_window() -> MainWindow:
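
Worth noting while reading the hunk above: `event.oldSize().height == event.size().height` compares the bound `height` methods themselves (the call parens are missing), which evaluates to `False` for two distinct `QSize` objects, so the early-return guard never fires and every resize gets forwarded. A minimal standalone sketch of the override with the accessors actually called:

    from PyQt5 import QtCore
    from PyQt5.QtWidgets import QApplication, QMainWindow

    class Window(QMainWindow):

        def resizeEvent(self, event: QtCore.QEvent) -> None:
            # call the accessors so the guard compares values,
            # not bound-method objects:
            if event.oldSize().height() == event.size().height():
                event.ignore()
                return

            # stand-in for forwarding to the app's resize handler
            print(f'window resized to {self.size()}')
            event.accept()

    app = QApplication([])
    win = Window()
    win.resize(400, 300)
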
@@ -46,8 +46,10 @@ def _kivy_import_hack():
 @click.argument('name', nargs=1, required=True)
 @click.pass_obj
 def monitor(config, rate, name, dhost, test, tl):
-    """Start a real-time watchlist UI
-    """
+    '''
+    Start a real-time watchlist UI
+
+    '''
     # global opts
     brokermod = config['brokermods'][0]
     loglevel = config['loglevel']

@@ -70,8 +72,12 @@ def monitor(config, rate, name, dhost, test, tl):
        ) as portal:
            # run app "main"
            await _async_main(
-               name, portal, tickers,
-               brokermod, rate, test=test,
+               name,
+               portal,
+               tickers,
+               brokermod,
+               rate,
+               test=test,
            )

    tractor.run(

@@ -122,7 +128,7 @@ def optschain(config, symbol, date, rate, test):
 @cli.command()
 @click.option(
     '--profile',
-    '-p',
+    # '-p',
     default=None,
     help='Enable pyqtgraph profiling'
 )

@@ -131,9 +137,14 @@ def optschain(config, symbol, date, rate, test):
     is_flag=True,
     help='Enable tractor debug mode'
 )
-@click.argument('symbol', required=True)
+@click.argument('symbols', nargs=-1, required=True)
 @click.pass_obj
-def chart(config, symbol, profile, pdb):
+def chart(
+    config,
+    symbols: list[str],
+    profile,
+    pdb: bool,
+):
     '''
     Start a real-time chartng UI

@@ -144,24 +155,27 @@ def chart(config, symbol, profile, pdb):
        _profile._pg_profile = True
        _profile.ms_slower_then = float(profile)

+   # Qt UI entrypoint
    from ._app import _main

-   if '.' not in symbol:
-       click.echo(click.style(
-           f'symbol: {symbol} must have a {symbol}.<provider> suffix',
-           fg='red',
-       ))
-       return
+   for symbol in symbols:
+       if '.' not in symbol:
+           click.echo(click.style(
+               f'symbol: {symbol} must have a {symbol}.<provider> suffix',
+               fg='red',
+           ))
+           return

    # global opts
    brokernames = config['brokers']
+   brokermods = config['brokermods']
+   assert brokermods
    tractorloglevel = config['tractorloglevel']
    pikerloglevel = config['loglevel']

    _main(
-       sym=symbol,
-       brokernames=brokernames,
+       syms=symbols,
+       brokermods=brokermods,
        piker_loglevel=pikerloglevel,
        tractor_kwargs={
            'debug_mode': pdb,

@@ -170,5 +184,6 @@ def chart(config, symbol, profile, pdb):
            'enable_modules': [
                'piker.clearing._client'
            ],
+           'registry_addr': config.get('registry_addr'),
        },
    )
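
The `chart` command above goes from one positional `symbol` to a variadic `symbols` tuple: with `nargs=-1` click collects all remaining arguments. A minimal standalone sketch of that CLI pattern (the module and error message here are illustrative):

    import click

    @click.command()
    @click.argument('symbols', nargs=-1, required=True)
    def chart(symbols: tuple[str, ...]) -> None:
        # click passes variadic arguments as a tuple
        for symbol in symbols:
            if '.' not in symbol:
                raise click.UsageError(
                    f'symbol: {symbol} must have a {symbol}.<provider> suffix'
                )
        click.echo(f'charting {len(symbols)} symbols: {symbols}')

    if __name__ == '__main__':
        chart()  # e.g. `python chart.py btcusdt.binance xbtusd.kraken`
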
(file diff suppressed because it is too large)
@@ -1,13 +1,12 @@
 # we require a pinned dev branch to get some edge features that
 # are often untested in tractor's CI and/or being tested by us
 # first before committing as core features in tractor's base.
--e git+https://github.com/goodboy/tractor.git@master#egg=tractor
+-e git+https://github.com/goodboy/tractor.git@piker_pin#egg=tractor

 # `pyqtgraph` peeps keep breaking, fixing, improving so might as well
 # pin this to a dev branch that we have more control over especially
 # as more graphics stuff gets hashed out.
--e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
+-e git+https://github.com/pikers/pyqtgraph.git@master#egg=pyqtgraph


 # our async client for ``marketstore`` (the tsdb)
 -e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore

@@ -18,4 +17,7 @@


 # ``asyncvnc`` for sending interactions to ib-gw inside docker
--e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc
+-e git+https://github.com/pikers/asyncvnc.git@main#egg=asyncvnc
+
+# ``cryptofeed`` for connecting to various crypto exchanges + custom fixes
+-e git+https://github.com/pikers/cryptofeed.git@date_parsing#egg=cryptofeed
setup.py (5 changes)

@@ -41,23 +41,24 @@ setup(
     },
     install_requires=[
         'toml',
+        'tomli',  # fastest pure py reader
         'click',
         'colorlog',
         'attrs',
         'pygments',
         'colorama',  # numba traceback coloring
-        'pydantic',  # structured data
+        'msgspec',  # performant IPC messaging and structs

         # async
         'trio',
         'trio-websocket',
-        'msgspec',  # performant IPC messaging
         'async_generator',

         # from github currently (see requirements.txt)
         # 'trimeter',  # not released yet..
         # 'tractor',
         # asyncvnc,
+        # 'cryptofeed',

         # brokers
         'asks==2.4.8',
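
The dependency swap above replaces `pydantic` with `msgspec` for structured IPC messages. A minimal, hedged sketch of the `msgspec.Struct` equivalent of a pydantic model (the `Quote` type here is purely illustrative, not an actual piker struct):

    import msgspec

    class Quote(msgspec.Struct):
        # illustrative message type, not a piker-defined schema
        symbol: str
        last: float
        size: int = 0

    # structs encode/decode straight to msgpack (or JSON)
    # without a separate schema-compilation step:
    wire = msgspec.msgpack.encode(Quote('xbtusd.kraken', 16_000.0))
    quote = msgspec.msgpack.decode(wire, type=Quote)
    assert quote.last == 16_000.0
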
@@ -1,22 +1,30 @@
 """
-Resource list for mucking with DPIs on multiple screens:
+DPI and info helper script for display metrics.

-- https://stackoverflow.com/questions/42141354/convert-pixel-size-to-point-size-for-fonts-on-multiple-platforms
-- https://stackoverflow.com/questions/25761556/qt5-font-rendering-different-on-various-platforms/25929628#25929628
-- https://doc.qt.io/qt-5/highdpi.html
-- https://stackoverflow.com/questions/20464814/changing-dpi-scaling-size-of-display-make-qt-applications-font-size-get-rendere
-- https://stackoverflow.com/a/20465247
-- https://doc.qt.io/archives/qt-4.8/qfontmetrics.html#width
-- https://forum.qt.io/topic/54136/how-do-i-get-the-qscreen-my-widget-is-on-qapplication-desktop-screen-returns-a-qwidget-and-qobject_cast-qscreen-returns-null/3
-- https://forum.qt.io/topic/43625/point-sizes-are-they-reliable/4
-- https://stackoverflow.com/questions/16561879/what-is-the-difference-between-logicaldpix-and-physicaldpix-in-qt
-- https://doc.qt.io/qt-5/qguiapplication.html#screenAt
-
 """

-from pyqtgraph import QtGui
+# Resource list for mucking with DPIs on multiple screens:
+# https://stackoverflow.com/questions/42141354/convert-pixel-size-to-point-size-for-fonts-on-multiple-platforms
+# https://stackoverflow.com/questions/25761556/qt5-font-rendering-different-on-various-platforms/25929628#25929628
+# https://doc.qt.io/qt-5/highdpi.html
+# https://stackoverflow.com/questions/20464814/changing-dpi-scaling-size-of-display-make-qt-applications-font-size-get-rendere
+# https://stackoverflow.com/a/20465247
+# https://doc.qt.io/archives/qt-4.8/qfontmetrics.html#width
+# https://forum.qt.io/topic/54136/how-do-i-get-the-qscreen-my-widget-is-on-qapplication-desktop-screen-returns-a-qwidget-and-qobject_cast-qscreen-returns-null/3
+# https://forum.qt.io/topic/43625/point-sizes-are-they-reliable/4
+# https://stackoverflow.com/questions/16561879/what-is-the-difference-between-logicaldpix-and-physicaldpix-in-qt
+# https://doc.qt.io/qt-5/qguiapplication.html#screenAt
+
+from pyqtgraph import (
+    QtGui,
+)
 from PyQt5.QtCore import (
-    Qt, QCoreApplication
+    Qt,
+    QCoreApplication,
+)
+from PyQt5.QtWidgets import (
+    QWidget,
+    QMainWindow,
+    QApplication,
 )

 # Proper high DPI scaling is available in Qt >= 5.6.0. This attibute

@@ -28,55 +36,48 @@ if hasattr(Qt, 'AA_UseHighDpiPixmaps'):
    QCoreApplication.setAttribute(Qt.AA_UseHighDpiPixmaps, True)


-app = QtGui.QApplication([])
-window = QtGui.QMainWindow()
-main_widget = QtGui.QWidget()
+app = QApplication([])
+window = QMainWindow()
+main_widget = QWidget()
 window.setCentralWidget(main_widget)
 window.show()

+# TODO: move widget through multiple displays and auto-detect the pixel
+# ratio? (probably is gonna require calls to i3ipc on linux)..
 pxr = main_widget.devicePixelRatioF()

-# screen_num = app.desktop().screenNumber()
+# TODO: how to detect list of displays from API?
 # screen = app.screens()[screen_num]


+def ppscreeninfo(screen: QtGui.QScreen) -> None:
+    # screen_num = app.desktop().screenNumber()
+    name = screen.name()
+    size = screen.size()
+    geo = screen.availableGeometry()
+    phydpi = screen.physicalDotsPerInch()
+    logdpi = screen.logicalDotsPerInch()
+    rr = screen.refreshRate()
+
+    print(
+        # f'screen number: {screen_num}\n',
+        f'screen: {name}\n'
+        f' size: {size}\n'
+        f' geometry: {geo}\n'
+        f' logical dpi: {logdpi}\n'
+        f' devicePixelRationF(): {pxr}\n'
+        f' physical dpi: {phydpi}\n'
+        f' refresh rate: {rr}\n'
+    )
+
+    print('-'*50 + '\n')
+
+
 screen = app.screenAt(main_widget.geometry().center())
+ppscreeninfo(screen)

-name = screen.name()
-size = screen.size()
-geo = screen.availableGeometry()
-phydpi = screen.physicalDotsPerInch()
-logdpi = screen.logicalDotsPerInch()
-
-print(
-    # f'screen number: {screen_num}\n',
-    f'screen name: {name}\n'
-    f'screen size: {size}\n'
-    f'screen geometry: {geo}\n\n'
-    f'devicePixelRationF(): {pxr}\n'
-    f'physical dpi: {phydpi}\n'
-    f'logical dpi: {logdpi}\n'
-)
-
-print('-'*50)

 screen = app.primaryScreen()
+ppscreeninfo(screen)

-name = screen.name()
-size = screen.size()
-geo = screen.availableGeometry()
-phydpi = screen.physicalDotsPerInch()
-logdpi = screen.logicalDotsPerInch()
-
-print(
-    # f'screen number: {screen_num}\n',
-    f'screen name: {name}\n'
-    f'screen size: {size}\n'
-    f'screen geometry: {geo}\n\n'
-    f'devicePixelRationF(): {pxr}\n'
-    f'physical dpi: {phydpi}\n'
-    f'logical dpi: {logdpi}\n'
-)
-

 # app-wide font
 font = QtGui.QFont("Hack")
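
This helper script needs a windowing system to instantiate `QApplication`; when probing screen metrics headlessly (CI, ssh sessions), Qt's standard `offscreen` platform plugin can stand in. A short sketch (the env var is a stock Qt setting, nothing piker-specific):

    import os

    # force the offscreen platform before Qt initializes
    os.environ.setdefault('QT_QPA_PLATFORM', 'offscreen')

    from PyQt5.QtWidgets import QApplication

    app = QApplication([])
    screen = app.primaryScreen()
    print(
        f'screen: {screen.name()}\n'
        f' logical dpi: {screen.logicalDotsPerInch()}\n'
        f' physical dpi: {screen.physicalDotsPerInch()}\n'
        f' refresh rate: {screen.refreshRate()}\n'
    )
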
@@ -1,10 +1,15 @@
+from contextlib import asynccontextmanager as acm
 import os

 import pytest
 import tractor
-import trio
-from piker import log, config
-from piker.brokers import questrade
+from piker import (
+    # log,
+    config,
+)
+from piker._daemon import (
+    Services,
+)


 def pytest_addoption(parser):

@@ -14,15 +19,6 @@ def pytest_addoption(parser):
        help="Use a practice API account")


-@pytest.fixture(scope='session', autouse=True)
-def loglevel(request):
-    orig = tractor.log._default_loglevel
-    level = tractor.log._default_loglevel = request.config.option.loglevel
-    log.get_console_log(level)
-    yield level
-    tractor.log._default_loglevel = orig
-
-
 @pytest.fixture(scope='session')
 def test_config():
     dirname = os.path.dirname

@@ -37,9 +33,11 @@ def test_config():

 @pytest.fixture(scope='session', autouse=True)
 def confdir(request, test_config):
-    """If the `--confdir` flag is not passed use the
+    '''
+    If the `--confdir` flag is not passed use the
     broker config file found in that dir.
-    """
+
+    '''
     confdir = request.config.option.confdir
     if confdir is not None:
         config._override_config_dir(confdir)

@@ -47,49 +45,61 @@ def confdir(request, test_config):
     return confdir


-@pytest.fixture(scope='session', autouse=True)
-def travis(confdir):
-    is_travis = os.environ.get('TRAVIS', False)
-    if is_travis:
-        # this directory is cached, see .travis.yaml
-        conf_file = config.get_broker_conf_path()
-        refresh_token = os.environ['QT_REFRESH_TOKEN']
+# @pytest.fixture(scope='session', autouse=True)
+# def travis(confdir):
+#     is_travis = os.environ.get('TRAVIS', False)
+#     if is_travis:
+#         # # this directory is cached, see .travis.yaml
+#         conf_file = config.get_broker_conf_path()
+#         refresh_token = os.environ['QT_REFRESH_TOKEN']

-        def write_with_token(token):
-            # XXX don't pass the dir path here since may be
-            # written behind the scenes in the `confdir fixture`
-            if not os.path.isfile(conf_file):
-                open(conf_file, 'w').close()
-            conf, path = config.load()
-            conf.setdefault('questrade', {}).update(
-                {'refresh_token': token,
-                 'is_practice': 'True'}
-            )
-            config.write(conf, path)
+#         def write_with_token(token):
+#             # # XXX don't pass the dir path here since may be
+#             # # written behind the scenes in the `confdir fixture`
+#             if not os.path.isfile(conf_file):
+#                 open(conf_file, 'w').close()
+#             conf, path = config.load()
+#             conf.setdefault('questrade', {}).update(
+#                 {'refresh_token': token,
+#                  'is_practice': 'True'}
+#             )
+#             config.write(conf, path)

-        async def ensure_config():
-            # try to refresh current token using cached brokers config
-            # if it fails fail try using the refresh token provided by the
-            # env var and if that fails stop the test run here.
-            try:
-                async with questrade.get_client(ask_user=False):
-                    pass
-            except (
-                FileNotFoundError, ValueError,
-                questrade.BrokerError, questrade.QuestradeError,
-                trio.MultiError,
-            ):
-                # 3 cases:
-                # - config doesn't have a ``refresh_token`` k/v
-                # - cache dir does not exist yet
-                # - current token is expired; take it form env var
-                write_with_token(refresh_token)
+#         async def ensure_config():
+#             # # try to refresh current token using cached brokers config
+#             # # if it fails fail try using the refresh token provided by the
+#             # # env var and if that fails stop the test run here.
+#             try:
+#                 async with questrade.get_client(ask_user=False):
+#                     pass
+#             except (
+#                 FileNotFoundError, ValueError,
+#                 questrade.BrokerError, questrade.QuestradeError,
+#                 trio.MultiError,
+#             ):
+#                 # # 3 cases:
+#                 # # - config doesn't have a ``refresh_token`` k/v
+#                 # # - cache dir does not exist yet
+#                 # # - current token is expired; take it form env var
+#                 write_with_token(refresh_token)

-                async with questrade.get_client(ask_user=False):
-                    pass
+#                 async with questrade.get_client(ask_user=False):
+#                     pass

-        # XXX ``pytest_trio`` doesn't support scope or autouse
-        trio.run(ensure_config)
+#         # # XXX ``pytest_trio`` doesn't support scope or autouse
+#         trio.run(ensure_config)


+_ci_env: bool = os.environ.get('CI', False)
+
+
+@pytest.fixture(scope='session')
+def ci_env() -> bool:
+    '''
+    Detect CI envoirment.
+
+    '''
+    return _ci_env
+
+
 @pytest.fixture

@@ -105,3 +115,61 @@ def tmx_symbols():
 @pytest.fixture
 def cse_symbols():
     return ['TRUL.CN', 'CWEB.CN', 'SNN.CN']
+
+
+@acm
+async def _open_test_pikerd(
+    reg_addr: tuple[str, int] | None = None,
+    **kwargs,
+
+) -> tuple[
+    str,
+    int,
+    tractor.Portal
+]:
+    '''
+    Testing helper to startup the service tree and runtime on
+    a different port then the default to allow testing alongside
+    a running stack.
+
+    '''
+    import random
+    from piker._daemon import maybe_open_pikerd
+
+    if reg_addr is None:
+        port = random.randint(6e3, 7e3)
+        reg_addr = ('127.0.0.1', port)
+
+    # try:
+    async with (
+        maybe_open_pikerd(
+            registry_addr=reg_addr,
+            **kwargs,
+        ) as service_manager,
+    ):
+        # this proc/actor is the pikerd
+        assert service_manager is Services
+
+        async with tractor.wait_for_actor(
+            'pikerd',
+            arbiter_sockaddr=reg_addr,
+        ) as portal:
+            raddr = portal.channel.raddr
+            assert raddr == reg_addr
+            yield (
+                raddr[0],
+                raddr[1],
+                portal,
+                service_manager,
+            )
+
+
+@pytest.fixture
+def open_test_pikerd():
+
+    yield _open_test_pikerd
+
+    # TODO: teardown checks such as,
+    # - no leaked subprocs or shm buffers
+    # - all requested container service are torn down
+    # - certain ``tractor`` runtime state?
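
Note that `open_test_pikerd` is a plain sync fixture that yields the async context manager factory itself rather than entering it, so each test can open the runtime inside its own `trio.run()`. A stripped-down sketch of the pattern (all names here are illustrative stand-ins):

    from contextlib import asynccontextmanager as acm

    import pytest
    import trio

    @acm
    async def _open_service(port: int = 6666):
        # stand-in for booting a real daemon tree
        yield ('127.0.0.1', port)

    @pytest.fixture
    def open_service():
        # hand tests the *factory*, not an entered context:
        # fixtures are sync, but the context must be entered
        # inside the test's own trio event loop.
        yield _open_service

    def test_boot(open_service):
        async def main():
            async with open_service() as (host, port):
                assert host == '127.0.0.1'
        trio.run(main)
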
@@ -0,0 +1,128 @@
+'''
+Data feed layer APIs, performance, msg throttling.
+
+'''
+from collections import Counter
+from pprint import pprint
+from typing import AsyncContextManager
+
+import pytest
+# import tractor
+import trio
+from piker.data import (
+    ShmArray,
+    open_feed,
+)
+from piker.data._source import (
+    unpack_fqsn,
+)
+
+
+@pytest.mark.parametrize(
+    'fqsns',
+    [
+        # binance
+        (100, {'btcusdt.binance', 'ethusdt.binance'}, False),
+
+        # kraken
+        (20, {'ethusdt.kraken', 'xbtusd.kraken'}, True),
+
+        # binance + kraken
+        (100, {'btcusdt.binance', 'xbtusd.kraken'}, False),
+    ],
+    ids=lambda param: f'quotes={param[0]}@fqsns={param[1]}',
+)
+def test_multi_fqsn_feed(
+    open_test_pikerd: AsyncContextManager,
+    fqsns: set[str],
+    ci_env: bool
+):
+    '''
+    Start a real-time data feed for provided fqsn and pull
+    a few quotes then simply shut down.
+
+    '''
+    max_quotes, fqsns, run_in_ci = fqsns
+
+    if (
+        ci_env
+        and not run_in_ci
+    ):
+        pytest.skip('Skipping CI disabled test due to feed restrictions')
+
+    brokers = set()
+    for fqsn in fqsns:
+        brokername, key, suffix = unpack_fqsn(fqsn)
+        brokers.add(brokername)
+
+    async def main():
+        async with (
+            open_test_pikerd(),
+            open_feed(
+                fqsns,
+                loglevel='info',
+
+                # TODO: ensure throttle rate is applied
+                # limit to at least display's FPS
+                # avoiding needless Qt-in-guest-mode context switches
+                # tick_throttle=_quote_throttle_rate,
+
+            ) as feed
+        ):
+            # verify shm buffers exist
+            for fqin in fqsns:
+                flume = feed.flumes[fqin]
+                ohlcv: ShmArray = flume.rt_shm
+                hist_ohlcv: ShmArray = flume.hist_shm
+
+            async with feed.open_multi_stream(brokers) as stream:
+
+                # pull the first startup quotes, one for each fqsn, and
+                # ensure they match each flume's startup quote value.
+                fqsns_copy = fqsns.copy()
+                with trio.fail_after(0.5):
+                    for _ in range(1):
+                        first_quotes = await stream.receive()
+                        for fqsn, quote in first_quotes.items():
+
+                            # XXX: TODO: WTF apparently this error will get
+                            # supressed and only show up in the teardown
+                            # excgroup if we don't have the fix from
+                            # <tractorbugurl>
+                            # assert 0
+
+                            fqsns_copy.remove(fqsn)
+                            flume = feed.flumes[fqsn]
+                            assert quote['last'] == flume.first_quote['last']
+
+                cntr = Counter()
+                with trio.fail_after(6):
+                    async for quotes in stream:
+                        for fqsn, quote in quotes.items():
+                            cntr[fqsn] += 1
+
+                            # await tractor.breakpoint()
+                            flume = feed.flumes[fqsn]
+                            ohlcv: ShmArray = flume.rt_shm
+                            hist_ohlcv: ShmArray = flume.hist_shm
+
+                            # print quote msg, rt and history
+                            # buffer values on console.
+                            rt_row = ohlcv.array[-1]
+                            hist_row = hist_ohlcv.array[-1]
+                            # last = quote['last']
+
+                            # assert last == rt_row['close']
+                            # assert last == hist_row['close']
+                            pprint(
+                                f'{fqsn}: {quote}\n'
+                                f'rt_ohlc: {rt_row}\n'
+                                f'hist_ohlc: {hist_row}\n'
+                            )
+
+                        if cntr.total() >= max_quotes:
+                            break
+
+                assert set(cntr.keys()) == fqsns
+
+    trio.run(main)
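
The parametrization above packs three values (quote count, fqsn set, CI flag) into one `fqsns` tuple and labels each case via an `ids` callable so the test ids read meaningfully instead of `fqsns0`, `fqsns1`. A self-contained sketch of that labeling trick:

    import pytest

    @pytest.mark.parametrize(
        'case',
        [
            (100, {'btcusdt.binance'}),
            (20, {'xbtusd.kraken'}),
        ],
        # one readable test id per tuple
        ids=lambda case: f'quotes={case[0]}@fqsns={case[1]}',
    )
    def test_case_ids(case):
        max_quotes, fqsns = case
        assert max_quotes > 0
        assert all('.' in fqsn for fqsn in fqsns)
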
@@ -8,7 +8,6 @@ from trio.testing import trio_test
 from piker.brokers import questrade as qt
 import pytest
 import tractor
-from tractor.testing import tractor_test

 import piker
 from piker.brokers import get_brokermod

@@ -23,6 +22,12 @@ pytestmark = pytest.mark.skipif(
     reason="questrade tests can only be run locally with an API key",
 )

+# TODO: this module was removed from tractor into it's
+# tests/conftest.py, we need to rewrite the below tests
+# to use the `open_pikerd_runtime()` to make these work again
+# (if we're not just gonna junk em).
+# from tractor.testing import tractor_test
+

 # stock quote
 _ex_quotes = {

@@ -106,7 +111,7 @@ def match_packet(symbols, quotes, feed_type='stock'):
     assert not quotes


-@tractor_test
+# @tractor_test
 async def test_concurrent_tokens_refresh(us_symbols, loglevel):
     """Verify that concurrent requests from mulitple tasks work alongside
     random token refreshing which simulates an access token expiry + refresh

@@ -337,7 +342,7 @@ async def stream_stocks(feed, symbols):
        'options_and_options',
    ],
 )
-@tractor_test
+# @tractor_test
 async def test_quote_streaming(tmx_symbols, loglevel, stream_what):
     """Set up option streaming using the broker daemon.
     """
@@ -0,0 +1,176 @@
+'''
+Actor tree daemon sub-service verifications
+
+'''
+from typing import AsyncContextManager
+from contextlib import asynccontextmanager as acm
+
+import pytest
+import trio
+import tractor
+
+from piker._daemon import (
+    find_service,
+    check_for_service,
+    Services,
+)
+from piker.data import (
+    open_feed,
+)
+from piker.clearing import (
+    open_ems,
+)
+from piker.clearing._messages import (
+    BrokerdPosition,
+    Status,
+)
+from piker.clearing._client import (
+    OrderBook,
+)
+
+
+def test_runtime_boot(
+    open_test_pikerd: AsyncContextManager
+):
+    '''
+    Verify we can boot the `pikerd` service stack using the
+    `open_test_pikerd` fixture helper and that registry address details
+    match up.
+
+    '''
+    async def main():
+        port = 6666
+        daemon_addr = ('127.0.0.1', port)
+        services: Services
+
+        async with (
+            open_test_pikerd(
+                reg_addr=daemon_addr,
+            ) as (_, _, pikerd_portal, services),
+
+            tractor.wait_for_actor(
+                'pikerd',
+                arbiter_sockaddr=daemon_addr,
+            ) as portal,
+        ):
+            assert pikerd_portal.channel.raddr == daemon_addr
+            assert pikerd_portal.channel.raddr == portal.channel.raddr
+
+    trio.run(main)
+
+
+@acm
+async def ensure_service(
+    name: str,
+    sockaddr: tuple[str, int] | None = None,
+) -> None:
+    async with find_service(name) as portal:
+        remote_sockaddr = portal.channel.raddr
+        print(f'FOUND `{name}` @ {remote_sockaddr}')
+
+        if sockaddr:
+            assert remote_sockaddr == sockaddr
+
+        yield portal
+
+
+def test_ensure_datafeed_actors(
+    open_test_pikerd: AsyncContextManager
+
+) -> None:
+    '''
+    Verify that booting a data feed starts a `brokerd`
+    actor and a singleton global `samplerd` and opening
+    an order mode in paper opens the `paperboi` service.
+
+    '''
+    actor_name: str = 'brokerd'
+    backend: str = 'kraken'
+    brokerd_name: str = f'{actor_name}.{backend}'
+
+    async def main():
+        async with (
+            open_test_pikerd(),
+            open_feed(
+                ['xbtusdt.kraken'],
+                loglevel='info',
+            ) as feed
+        ):
+            # halt rt quote streams since we aren't testing them
+            await feed.pause()
+
+            async with (
+                ensure_service(brokerd_name),
+                ensure_service('samplerd'),
+            ):
+                pass
+
+    trio.run(main)
+
+
+def test_ensure_ems_in_paper_actors(
+    open_test_pikerd: AsyncContextManager
+
+) -> None:
+
+    actor_name: str = 'brokerd'
+    backend: str = 'kraken'
+    brokerd_name: str = f'{actor_name}.{backend}'
+
+    async def main():
+
+        # type declares
+        book: OrderBook
+        trades_stream: tractor.MsgStream
+        pps: dict[str, list[BrokerdPosition]]
+        accounts: list[str]
+        dialogs: dict[str, Status]
+
+        # ensure we timeout after is startup is too slow.
+        # TODO: something like this should be our start point for
+        # benchmarking end-to-end startup B)
+        with trio.fail_after(9):
+            async with (
+                open_test_pikerd() as (_, _, _, services),
+
+                open_ems(
+                    'xbtusdt.kraken',
+                    mode='paper',
+                ) as (
+                    book,
+                    trades_stream,
+                    pps,
+                    accounts,
+                    dialogs,
+                ),
+            ):
+                # there should be no on-going positions,
+                # TODO: though eventually we'll want to validate against
+                # local ledger and `pps.toml` state ;)
+                assert not pps
+                assert not dialogs
+
+                pikerd_subservices = ['emsd', 'samplerd']
+
+                async with (
+                    ensure_service('emsd'),
+                    ensure_service(brokerd_name),
+                    ensure_service(f'paperboi.{backend}'),
+                ):
+                    for name in pikerd_subservices:
+                        assert name in services.service_tasks
+
+                    # brokerd.kraken actor should have been started
+                    # implicitly by the ems.
+                    assert brokerd_name in services.service_tasks
+
+                    print('ALL SERVICES STARTED, terminating..')
+                    await services.cancel_service('emsd')
+
+    with pytest.raises(
+        tractor._exceptions.ContextCancelled,
+    ) as exc_info:
+        trio.run(main)
+
+    cancel_msg: str = '`_emsd_main()` was remotely cancelled by its caller'
+    assert cancel_msg in exc_info.value.args[0]
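
The last test treats a remote cancellation as the expected outcome: the whole `trio.run()` is wrapped in `pytest.raises` and the exception payload is matched afterwards. A generic, self-contained version of that assertion pattern (using `RuntimeError` as a stand-in for `tractor`'s `ContextCancelled`):

    import pytest
    import trio

    async def main() -> None:
        # stand-in for a service task that gets cancelled remotely
        raise RuntimeError(
            '`_emsd_main()` was remotely cancelled by its caller'
        )

    def test_expected_cancel() -> None:
        with pytest.raises(RuntimeError) as exc_info:
            trio.run(main)

        assert 'remotely cancelled' in exc_info.value.args[0]
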