Compare commits: `310_plus`...`storage_mi` — 1054 commits
Author | SHA1 | Date |
---|---|---|
Tyler Goodlet | d3da84e8d2 | |
goodboy | eb51033b18 | |
Tyler Goodlet | 12883c3c90 | |
Tyler Goodlet | 8ceaa27872 | |
Tyler Goodlet | 97290fcb05 | |
Tyler Goodlet | 44a3115539 | |
Tyler Goodlet | 0772b4a0fa | |
Tyler Goodlet | 15064d94cb | |
Tyler Goodlet | 9a00c45923 | |
Tyler Goodlet | 7cc9911565 | |
Tyler Goodlet | 79b0db4449 | |
Tyler Goodlet | 5aaa7f47dc | |
Tyler Goodlet | aa36abf36e | |
Tyler Goodlet | 2014019b06 | |
Tyler Goodlet | 75b7a8b56e | |
Tyler Goodlet | 31392af427 | |
Tyler Goodlet | 6540c415c1 | |
Tyler Goodlet | fbc12b1b07 | |
Tyler Goodlet | cda7a54718 | |
Tyler Goodlet | 6f92c6b52d | |
Tyler Goodlet | 441243f83b | |
Tyler Goodlet | cec2967071 | |
Tyler Goodlet | f95ea19b21 | |
Tyler Goodlet | eca048c0c5 | |
Tyler Goodlet | a2d40937a3 | |
Tyler Goodlet | 31f2b01c3e | |
Tyler Goodlet | b226b678e9 | |
Tyler Goodlet | dd87d1142e | |
Tyler Goodlet | afac553ea2 | |
Tyler Goodlet | 93c81fa4d1 | |
Tyler Goodlet | bfe3ea1f59 | |
Tyler Goodlet | 56629b6b2e | |
Tyler Goodlet | bb723abc9d | |
Tyler Goodlet | 7694419e71 | |
Tyler Goodlet | b078a06621 | |
Tyler Goodlet | 05b67c27d0 | |
Tyler Goodlet | 8c66f066bd | |
Tyler Goodlet | 959e423849 | |
Tyler Goodlet | 7b196b1b97 | |
Tyler Goodlet | fe0695fb7b | |
jaredgoldman | dae8e59d26 | |
Tyler Goodlet | aba238e8b1 | |
Tyler Goodlet | d3192bb8c2 | |
goodboy | 6cd18576aa | |
Tyler Goodlet | daa6a5c80a | |
goodboy | 201f86e482 | |
Guillermo Rodriguez | d4ac8972ac | |
Tyler Goodlet | b4a1cc8f22 | |
Tyler Goodlet | 69b85aa7e5 | |
Tyler Goodlet | 3a4794e9d1 | |
Guillermo Rodriguez | 6be96a96aa | |
Guillermo Rodriguez | d704b153ca | |
Guillermo Rodriguez | 20d91f5e06 | |
Guillermo Rodriguez | 6c23c79f2a | |
Guillermo Rodriguez | f5b8b9a14f | |
Guillermo Rodriguez | dc78994dcf | |
goodboy | 269a04ba1a | |
Tyler Goodlet | 569df45d18 | |
Tyler Goodlet | f53f4df583 | |
jaredgoldman | d04fe366ab | |
jaredgoldman | c83fe5aaa7 | |
jaredgoldman | 41f81eb701 | |
jaredgoldman | 05fdc9dd60 | |
jaredgoldman | 1323981cc4 | |
jaredgoldman | 882032e3a3 | |
jaredgoldman | a6257ae615 | |
jaredgoldman | 973c068e96 | |
jaredgoldman | d7317c3710 | |
jaredgoldman | 87eb9c5772 | |
jaredgoldman | ecb22dda1a | |
jaredgoldman | 6f15d47012 | |
jaredgoldman | 802af306ac | |
jaredgoldman | e4e368923d | |
jaredgoldman | 342aec648b | |
jaredgoldman | 55253c8469 | |
jaredgoldman | 4b72d3ba99 | |
jaredgoldman | 61296bbdfc | |
jaredgoldman | 36f466fff8 | |
Guillermo Rodriguez | 26146097eb | |
jaredgoldman | fcd8b8eb78 | |
jaredgoldman | 3e83764b5b | |
jaredgoldman | 3a6fbabaf8 | |
jaredgoldman | 85ad23a1e9 | |
jaredgoldman | 15525c2b46 | |
jaredgoldman | 76736a5441 | |
jaredgoldman | 4c2e776e01 | |
jaredgoldman | 1e748f11ef | |
jaredgoldman | 3fcad16298 | |
jaredgoldman | 2d25d1f048 | |
jaredgoldman | e54d928405 | |
jaredgoldman | c99381216d | |
algorandpa | db2e2ed78f | |
algorandpa | 3bc54e308f | |
algorandpa | 8c9c165e0a | |
algorandpa | 7bd8019876 | |
algorandpa | 8122e6c86f | |
algorandpa | 7e87dc52eb | |
algorandpa | 2c366d7349 | |
algorandpa | 9acbfacd4c | |
algorandpa | 316ead577d | |
algorandpa | 4b6d3fe138 | |
algorandpa | 0dec2b9c89 | |
algorandpa | acc86ae6db | |
algorandpa | 730906a072 | |
algorandpa | e5cefeb44b | |
algorandpa | 7142a6a7ca | |
algorandpa | dff8abd6ad | |
algorandpa | b180602a3e | |
algorandpa | 95b9dacb7a | |
algorandpa | df868cec35 | |
algorandpa | 68a196218b | |
algorandpa | 84cd1e0059 | |
algorandpa | 86b4386522 | |
algorandpa | 5bb93ccc5f | |
algorandpa | 3028a8b1f8 | |
algorandpa | 6126c4f438 | |
algorandpa | 41bb0445e0 | |
algorandpa | 97627a4976 | |
algorandpa | 1b2fce430f | |
algorandpa | 8cd2354d73 | |
algorandpa | 9c28d7086e | |
algorandpa | a4bd51a01b | |
algorandpa | b67d020e23 | |
Guillermo Rodriguez | 85a1b858b4 | |
Guillermo Rodriguez | 47bf45f30e | |
Esmeralda Gallardo | b96e2c314a | |
Esmeralda Gallardo | f96d6a04b6 | |
Guillermo Rodriguez | acc6249d88 | |
jaredgoldman | 82174d01c5 | |
Tyler Goodlet | 0b678c97f4 | |
Tyler Goodlet | d0d1554d74 | |
Esmeralda Gallardo | 4122c482ba | |
Esmeralda Gallardo | b5cdf14036 | |
Esmeralda Gallardo | 3ce8bfa012 | |
Guillermo Rodriguez | bf9ca4a4a8 | |
Guillermo Rodriguez | 17a4fe4b2f | |
Esmeralda Gallardo | 0dc24bd475 | |
Tyler Goodlet | b3400f0d9c | |
Tyler Goodlet | 2bad692703 | |
Tyler Goodlet | cd3e9b1b2a | |
Tyler Goodlet | e01220af14 | |
goodboy | bfc0220a47 | |
goodboy | 139b8ba0f4 | |
Guillermo Rodriguez | 71b2f24a2e | |
Guillermo Rodriguez | ffd707db62 | |
Tyler Goodlet | fefb0de51f | |
Tyler Goodlet | 59f34c94b0 | |
Tyler Goodlet | ebf53e32bd | |
Tyler Goodlet | 9ce52033f0 | |
Tyler Goodlet | 9876f200c1 | |
Tyler Goodlet | 81b8cd5461 | |
Tyler Goodlet | 731eb91a58 | |
Tyler Goodlet | 49ca743e6a | |
Tyler Goodlet | a36d4b1dc6 | |
Tyler Goodlet | 33df4f9927 | |
Tyler Goodlet | 72a9af21ac | |
Tyler Goodlet | 1a10514cad | |
Tyler Goodlet | 5d9b7c72b3 | |
Tyler Goodlet | efddd43760 | |
Tyler Goodlet | 1606b3a9c3 | |
Tyler Goodlet | 8b5b1c214b | |
Tyler Goodlet | 9780263cfa | |
Tyler Goodlet | e1e3afb495 | |
Tyler Goodlet | f9eb880404 | |
Tyler Goodlet | a3bbbeda9d | |
Tyler Goodlet | 3ad7844fdf | |
Tyler Goodlet | b71c61e23f | |
Tyler Goodlet | 9650b32786 | |
Tyler Goodlet | 433697cc4f | |
Tyler Goodlet | d622b4157c | |
Tyler Goodlet | 1add591b2c | |
Tyler Goodlet | 60440bc6b7 | |
Tyler Goodlet | 4003729231 | |
Tyler Goodlet | 934b32c342 | |
Tyler Goodlet | 97bb3b48da | |
Tyler Goodlet | da618e1d38 | |
Tyler Goodlet | 23c03a0905 | |
Tyler Goodlet | 07c8ed8a3a | |
Tyler Goodlet | bcf2a9868d | |
Tyler Goodlet | c09c3925a4 | |
Tyler Goodlet | 92ce1b3304 | |
Tyler Goodlet | 0fc06a98d4 | |
Tyler Goodlet | 4ba99494f0 | |
Tyler Goodlet | a8e1796a8b | |
Tyler Goodlet | 5ced05aab0 | |
Tyler Goodlet | 4a6339ffc2 | |
Tyler Goodlet | efa4089920 | |
Tyler Goodlet | 35cc37ddc1 | |
Tyler Goodlet | 5ea4be1d4b | |
Tyler Goodlet | 0c5b5a5aea | |
Tyler Goodlet | 4027d683e9 | |
Tyler Goodlet | 7afc9301ac | |
Tyler Goodlet | 12c6d58c2a | |
Tyler Goodlet | c5db7295e6 | |
Tyler Goodlet | 02c3ea1743 | |
Tyler Goodlet | 63f0567418 | |
Tyler Goodlet | 3e17e52555 | |
Tyler Goodlet | 65dca16dc0 | |
Tyler Goodlet | e742d18a6c | |
Tyler Goodlet | 7e29c36a24 | |
Tyler Goodlet | 4d2b5c8f86 | |
Tyler Goodlet | fe932a96a9 | |
Tyler Goodlet | c1b7063e3c | |
goodboy | 42d2f9e461 | |
goodboy | 31fc2d73ce | |
Tyler Goodlet | 1346c33f04 | |
Tyler Goodlet | cee6321a9f | |
Tyler Goodlet | 1abed2ad9e | |
Tyler Goodlet | 5bd6fa3cbf | |
Tyler Goodlet | a82911d8a9 | |
Tyler Goodlet | dc88364253 | |
Tyler Goodlet | 4c51a68691 | |
Tyler Goodlet | 42d3537516 | |
Tyler Goodlet | 3fd394d693 | |
Tyler Goodlet | a7a08aced9 | |
Tyler Goodlet | 1d83fdb510 | |
Tyler Goodlet | 924fcca463 | |
Tyler Goodlet | 26f497e2bb | |
Tyler Goodlet | e37e118a7e | |
Tyler Goodlet | b2bb7f4923 | |
Tyler Goodlet | 97b03bbfbb | |
goodboy | d690ad2bab | |
Guillermo Rodriguez | 0f082ed9d4 | |
Guillermo Rodriguez | 2851a0ecc5 | |
Tyler Goodlet | 340045af77 | |
Tyler Goodlet | c1988c4d8d | |
Tyler Goodlet | 6a0c36922e | |
Tyler Goodlet | 459cbfdbad | |
Tyler Goodlet | fc17187ff4 | |
Tyler Goodlet | a7d78a3f40 | |
Tyler Goodlet | 7ce3f10e73 | |
Tyler Goodlet | bfc6014ad3 | |
Tyler Goodlet | a5eed8fc1e | |
Tyler Goodlet | cdec4782f0 | |
Tyler Goodlet | f30a48b82c | |
Tyler Goodlet | 98de22a740 | |
Tyler Goodlet | efbb8e86d4 | |
Tyler Goodlet | b6521498f4 | |
Tyler Goodlet | 06f1b94147 | |
Tyler Goodlet | ffb57f0256 | |
Tyler Goodlet | ed1f64cf43 | |
Tyler Goodlet | bf8ea33697 | |
Tyler Goodlet | bc17308de7 | |
Tyler Goodlet | 1ece704d6e | |
Tyler Goodlet | dea1c1c2d6 | |
Tyler Goodlet | 3300a240c6 | |
Tyler Goodlet | 50ef4efccb | |
Tyler Goodlet | 51f2461e8b | |
Tyler Goodlet | 444768d30f | |
Tyler Goodlet | 0d0675ac7e | |
Tyler Goodlet | 24b384f3ef | |
Tyler Goodlet | 93330954c2 | |
Tyler Goodlet | edf721f755 | |
Tyler Goodlet | 530b2731ba | |
Tyler Goodlet | 14104185d2 | |
Tyler Goodlet | 3019c35e30 | |
Tyler Goodlet | 4d74bc29b4 | |
Tyler Goodlet | 3638ae8d3e | |
Tyler Goodlet | c5dd67e63c | |
Tyler Goodlet | 0663880a6d | |
Tyler Goodlet | 3bed142d15 | |
Tyler Goodlet | 9fcc6f9c44 | |
Tyler Goodlet | 7aef31701b | |
Tyler Goodlet | 135627e142 | |
Tyler Goodlet | 5216a6b732 | |
Tyler Goodlet | 2a797d32dc | |
Tyler Goodlet | 35a16ded2d | |
Tyler Goodlet | 44f50e3d0e | |
Tyler Goodlet | 96b871c4d7 | |
Tyler Goodlet | d2aad74dfc | |
Tyler Goodlet | 50209752c3 | |
Tyler Goodlet | 5ab4e5493e | |
Tyler Goodlet | e252f70253 | |
Tyler Goodlet | 98438e29ef | |
Tyler Goodlet | d649a7d1fa | |
Tyler Goodlet | 2669ced629 | |
Tyler Goodlet | f2c0987a04 | |
Tyler Goodlet | bb84715bf0 | |
Tyler Goodlet | 0bdb7261d1 | |
Tyler Goodlet | 12857a258b | |
Tyler Goodlet | 46808fbb89 | |
Tyler Goodlet | 6ca8334253 | |
Tyler Goodlet | a3844f9922 | |
Tyler Goodlet | 58b36db2e5 | |
Tyler Goodlet | a33f58a61a | |
Tyler Goodlet | a4392696a1 | |
Tyler Goodlet | d5844ce8ff | |
Tyler Goodlet | bf88b40a50 | |
Tyler Goodlet | e4a0d4ecea | |
Tyler Goodlet | cca3417c57 | |
Tyler Goodlet | 031d7967de | |
Tyler Goodlet | 2e67e98b4d | |
Tyler Goodlet | 7124a131dd | |
Tyler Goodlet | 9052ed5ddf | |
Tyler Goodlet | 7ec21c7f3b | |
Tyler Goodlet | 309ae240cf | |
Tyler Goodlet | 382a619a03 | |
Tyler Goodlet | 7f3f6f871a | |
Tyler Goodlet | 6ea04f850d | |
Tyler Goodlet | 3d5695f40a | |
Tyler Goodlet | 5affad942f | |
Tyler Goodlet | eb9ab20646 | |
Tyler Goodlet | f3bab826f6 | |
Tyler Goodlet | 2b9ca5f805 | |
Tyler Goodlet | 25a75e5bec | |
Tyler Goodlet | 702ae29a2c | |
Tyler Goodlet | ac1f37a2c2 | |
Tyler Goodlet | 344d2eeb9e | |
Tyler Goodlet | 9133103f8f | |
Tyler Goodlet | 166d14af69 | |
Tyler Goodlet | 696c6f8897 | |
Tyler Goodlet | be21f9829e | |
Tyler Goodlet | 5a0673d66f | |
Tyler Goodlet | 6cacd7d18b | |
Tyler Goodlet | 5b08e9cba3 | |
Tyler Goodlet | d3f5ff1b4f | |
Tyler Goodlet | e45bc4c619 | |
Tyler Goodlet | baee86a2d6 | |
Tyler Goodlet | 86d09d9305 | |
Tyler Goodlet | 9ace053aaf | |
Guillermo Rodriguez | 69707786fc | |
Guillermo Rodriguez | 096e87cd3b | |
Guillermo Rodriguez | 5017c541db | |
Guillermo Rodriguez | 3ea6554ab0 | |
Guillermo Rodriguez | f0b17cb8f7 | |
Guillermo Rodriguez | 5ca45362c8 | |
Tyler Goodlet | 1f2081911f | |
goodboy | a7d02ecec8 | |
goodboy | 11ba706797 | |
Tyler Goodlet | 50ad7370c7 | |
goodboy | 0616cbd1f1 | |
Tyler Goodlet | af92602027 | |
Tyler Goodlet | d8bf45b02d | |
Tyler Goodlet | 07ab853d3d | |
Tyler Goodlet | 414866fc6b | |
Tyler Goodlet | bc7fe6114d | |
Tyler Goodlet | 8d592886fa | |
Tyler Goodlet | 69ea296a9b | |
Tyler Goodlet | 03821fdf6f | |
Tyler Goodlet | 1aa9ab03da | |
Tyler Goodlet | 1d83b43efe | |
Tyler Goodlet | 6986be1b21 | |
Tyler Goodlet | 92c50aa6a7 | |
Tyler Goodlet | eac79c5cdd | |
Tyler Goodlet | 7aec238f5f | |
Tyler Goodlet | be3dc69290 | |
Tyler Goodlet | 6100bd19c7 | |
Tyler Goodlet | d57bc6c6d9 | |
Tyler Goodlet | 58b42d629f | |
Tyler Goodlet | 36a81cb2de | |
Tyler Goodlet | ae0f3118f4 | |
Tyler Goodlet | 727c7ce2b1 | |
Tyler Goodlet | a39c980266 | |
Tyler Goodlet | 00be100e71 | |
Tyler Goodlet | 9217610734 | |
Tyler Goodlet | 31af7a2c99 | |
Tyler Goodlet | 34fac364fd | |
goodboy | dcdfd2577a | |
goodboy | 6733dc57af | |
Tyler Goodlet | 05c4b6afb9 | |
Tyler Goodlet | 4b22325ffc | |
Tyler Goodlet | 9d16299f60 | |
Tyler Goodlet | ab1f15506d | |
Tyler Goodlet | 0db5451e47 | |
goodboy | 61218f30f5 | |
Tyler Goodlet | fcfc0f31f0 | |
Tyler Goodlet | 69074f4fa5 | |
Tyler Goodlet | fe4fb37b58 | |
Tyler Goodlet | 7cfd431a2b | |
Tyler Goodlet | 61e20a86cc | |
Tyler Goodlet | d9b73e1d08 | |
goodboy | 4833d56ecb | |
Tyler Goodlet | 090d1ba524 | |
Tyler Goodlet | afc45a8e16 | |
Tyler Goodlet | 844626f6dc | |
Tyler Goodlet | 470079665f | |
Tyler Goodlet | 0cd87d9e54 | |
Tyler Goodlet | 09711750bf | |
Tyler Goodlet | 71ca4c8e1f | |
Tyler Goodlet | 9811dcf5f3 | |
Tyler Goodlet | da659cf607 | |
Tyler Goodlet | 37e0ec7b7d | |
Tyler Goodlet | 045b76bab5 | |
Tyler Goodlet | c8c641a038 | |
Tyler Goodlet | 6a1bb13feb | |
Tyler Goodlet | 75591dd7e9 | |
Tyler Goodlet | d792fed099 | |
Tyler Goodlet | d66fb49077 | |
Tyler Goodlet | 78c7c8524c | |
Tyler Goodlet | a746258f99 | |
Tyler Goodlet | 5adb234a24 | |
Tyler Goodlet | 2778ee1401 | |
Tyler Goodlet | e0ca5d5200 | |
Tyler Goodlet | b3d1b1aa63 | |
Tyler Goodlet | 5ec1a72a3d | |
Tyler Goodlet | a342f7d2d4 | |
Tyler Goodlet | 2c76cee928 | |
Tyler Goodlet | b5f2ff854c | |
Tyler Goodlet | 3efb0b5884 | |
Tyler Goodlet | 009bbe456e | |
Tyler Goodlet | daf7b3f4a5 | |
Tyler Goodlet | b0a6dd46e4 | |
Tyler Goodlet | 1c5141f4c6 | |
Tyler Goodlet | 4cdd2271b0 | |
Tyler Goodlet | 89095d4e9f | |
Tyler Goodlet | 04c0d77595 | |
Tyler Goodlet | d1b07c625f | |
Tyler Goodlet | a5bb33b0ff | |
Tyler Goodlet | 8e1ceca43d | |
Tyler Goodlet | c85e7790de | |
Tyler Goodlet | 2399c618b6 | |
Tyler Goodlet | 7ec88f8cac | |
Tyler Goodlet | eacd44dd65 | |
Tyler Goodlet | e5e70a6011 | |
Tyler Goodlet | 7da5c2b238 | |
Tyler Goodlet | 1ee49df31d | |
Tyler Goodlet | f2df32a673 | |
Tyler Goodlet | 125e31dbf3 | |
Tyler Goodlet | 715e693564 | |
Tyler Goodlet | 43717c92d9 | |
Tyler Goodlet | f370685c62 | |
Tyler Goodlet | 4300470786 | |
Tyler Goodlet | b89fd9652c | |
Tyler Goodlet | 51f4afbd88 | |
Tyler Goodlet | 7ef8111381 | |
Tyler Goodlet | 35b097469b | |
Tyler Goodlet | 94290c7d8b | |
Tyler Goodlet | 73379d3627 | |
Tyler Goodlet | 23835f2c08 | |
Tyler Goodlet | d2aee00a56 | |
Tyler Goodlet | cf6e44cb9c | |
Tyler Goodlet | a146ad9e69 | |
Tyler Goodlet | 70ad1a1860 | |
Tyler Goodlet | f3ef73ef41 | |
Tyler Goodlet | a9832dc0cb | |
Tyler Goodlet | 9be245e955 | |
Tyler Goodlet | 800773e585 | |
goodboy | 8d1eb81f16 | |
Tyler Goodlet | 963e5bdd62 | |
Tyler Goodlet | 55de9abc41 | |
Tyler Goodlet | 593db0ed0d | |
Tyler Goodlet | 06622105cd | |
Tyler Goodlet | 008ae47e14 | |
Tyler Goodlet | 81585d9e6e | |
Tyler Goodlet | f6b7057b0d | |
Tyler Goodlet | 76f920a16b | |
Tyler Goodlet | f232d6d4ee | |
Tyler Goodlet | b7e1443618 | |
Tyler Goodlet | 5d021ffb85 | |
Tyler Goodlet | 28fd795280 | |
Tyler Goodlet | c944db5f02 | |
Tyler Goodlet | 967e28b7ac | |
Tyler Goodlet | 2a158aea2c | |
Tyler Goodlet | 88870fdda7 | |
Tyler Goodlet | 326f153a47 | |
Tyler Goodlet | f5cd63ad35 | |
Tyler Goodlet | 1e96ca32df | |
Tyler Goodlet | c088963cf2 | |
Tyler Goodlet | 79fcbcc281 | |
Tyler Goodlet | ddbba76095 | |
Tyler Goodlet | 0a959c1c74 | |
Tyler Goodlet | e348968113 | |
Tyler Goodlet | 7bbe86d6fb | |
Tyler Goodlet | 7b9db86753 | |
Tyler Goodlet | 20a396270e | |
Tyler Goodlet | 81516c5204 | |
Tyler Goodlet | d6fb6fe3ae | |
Tyler Goodlet | 8476d8d056 | |
Tyler Goodlet | 36868bb86e | |
Tyler Goodlet | 29b6b3e54f | |
Tyler Goodlet | 8a01c9e42b | |
Tyler Goodlet | 2c4daf08e0 | |
Tyler Goodlet | 7daab6329d | |
Tyler Goodlet | bb6452b969 | |
Tyler Goodlet | 25bfe6f035 | |
Tyler Goodlet | 32b36aa042 | |
Tyler Goodlet | e7de5404d3 | |
Tyler Goodlet | 18dc8b08e4 | |
Tyler Goodlet | 5bf3cb8e4b | |
Tyler Goodlet | c7d5db5f90 | |
Tyler Goodlet | 1bf1965a8b | |
Tyler Goodlet | 051a8729b6 | |
Tyler Goodlet | 8e85ed92c8 | |
Tyler Goodlet | 2a9042b1b1 | |
Tyler Goodlet | 344a634cb6 | |
Tyler Goodlet | 508de6182a | |
Tyler Goodlet | 40000345a1 | |
goodboy | 220d38b4a9 | |
Esmeralda Gallardo | 888438ca25 | |
goodboy | d84bcf77c0 | |
Guillermo Rodriguez | 0474d66531 | |
algorandpa | f218b804b4 | |
Guillermo Rodriguez | 7b14f498a8 | |
Esmeralda Gallardo | 18e4352faf | |
Esmeralda Gallardo | a6e921548b | |
Esmeralda Gallardo | 3f5dec82ed | |
Esmeralda Gallardo | db0b59abaa | |
algorandpa | f5bcd1d91c | |
algorandpa | db11c3c0f8 | |
Tyler Goodlet | df6071ae9e | |
goodboy | cc1694760c | |
goodboy | 4d8b22dd8f | |
Tyler Goodlet | fd296a557e | |
Tyler Goodlet | 0de2f863bd | |
Tyler Goodlet | de93da202b | |
Tyler Goodlet | 5c459f21be | |
goodboy | 5915cf3acf | |
algorandpa | 997bf31bd4 | |
algorandpa | f3427bb13b | |
algorandpa | 6fa266e3e0 | |
Guillermo Rodriguez | 019a6432fb | |
goodboy | 209e1085ae | |
Tyler Goodlet | 0ef75e6aa6 | |
Tyler Goodlet | 243d0329f6 | |
Tyler Goodlet | a0ce9ecc0d | |
Tyler Goodlet | af9c30c3f5 | |
Zoltan | ebbfa47baf | |
Tyler Goodlet | 02fbc0a0ed | |
goodboy | 4729e4c6bc | |
goodboy | a44b8e3e22 | |
goodboy | 8a89303cb3 | |
Tyler Goodlet | e547b307f6 | |
Tyler Goodlet | 72ec9b1e10 | |
Tyler Goodlet | 40c70ae6d8 | |
Tyler Goodlet | d3fefdeaff | |
Tyler Goodlet | 8be005212f | |
Tyler Goodlet | 5a2795e76b | |
Tyler Goodlet | a987f0ab81 | |
Tyler Goodlet | d99b40317d | |
Tyler Goodlet | 9ae519f6fa | |
Tyler Goodlet | 8f3fe8e542 | |
Tyler Goodlet | 490d85aba5 | |
goodboy | ba2e1e04cd | |
Tyler Goodlet | 5d4929db9c | |
Tyler Goodlet | c41400ae18 | |
Tyler Goodlet | e71bd2cb1e | |
Tyler Goodlet | be24473fb4 | |
Tyler Goodlet | b524ea5c22 | |
Tyler Goodlet | d46945cb09 | |
Tyler Goodlet | 1d4fc6f327 | |
Tyler Goodlet | 5976acbe76 | |
goodboy | 11ecf9cb09 | |
goodboy | 2dac531729 | |
Tyler Goodlet | 1fadf58ab7 | |
Tyler Goodlet | ceca0d9fb7 | |
Tyler Goodlet | df16726211 | |
Tyler Goodlet | fb4f1732b6 | |
Tyler Goodlet | d5b357b69a | |
Tyler Goodlet | 610fb5f7c6 | |
Tyler Goodlet | 2b231ba631 | |
Tyler Goodlet | 286228c290 | |
Tyler Goodlet | a1a24da7b6 | |
Tyler Goodlet | 553d0557b6 | |
Tyler Goodlet | 2f7b272d8c | |
Tyler Goodlet | dc1edeecda | |
Tyler Goodlet | 4ca7817735 | |
Tyler Goodlet | 5b63585398 | |
Tyler Goodlet | 0000d9a314 | |
Tyler Goodlet | f7ec66362e | |
Tyler Goodlet | b7ef0596b9 | |
Tyler Goodlet | 143e86a80c | |
Tyler Goodlet | 956c7d3435 | |
Tyler Goodlet | 330d16262e | |
Tyler Goodlet | c7f57b940c | |
Tyler Goodlet | 27bd3c07af | |
Tyler Goodlet | 55dc27a197 | |
Tyler Goodlet | a11f20fac2 | |
Tyler Goodlet | daebb78755 | |
Tyler Goodlet | 90a395a069 | |
Tyler Goodlet | 23d0353934 | |
Tyler Goodlet | ede67ed184 | |
Tyler Goodlet | 811d21e111 | |
Tyler Goodlet | 54567d33da | |
Tyler Goodlet | 61ca5f7e19 | |
Tyler Goodlet | 7396624be0 | |
Tyler Goodlet | 25b90afbdb | |
Tyler Goodlet | 72dfeb2b4e | |
Tyler Goodlet | 6b34c9e866 | |
Tyler Goodlet | e7ec01b8e6 | |
Tyler Goodlet | fce7055c62 | |
Tyler Goodlet | bf7d5e9a71 | |
Tyler Goodlet | 2a866dde65 | |
Tyler Goodlet | 220981e718 | |
Tyler Goodlet | 8537a4091b | |
Tyler Goodlet | 71a11a23bd | |
Tyler Goodlet | fa368b1263 | |
Tyler Goodlet | e6dd1458f8 | |
Tyler Goodlet | 9486d993ce | |
Tyler Goodlet | 30994dac10 | |
Tyler Goodlet | 8a61211c8c | |
Tyler Goodlet | c43f7eb656 | |
goodboy | d05caa4b02 | |
Tyler Goodlet | 63e9af002d | |
goodboy | 5144299f4f | |
Tyler Goodlet | c437f9370a | |
Tyler Goodlet | 94f81587ab | |
Tyler Goodlet | 2bc25e3593 | |
Tyler Goodlet | 1d9ab7b0de | |
Tyler Goodlet | 4c96a4878e | |
Tyler Goodlet | 8cd56cb6d3 | |
Tyler Goodlet | c246dcef6f | |
Tyler Goodlet | 26d6e10ad7 | |
Tyler Goodlet | 3924c66bd0 | |
Tyler Goodlet | 2fbfe583dd | |
Tyler Goodlet | 525f805cdb | |
Tyler Goodlet | b65c02336d | |
Tyler Goodlet | d3abfce540 | |
Tyler Goodlet | 49433ea87d | |
goodboy | 31b0d8cee8 | |
Tyler Goodlet | 35871d0213 | |
Tyler Goodlet | 4877af9bc3 | |
Tyler Goodlet | 909e068121 | |
Tyler Goodlet | cf835b97ca | |
Tyler Goodlet | 30bce42c0b | |
Tyler Goodlet | 48ff4859e6 | |
Tyler Goodlet | 887583d27f | |
Tyler Goodlet | 45b97bf6c3 | |
Tyler Goodlet | 91397b85a4 | |
Tyler Goodlet | 47f81b31af | |
goodboy | 30c452cfd0 | |
Tyler Goodlet | fda1c5b554 | |
goodboy | d6c9834a9a | |
Tyler Goodlet | 41b0c11aaa | |
Tyler Goodlet | cc67d23eee | |
Tyler Goodlet | 4818af1445 | |
Tyler Goodlet | 2cf1742999 | |
Tyler Goodlet | 25ac6e6665 | |
Tyler Goodlet | 90754f979b | |
Tyler Goodlet | c0d490ed63 | |
Tyler Goodlet | 7c6d12d982 | |
Tyler Goodlet | fd8c05e024 | |
Tyler Goodlet | 5d65c86c84 | |
Tyler Goodlet | cf11e8d7d8 | |
Tyler Goodlet | ed868f6246 | |
goodboy | 5d371ad80e | |
Tyler Goodlet | 6897aed6b6 | |
Tyler Goodlet | a61a11f86b | |
Tyler Goodlet | 286f620f8e | |
Tyler Goodlet | b7e60b9653 | |
Tyler Goodlet | df42e7acc4 | |
Tyler Goodlet | e492e9ca0c | |
Tyler Goodlet | 44c6f6dfda | |
Tyler Goodlet | ad2100fe3f | |
Tyler Goodlet | ae64ac79a6 | |
Tyler Goodlet | 20663dfa1c | |
Tyler Goodlet | 70f2241d22 | |
Tyler Goodlet | b3fcc25e21 | |
Tyler Goodlet | 4f15ce346b | |
Tyler Goodlet | 445849337f | |
Tyler Goodlet | 3fd7107e08 | |
Tyler Goodlet | 73a02d54b7 | |
Tyler Goodlet | b734af6dd0 | |
Tyler Goodlet | f7c0ee930a | |
Tyler Goodlet | ead426abc4 | |
Tyler Goodlet | bcd6bbb7ca | |
Tyler Goodlet | 80929d080f | |
Tyler Goodlet | eed47b3733 | |
Tyler Goodlet | d5f0c59b57 | |
Tyler Goodlet | d11dc787a1 | |
Tyler Goodlet | 1e81feee46 | |
Tyler Goodlet | 40a9761943 | |
Tyler Goodlet | 256bcf36d3 | |
Tyler Goodlet | 9944277096 | |
Tyler Goodlet | f9dc5637fa | |
Tyler Goodlet | addedc20f1 | |
Tyler Goodlet | 1fa6e8d9ba | |
Tyler Goodlet | 2a06dc997f | |
Tyler Goodlet | 6b93eedcda | |
Tyler Goodlet | a786df65de | |
Tyler Goodlet | 8f2823d5f0 | |
Tyler Goodlet | 58fe220fde | |
Tyler Goodlet | 161448c31a | |
Tyler Goodlet | 1c685189d1 | |
Tyler Goodlet | ceac3f2ee4 | |
Tyler Goodlet | a07367fae2 | |
Tyler Goodlet | 006190d227 | |
Tyler Goodlet | 412197019e | |
Tyler Goodlet | 271e378ce3 | |
Tyler Goodlet | 8e07fda88f | |
Tyler Goodlet | a4935b8fa8 | |
Tyler Goodlet | 2b76baeb10 | |
Tyler Goodlet | 2dfa8976a0 | |
Tyler Goodlet | d3402f715b | |
Tyler Goodlet | f070f9a984 | |
Tyler Goodlet | 416270ee6c | |
Tyler Goodlet | 14bee778ec | |
Tyler Goodlet | 10c1944de5 | |
Tyler Goodlet | 7958d8ad4f | |
Tyler Goodlet | 50c5dc255c | |
Tyler Goodlet | 31735f26d3 | |
Tyler Goodlet | 2ef6460853 | |
Tyler Goodlet | 5e98a30537 | |
Tyler Goodlet | dd03ef42ac | |
Tyler Goodlet | 59884d251e | |
Tyler Goodlet | e06e257a81 | |
Tyler Goodlet | 6e574835c8 | |
Tyler Goodlet | 49ccfdd673 | |
Tyler Goodlet | 3a434f312b | |
Tyler Goodlet | bb4dc448b3 | |
Tyler Goodlet | 9846396df2 | |
Tyler Goodlet | f0d417ce42 | |
Tyler Goodlet | 55fc4114b4 | |
Tyler Goodlet | 97b074365b | |
Tyler Goodlet | f79c3617d6 | |
Tyler Goodlet | 861fe791eb | |
Tyler Goodlet | 60052ff73a | |
Tyler Goodlet | 4d2708cd42 | |
Tyler Goodlet | d1cc52dff5 | |
Tyler Goodlet | 4fa901dbcb | |
goodboy | f2c488c1e0 | |
Tyler Goodlet | 4a9c16d298 | |
Tyler Goodlet | b9d5b904f4 | |
Tyler Goodlet | 0aef762d9a | |
goodboy | c724117c1a | |
Tyler Goodlet | cc3bb85c66 | |
goodboy | 20817313b1 | |
Tyler Goodlet | 23d0b8a7ac | |
goodboy | 087a34f061 | |
Tyler Goodlet | 653f5c824b | |
Tyler Goodlet | f9217570ab | |
Tyler Goodlet | 7f224f0342 | |
Tyler Goodlet | 75a5f3795a | |
Tyler Goodlet | de9f215c83 | |
Tyler Goodlet | 848e345364 | |
Tyler Goodlet | 38b190e598 | |
Tyler Goodlet | 3a9bc8058f | |
Guillermo Rodriguez | 739a231afc | |
Tyler Goodlet | 7dfa4c3cde | |
Tyler Goodlet | 7b653fe4f4 | |
goodboy | 77a687bced | |
Tyler Goodlet | d5c1cdd91d | |
Tyler Goodlet | 46d3fe88ca | |
Tyler Goodlet | 5c8c5d8fbf | |
goodboy | 71412310c4 | |
Guillermo Rodriguez | 0c323fdc0b | |
Tyler Goodlet | 02f53d0c13 | |
Tyler Goodlet | 8792c97de6 | |
Tyler Goodlet | 980815d075 | |
Tyler Goodlet | 4cedfedc21 | |
Tyler Goodlet | fe3d0c6fdd | |
Tyler Goodlet | 9200e8da57 | |
Tyler Goodlet | 430d065da6 | |
Tyler Goodlet | ecd93cb05a | |
Guillermo Rodriguez | 4facd161a9 | |
goodboy | c5447fda06 | |
Guillermo Rodriguez | 0447612b34 | |
goodboy | b5499b8225 | |
Guillermo Rodriguez | 00aabddfe8 | |
Guillermo Rodriguez | 43fb720877 | |
Guillermo Rodriguez | 9626dbd7ac | |
Guillermo Rodriguez | f286c79a03 | |
Guillermo Rodriguez | accb0eee6c | |
Guillermo Rodriguez | e97dd1cbdb | |
Guillermo Rodriguez | 34fb497eb4 | |
Guillermo Rodriguez | 6669ba6590 | |
Guillermo Rodriguez | cb8099bb8c | |
Guillermo Rodriguez | 80a1a58bfc | |
Guillermo Rodriguez | d60f222bb7 | |
Guillermo Rodriguez | 2c2e43d8ac | |
Guillermo Rodriguez | 212b3d620d | |
Guillermo Rodriguez | 92090b01b8 | |
Guillermo Rodriguez | 9073fbc317 | |
Guillermo Rodriguez | f55f56a29f | |
Guillermo Rodriguez | 28e025d02e | |
Guillermo Rodriguez | e558e5837e | |
Guillermo Rodriguez | a0b415095a | |
Guillermo Rodriguez | 6df181c233 | |
Guillermo Rodriguez | 7acc4e3208 | |
Guillermo Rodriguez | 10ea242143 | |
Tyler Goodlet | eda6ecd529 | |
goodboy | cf5b0bf9c6 | |
Tyler Goodlet | b9dba48306 | |
Tyler Goodlet | 4d2e23b5ce | |
Tyler Goodlet | 973bf87e67 | |
Tyler Goodlet | 5861839783 | |
Tyler Goodlet | 06845e5504 | |
Tyler Goodlet | 43bdd4d022 | |
Tyler Goodlet | bafd2cb44f | |
Tyler Goodlet | be8fd32e7d | |
Tyler Goodlet | ee8c00684b | |
Tyler Goodlet | 7379dc03af | |
Tyler Goodlet | a602c47d47 | |
Tyler Goodlet | 317610e00a | |
Tyler Goodlet | c4af706d51 | |
Tyler Goodlet | 665bb183f7 | |
Tyler Goodlet | f6ba95a6c7 | |
Tyler Goodlet | e2cd8c4aef | |
Tyler Goodlet | c8bff81220 | |
Tyler Goodlet | 2aec1c5f1d | |
Tyler Goodlet | bec32956a8 | |
Tyler Goodlet | 91fdc7c5c7 | |
Tyler Goodlet | b59ed74bc1 | |
Tyler Goodlet | 16012f6f02 | |
Tyler Goodlet | 2b61672723 | |
Tyler Goodlet | 176b230a46 | |
Tyler Goodlet | 7fa9dbf869 | |
Tyler Goodlet | 87ed9abefa | |
Tyler Goodlet | 2548aae73d | |
Tyler Goodlet | 1cfa04927d | |
Tyler Goodlet | e34ea94f9f | |
Tyler Goodlet | 1510383738 | |
Tyler Goodlet | 016b669d63 | |
Tyler Goodlet | 682a0191ef | |
Tyler Goodlet | 9e36dbe47f | |
goodboy | 8bef67642e | |
Tyler Goodlet | 52febac6ae | |
Tyler Goodlet | f202699c25 | |
Tyler Goodlet | 0fb07670d2 | |
Tyler Goodlet | 73d2e7716f | |
Tyler Goodlet | 999ae5a1c6 | |
Tyler Goodlet | 23ba0e5e69 | |
Tyler Goodlet | 941a2196b3 | |
Tyler Goodlet | 0cf4e07b84 | |
Tyler Goodlet | 7bec989eed | |
Tyler Goodlet | 6856ca207f | |
Guillermo Rodriguez | 2e5616850c | |
Tyler Goodlet | a83bd9c608 | |
goodboy | 9651ca84bf | |
Tyler Goodlet | 109b35f6eb | |
Tyler Goodlet | e28c1748fc | |
Tyler Goodlet | 72889b4d1f | |
Tyler Goodlet | ae001c3dd7 | |
Tyler Goodlet | 2309e7ab05 | |
Tyler Goodlet | 46c51b55f7 | |
goodboy | a9185e7d6f | |
Tyler Goodlet | 3a0987e0be | |
Tyler Goodlet | d280a592b1 | |
goodboy | ef5829a6b7 | |
Tyler Goodlet | 30bcfdcc83 | |
Tyler Goodlet | 1a291939c3 | |
Tyler Goodlet | 69e501764a | |
goodboy | 7f3f7f0372 | |
Tyler Goodlet | 1cbf45b4c4 | |
Tyler Goodlet | 227a80469e | |
Tyler Goodlet | dc8072c6db | |
Tyler Goodlet | 808dbb12e6 | |
Tyler Goodlet | 44e21b1de9 | |
Tyler Goodlet | b3058b8c78 | |
Tyler Goodlet | db564d7977 | |
Tyler Goodlet | e6a3e8b65a | |
Tyler Goodlet | d43ba47ebe | |
Tyler Goodlet | 168c9863cb | |
Tyler Goodlet | 0fb31586fd | |
Tyler Goodlet | 8b609f531b | |
Tyler Goodlet | d502274eb9 | |
Tyler Goodlet | b1419c850d | |
Tyler Goodlet | aa7f24b6db | |
Tyler Goodlet | 319e68c855 | |
Tyler Goodlet | 64f920d7e5 | |
Tyler Goodlet | 3b79743c7b | |
Tyler Goodlet | 54008a1976 | |
Tyler Goodlet | b96b7a8b9c | |
Tyler Goodlet | 0fca1b3e1a | |
Tyler Goodlet | 2386270cad | |
Tyler Goodlet | 5b135fad61 | |
Tyler Goodlet | abb6854e74 | |
Tyler Goodlet | 22f9b2552c | |
Tyler Goodlet | 57f2478dc7 | |
Tyler Goodlet | 5dc9a61ec4 | |
Tyler Goodlet | b0d3d9bb01 | |
Tyler Goodlet | caecbaa231 | |
Tyler Goodlet | a20a8d95d5 | |
Tyler Goodlet | ba93f96c71 | |
Tyler Goodlet | 804e9afdde | |
Tyler Goodlet | 89bcaed15e | |
Tyler Goodlet | bb2f8e4304 | |
Tyler Goodlet | 8ab8268edc | |
Tyler Goodlet | bbcc55b24c | |
Tyler Goodlet | 9fa9c27e4d | |
Tyler Goodlet | d9b4c4a413 | |
Tyler Goodlet | 84cab1327d | |
Tyler Goodlet | df4cec930b | |
Tyler Goodlet | ab08dc582d | |
Tyler Goodlet | f79d9865a0 | |
Tyler Goodlet | 00378c330c | |
goodboy | 180b97b180 | |
Tyler Goodlet | f0b3a4d5c0 | |
goodboy | e2e66324cc | |
Tyler Goodlet | d950c78b81 | |
Tyler Goodlet | 7dbcbfdcd5 | |
Tyler Goodlet | 279c899de5 | |
Tyler Goodlet | db5aacdb9c | |
Tyler Goodlet | c7b84ab500 | |
Tyler Goodlet | 9967adb371 | |
Tyler Goodlet | 30ff793a22 | |
Tyler Goodlet | 666587991a | |
goodboy | 01005e40a8 | |
goodboy | d81e629c29 | |
Tyler Goodlet | 2766fad719 | |
Tyler Goodlet | ae71168216 | |
Tyler Goodlet | a0c238daa7 | |
Tyler Goodlet | 7cbdc6a246 | |
Tyler Goodlet | 2ff8be71aa | |
Tyler Goodlet | ddffaa952d | |
Tyler Goodlet | 5520e9ef21 | |
Tyler Goodlet | 958e542f7d | |
goodboy | 927bbc7258 | |
Tyler Goodlet | 45bef0cea9 | |
goodboy | a3d46f713e | |
Tyler Goodlet | 5684120c11 | |
Tyler Goodlet | ddb195ed2c | |
Tyler Goodlet | 6747831677 | |
Tyler Goodlet | 9326379b04 | |
Tyler Goodlet | 09d9a7ea2b | |
Tyler Goodlet | 45871d5846 | |
goodboy | bf7a49c19b | |
goodboy | 0a7fce087c | |
Tyler Goodlet | d3130ca04c | |
Tyler Goodlet | e30a3c5b54 | |
Tyler Goodlet | 2393965e83 | |
Tyler Goodlet | fb39da19f4 | |
Tyler Goodlet | a27431c34f | |
Tyler Goodlet | 070b9f3dc1 | |
goodboy | f2dba44169 | |
Tyler Goodlet | 0ef5da0881 | |
Tyler Goodlet | 0580b204a3 | |
Tyler Goodlet | 6ce699ae1f | |
Tyler Goodlet | 3aa72abacf | |
Tyler Goodlet | 04004525c1 | |
Tyler Goodlet | a7f0adf1cf | |
Tyler Goodlet | cef511092d | |
Tyler Goodlet | 4e5df973a9 | |
Tyler Goodlet | 6a1a62d8c0 | |
Tyler Goodlet | e0491cf2e7 | |
Tyler Goodlet | 90bc9b9730 | |
goodboy | f449672c68 | |
Tyler Goodlet | fd22f45178 | |
goodboy | 37f634a2ed | |
Tyler Goodlet | dfee9dd97e | |
Tyler Goodlet | 2a99f7a4d7 | |
Tyler Goodlet | b44e2d9ed9 | |
Tyler Goodlet | 795d4d76f4 | |
Tyler Goodlet | c26acb1fa8 | |
Tyler Goodlet | 11b6699a54 | |
Tyler Goodlet | f9bdd643cf | |
Tyler Goodlet | 2baea21c7d | |
Tyler Goodlet | bea0111753 | |
Tyler Goodlet | c870665be0 | |
Tyler Goodlet | 4ff1090284 | |
Tyler Goodlet | f22461a844 | |
Tyler Goodlet | 458c7211ee | |
Tyler Goodlet | 5cc4b19a7c | |
goodboy | f5236f658b | |
goodboy | a360b66cc0 | |
Tyler Goodlet | 4bcb791161 | |
Tyler Goodlet | 4c7c78c815 | |
Tyler Goodlet | 019867b413 | |
Tyler Goodlet | f356fb0a68 | |
goodboy | 756249ff70 | |
goodboy | 419ebebe72 | |
goodboy | a229996ebe | |
Tyler Goodlet | af01e89612 | |
Tyler Goodlet | 609034c634 | |
Tyler Goodlet | 95dd0e6bd6 | |
goodboy | 479ad1bb15 | |
Tyler Goodlet | d506235a8b | |
Tyler Goodlet | 7846446a44 | |
Tyler Goodlet | 214f864dcf | |
Tyler Goodlet | 4c0f2099aa | |
Tyler Goodlet | aea7bec2c3 | |
Tyler Goodlet | 47777e4192 | |
Tyler Goodlet | f6888057c3 | |
Tyler Goodlet | f65f56ec75 | |
Tyler Goodlet | 5d39b04552 | |
Tyler Goodlet | 735fbc6259 | |
Tyler Goodlet | fcd7e0f3f3 | |
Tyler Goodlet | 9106d13dfe | |
Tyler Goodlet | d3caad6e11 | |
Tyler Goodlet | f87a2a810a | |
Tyler Goodlet | 208e2e9e97 | |
Tyler Goodlet | 90cc6eb317 | |
Tyler Goodlet | b118becc84 | |
Tyler Goodlet | 7442d68ecf | |
Tyler Goodlet | 076c167d6e | |
Tyler Goodlet | 64d8cd448f | |
Tyler Goodlet | ec6a28a8b1 | |
Tyler Goodlet | cc15d02488 | |
goodboy | d5bc43e8dd | |
Tyler Goodlet | 287a2c8396 | |
Tyler Goodlet | 453ebdfe30 | |
Tyler Goodlet | 2b1fb90e03 | |
Tyler Goodlet | 695ba5288d | |
Tyler Goodlet | d6c32bba86 | |
Tyler Goodlet | fa89207583 | |
Tyler Goodlet | 557562e25c | |
Tyler Goodlet | c6efa2641b | |
Tyler Goodlet | 8a7e391b4e | |
Tyler Goodlet | aec48a1dd5 | |
Tyler Goodlet | 87f301500d | |
Tyler Goodlet | 566a54ffb6 | |
Tyler Goodlet | f9c4b3cc96 | |
Tyler Goodlet | a12e6800ff | |
Tyler Goodlet | cc68501c7a | |
Tyler Goodlet | 7ebf8a8dc0 | |
Tyler Goodlet | 4475823e48 | |
Tyler Goodlet | 3713288b48 | |
Tyler Goodlet | 4fdfb81876 | |
Tyler Goodlet | f32b4d37cb | |
Tyler Goodlet | 2063b9d8bb | |
Tyler Goodlet | fe14605034 | |
Tyler Goodlet | 68b32208de | |
Tyler Goodlet | f1fe369bbf | |
Tyler Goodlet | 16b2937d23 | |
Tyler Goodlet | bfad676b7c | |
Tyler Goodlet | c617a06905 | |
Tyler Goodlet | ff74f4302a | |
Tyler Goodlet | 21153a0e1e | |
Tyler Goodlet | b6f344f34a | |
Tyler Goodlet | ecdc747ced | |
Tyler Goodlet | 5147cd7be0 | |
Tyler Goodlet | 3dcb72d429 | |
Tyler Goodlet | fbee33b00d | |
Tyler Goodlet | 3991d8f911 | |
Tyler Goodlet | 7b2e8f1ba5 | |
Tyler Goodlet | cbcbb2b243 | |
Tyler Goodlet | cd3bfb1ea4 | |
Tyler Goodlet | 82b718d5a3 | |
Tyler Goodlet | 05a1a4e3d8 | |
Tyler Goodlet | 412138a75b | |
Tyler Goodlet | c1b63f4757 | |
Tyler Goodlet | 5d774bef90 | |
Tyler Goodlet | de77c7d209 | |
Tyler Goodlet | ce1eb11b59 | |
Tyler Goodlet | b629ce177d | |
Tyler Goodlet | 73fa320917 | |
Tyler Goodlet | dd05ed1371 | |
Tyler Goodlet | 2a641ab8b4 | |
Tyler Goodlet | f8f7ca350c | |
Tyler Goodlet | 88b4ccc768 | |
Tyler Goodlet | eb2bad5138 | |
Tyler Goodlet | f768576060 | |
Tyler Goodlet | add0e92335 | |
Tyler Goodlet | 1eb7e109e6 | |
Tyler Goodlet | 725909a94c | |
Tyler Goodlet | 050aa7594c | |
Tyler Goodlet | 450009ff9c | |
goodboy | b2d5892010 | |
goodboy | 5a3b465ac0 | |
Tyler Goodlet | be7afdaa89 | |
Tyler Goodlet | 1c561207f5 | |
Tyler Goodlet | ed2c962bb9 | |
Tyler Goodlet | 147ceca016 | |
Tyler Goodlet | 03a7940f83 | |
Tyler Goodlet | dd2a9f74f1 | |
Tyler Goodlet | 49c720af3c | |
Tyler Goodlet | c620517543 | |
Tyler Goodlet | a425c29ef1 | |
Tyler Goodlet | 783914c7fe | |
Tyler Goodlet | 920a394539 | |
Tyler Goodlet | e977597cd0 | |
Tyler Goodlet | 7a33ba64f1 | |
Tyler Goodlet | 191b94b67c | |
Tyler Goodlet | 4ad7b073c3 | |
Tyler Goodlet | d92ff9c7a0 |
.github/workflows/ci.yml:

```diff
@@ -3,9 +3,8 @@ name: CI
 on:
   # Triggers the workflow on push or pull request events but only for the master branch
-  push:
-    branches: [ master ]
   pull_request:
+  push:
     branches: [ master ]

   # Allows you to run this workflow manually from the Actions tab
@@ -14,21 +13,46 @@ on:
 jobs:

+  # test that we can generate a software distribution and install it
+  # thus avoid missing file issues after packaging.
+  sdist-linux:
+    name: 'sdist'
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v3
+
+      - name: Setup python
+        uses: actions/setup-python@v2
+        with:
+          python-version: '3.10'
+
+      - name: Build sdist
+        run: python setup.py sdist --formats=zip
+
+      - name: Install sdist from .zips
+        run: python -m pip install dist/*.zip
+
   testing:
     name: 'install + test-suite'
+    timeout-minutes: 10
     runs-on: ubuntu-latest
     steps:

       - name: Checkout
         uses: actions/checkout@v3

+      - name: Build DB container
+        run: docker build -t piker:elastic dockering/elastic
+
       - name: Setup python
         uses: actions/setup-python@v3
         with:
           python-version: '3.10'

       - name: Install dependencies
-        run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
+        run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager

       - name: Test suite
         run: pytest tests -rs
```
README.rst (13 lines added):

```diff
@@ -71,6 +71,19 @@ for a development install::
     source ./env/bin/activate
     pip install -r requirements.txt -e .

+install for nixos
+*****************
+for users of `NixOS` we offer a development shell environment that can be
+loaded with::
+
+    nix-shell develop.nix
+
+this will set up the required python environment to run piker; make sure to
+run::
+
+    pip install -r requirements.txt -e .
+
+once after loading the shell
+
 install for tinas
 *****************
```
config/brokers.toml:

```diff
@@ -50,3 +50,8 @@ prefer_data_account = [
 paper = "XX0000000"
 margin = "X0000000"
 ira = "X0000000"
+
+
+[deribit]
+key_id = 'XXXXXXXX'
+key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'
```
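To make the shape of this new section concrete, here is a minimal sketch of reading it back out with piker's config loader; the exact `config.load()` signature is an assumption and the values are the placeholder creds from the diff:

```python
# sketch: reading the new [deribit] section from the brokers config.
# piker.config.load() is assumed to return (parsed_toml_dict, file_path).
from piker import config

conf, path = config.load()
deribit = conf['deribit']
key_id = deribit['key_id']          # 'XXXXXXXX' placeholder above
key_secret = deribit['key_secret']  # never commit real credentials
```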
develop.nix (new file, @@ -0,0 +1,32 @@):

```nix
with (import <nixpkgs> {});
with python310Packages;
stdenv.mkDerivation {
  name = "pip-env";
  buildInputs = [
    # System requirements.
    readline

    # Python requirements (enough to get a virtualenv going).
    python310Full
    virtualenv
    setuptools
    pyqt5
    pip
  ];
  src = null;
  shellHook = ''
    # Allow the use of wheels.
    SOURCE_DATE_EPOCH=$(date +%s)

    # Augment the dynamic linker path
    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${R}/lib/R/lib:${readline}/lib

    export QT_QPA_PLATFORM_PLUGIN_PATH="${qt5.qtbase.bin}/lib/qt-${qt5.qtbase.version}/plugins";

    if [ ! -d "venv" ]; then
      virtualenv venv
    fi

    source venv/bin/activate
  '';
}
```
dockering/elastic/Dockerfile (new file, @@ -0,0 +1,11 @@):

```dockerfile
FROM elasticsearch:7.17.4

ENV ES_JAVA_OPTS "-Xms2g -Xmx2g"
ENV ELASTIC_USERNAME "elastic"
ENV ELASTIC_PASSWORD "password"

COPY elasticsearch.yml /usr/share/elasticsearch/config/

RUN printf "password" | ./bin/elasticsearch-keystore add -f -x "bootstrap.password"

EXPOSE 19200
```

dockering/elastic/elasticsearch.yml (new file, @@ -0,0 +1,5 @@):

```yaml
network.host: 0.0.0.0

http.port: 19200

discovery.type: single-node
```
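For a quick local smoke test of the image the CI step builds, something like the following should work; a sketch only — the port and bootstrap credentials come from the two files above, and `_cat/health` is the standard elasticsearch health endpoint:

```python
# sketch: ping the elastic container after running
#   docker build -t piker:elastic dockering/elastic
#   docker run -d -p 19200:19200 piker:elastic
# creds/port are the ones baked into the Dockerfile + yml above.
import base64
import urllib.request

req = urllib.request.Request('http://localhost:19200/_cat/health')
token = base64.b64encode(b'elastic:password').decode()
req.add_header('Authorization', f'Basic {token}')
print(urllib.request.urlopen(req).read().decode())
```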
docker-compose.yml (ib gateway):

```diff
@@ -3,11 +3,12 @@
 version: "3.5"

 services:
-  ib-gateway:
+  ib_gw_paper:
     # other image tags available:
     # https://github.com/waytrade/ib-gateway-docker#supported-tags
-    image: waytrade/ib-gateway:981.3j
-    restart: always
+    # image: waytrade/ib-gateway:981.3j
+    image: waytrade/ib-gateway:1012.2i
+    restart: 'no'  # restart on boot whenever there's a crash or user clicks
     network_mode: 'host'

     volumes:
@@ -39,14 +40,12 @@ services:
     # this compose file which looks something like:
     # TWS_USERID='myuser'
     # TWS_PASSWORD='guest'
-    # TRADING_MODE=paper (or live)
-    # VNC_SERVER_PASSWORD='diggity'

     environment:
       TWS_USERID: ${TWS_USERID}
       TWS_PASSWORD: ${TWS_PASSWORD}
-      TRADING_MODE: ${TRADING_MODE:-paper}
-      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}
+      TRADING_MODE: 'paper'
+      VNC_SERVER_PASSWORD: 'doggy'
+      VNC_SERVER_PORT: '3003'

     # ports:
     #   - target: 4002
@@ -62,3 +61,40 @@ services:
     #   - "127.0.0.1:4001:4001"
     #   - "127.0.0.1:4002:4002"
     #   - "127.0.0.1:5900:5900"
+
+  # ib_gw_live:
+  #   image: waytrade/ib-gateway:1012.2i
+  #   restart: no
+  #   network_mode: 'host'
+
+  #   volumes:
+  #     - type: bind
+  #       source: ./jts_live.ini
+  #       target: /root/jts/jts.ini
+  #       # don't let ibc clobber this file for
+  #       # the main reason of not having a stupid
+  #       # timezone set..
+  #       read_only: true
+
+  #     # force our own ibc config
+  #     - type: bind
+  #       source: ./ibc.ini
+  #       target: /root/ibc/config.ini
+
+  #     # force our noop script - socat isn't needed in host mode.
+  #     - type: bind
+  #       source: ./fork_ports_delayed.sh
+  #       target: /root/scripts/fork_ports_delayed.sh
+
+  #     # force our noop script - socat isn't needed in host mode.
+  #     - type: bind
+  #       source: ./run_x11_vnc.sh
+  #       target: /root/scripts/run_x11_vnc.sh
+  #       read_only: true
+
+  #   # NOTE: to fill these out, define an `.env` file in the same dir as
+  #   # this compose file which looks something like:
+  #   environment:
+  #     TRADING_MODE: 'live'
+  #     VNC_SERVER_PASSWORD: 'doggy'
+  #     VNC_SERVER_PORT: '3004'
```
ibc.ini:

```diff
@@ -188,7 +188,7 @@ AcceptNonBrokerageAccountWarning=yes
 #
 # The default value is 60.

-LoginDialogDisplayTimeout = 60
+LoginDialogDisplayTimeout=20



@@ -292,7 +292,7 @@ ExistingSessionDetectedAction=primary
 # be set dynamically at run-time: most users will never need it,
 # so don't use it unless you know you need it.

-OverrideTwsApiPort=4002
+; OverrideTwsApiPort=4002


 # Read-only Login
```
New IB `jts.ini`-style gateway config (new file, @@ -0,0 +1,33 @@):

```ini
[IBGateway]
ApiOnly=true
LocalServerPort=4001
# NOTE: must be set if using IBC's "reject" mode
TrustedIPs=127.0.0.1
; RemoteHostOrderRouting=ndc1.ibllc.com
; WriteDebug=true
; RemotePortOrderRouting=4001
; useRemoteSettings=false
; tradingMode=p
; Steps=8
; colorPalletName=dark

# window geo, this may be useful for sending `xdotool` commands?
; MainWindow.Width=1986
; screenHeight=3960


[Logon]
Locale=en
# most markets are oriented around this zone
# so might as well hard code it.
TimeZone=America/New_York
UseSSL=true
displayedproxymsg=1
os_titlebar=true
s3store=true
useRemoteSettings=false

[Communication]
ctciAutoEncrypt=true
Region=usr
; Peer=cdc1.ibllc.com:4001
```
run_x11_vnc.sh:

```diff
@@ -1,16 +1,35 @@
 #!/bin/sh
-# start vnc server and listen for connections
-# on port specced in `$VNC_SERVER_PORT`
-
+# start VNC server
 x11vnc \
-    -ncache_cr \
-    -listen localhost \
+    -listen 127.0.0.1 \
+    -allow 127.0.0.1 \
     -rfbport "${VNC_SERVER_PORT}" \
+    -display :1 \
     -forever \
     -shared \
-    -logappend /var/log/x11vnc.log \
     -bg \
+    -nowf \
+    -noxdamage \
+    -noxfixes \
+    -no6 \
     -noipv6 \
-    # can't use this because of ``asyncvnc`` issue:
+    -autoport 3003 \
+
+
+    # -nowcr \
+    # TODO: can't use this because of ``asyncvnc`` issue:
     # https://github.com/barneygale/asyncvnc/issues/1
     # -passwd 'ibcansmbz'
+
+    # XXX: optional graphics caching flags that seem to rekt the overlay
+    # of the 2 gw windows? When running a single gateway
+    # this seems to maybe optimize some memory usage?
+    # -ncache_cr \
+    # -ncache \
+
+    # NOTE: this will prevent logs from going to the console.
+    # -logappend /var/log/x11vnc.log \
+
+    # where to start allocating ports
+    # -autoport "${VNC_SERVER_PORT}" \
```
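Since the script's comments call out `asyncvnc` (and its missing-auth issue as the reason no `-passwd` is set), here is a hedged sketch of a client attaching to this server; the API is taken from asyncvnc's README and the key combo is purely illustrative:

```python
# sketch: attach to the gateway's x11vnc server with `asyncvnc`,
# connecting password-less per the issue linked in the script above.
import asyncio
import asyncvnc

async def main():
    async with asyncvnc.connect('127.0.0.1', port=3003) as client:
        # illustrative input event; piker drives the gateway UI
        # with similar key presses
        client.keyboard.press('Ctrl', 'Alt', 'End')

asyncio.run(main())
```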
piker/__init__.py:

```diff
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers.
-# Copyright 2020-eternity Tyler Goodlet (in stewardship for piker0)
+# Copyright 2020-eternity Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -14,7 +14,14 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
+'''
 piker: trading gear for hackers.

-"""
+'''
+from .service import open_piker_runtime
+from .data.feed import open_feed
+
+__all__ = [
+    'open_piker_runtime',
+    'open_feed',
+]
```
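These two re-exports form the new minimal public entrypoint. A hedged sketch of consuming them follows; the fqsn string, the `feed.streams` attribute, and the exact signatures are assumptions based on surrounding piker code, not confirmed by this diff:

```python
# sketch only: exercising the new top-level exports.
# `open_feed()`'s parameters and the feed's stream attribute are
# assumed, not taken from this diff.
import trio
from piker import open_piker_runtime, open_feed

async def main():
    async with (
        open_piker_runtime('quote_printer'),
        open_feed(['btcusdt.binance']) as feed,
    ):
        async for quotes in feed.streams['binance']:
            for fqsn, quote in quotes.items():
                print(fqsn, quote)
            break  # one tick batch is enough for a demo

trio.run(main)
```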
561
piker/_daemon.py
561
piker/_daemon.py
|
@ -1,561 +0,0 @@
|
|||
# piker: trading gear for hackers
|
||||
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
|
||||
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU Affero General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU Affero General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU Affero General Public License
|
||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||
|
||||
"""
|
||||
Structured, daemon tree service management.
|
||||
|
||||
"""
|
||||
from typing import Optional, Union, Callable, Any
|
||||
from contextlib import asynccontextmanager as acm
|
||||
from collections import defaultdict
|
||||
|
||||
from pydantic import BaseModel
|
||||
import trio
|
||||
from trio_typing import TaskStatus
|
||||
import tractor
|
||||
|
||||
from .log import get_logger, get_console_log
|
||||
from .brokers import get_brokermod
|
||||
|
||||
|
||||
log = get_logger(__name__)
|
||||
|
||||
_root_dname = 'pikerd'
|
||||
|
||||
_registry_addr = ('127.0.0.1', 6116)
|
||||
_tractor_kwargs: dict[str, Any] = {
|
||||
# use a different registry addr then tractor's default
|
||||
'arbiter_addr': _registry_addr
|
||||
}
|
||||
_root_modules = [
|
||||
__name__,
|
||||
'piker.clearing._ems',
|
||||
'piker.clearing._client',
|
||||
]
|
||||
|
||||
|
||||
class Services(BaseModel):
|
||||
|
||||
actor_n: tractor._supervise.ActorNursery
|
||||
service_n: trio.Nursery
|
||||
debug_mode: bool # tractor sub-actor debug mode flag
|
||||
service_tasks: dict[str, tuple[trio.CancelScope, tractor.Portal]] = {}
|
||||
|
||||
class Config:
|
||||
arbitrary_types_allowed = True
|
||||
|
||||
async def start_service_task(
|
||||
self,
|
||||
name: str,
|
||||
portal: tractor.Portal,
|
||||
target: Callable,
|
||||
**kwargs,
|
||||
|
||||
) -> (trio.CancelScope, tractor.Context):
|
||||
'''
|
||||
Open a context in a service sub-actor, add to a stack
|
||||
that gets unwound at ``pikerd`` teardown.
|
||||
|
||||
This allows for allocating long-running sub-services in our main
|
||||
daemon and explicitly controlling their lifetimes.
|
||||
|
||||
'''
|
||||
async def open_context_in_task(
|
||||
task_status: TaskStatus[
|
||||
trio.CancelScope] = trio.TASK_STATUS_IGNORED,
|
||||
|
||||
) -> Any:
|
||||
|
||||
with trio.CancelScope() as cs:
|
||||
async with portal.open_context(
|
||||
target,
|
||||
**kwargs,
|
||||
|
||||
) as (ctx, first):
|
||||
|
||||
# unblock once the remote context has started
|
||||
task_status.started((cs, first))
|
||||
log.info(
|
||||
f'`pikerd` service {name} started with value {first}'
|
||||
)
|
||||
try:
|
||||
# wait on any context's return value
|
||||
ctx_res = await ctx.result()
|
||||
except tractor.ContextCancelled:
|
||||
return await self.cancel_service(name)
|
||||
else:
|
||||
# wait on any error from the sub-actor
|
||||
# NOTE: this will block indefinitely until
|
||||
# cancelled either by error from the target
|
||||
# context function or by being cancelled here by
|
||||
# the surrounding cancel scope
|
||||
return (await portal.result(), ctx_res)
|
||||
|
||||
cs, first = await self.service_n.start(open_context_in_task)
|
||||
|
||||
# store the cancel scope and portal for later cancellation or
|
||||
# retstart if needed.
|
||||
self.service_tasks[name] = (cs, portal)
|
||||
|
||||
return cs, first
|
||||
|
||||
# TODO: per service cancellation by scope, we aren't using this
|
||||
# anywhere right?
|
||||
async def cancel_service(
|
||||
self,
|
||||
name: str,
|
||||
) -> Any:
|
||||
log.info(f'Cancelling `pikerd` service {name}')
|
||||
cs, portal = self.service_tasks[name]
|
||||
# XXX: not entirely sure why this is required,
|
||||
# and should probably be better fine tuned in
|
||||
# ``tractor``?
|
||||
cs.cancel()
|
||||
return await portal.cancel_actor()
|
||||
|
||||
|
||||
_services: Optional[Services] = None
|
||||
|
||||
|
||||
@acm
|
||||
async def open_pikerd(
|
||||
start_method: str = 'trio',
|
||||
loglevel: Optional[str] = None,
|
||||
|
||||
# XXX: you should pretty much never want debug mode
|
||||
# for data daemons when running in production.
|
||||
debug_mode: bool = False,
|
||||
|
||||
) -> Optional[tractor._portal.Portal]:
|
||||
'''
|
||||
Start a root piker daemon who's lifetime extends indefinitely
|
||||
until cancelled.
|
||||
|
||||
A root actor nursery is created which can be used to create and keep
|
||||
alive underling services (see below).
|
||||
|
||||
'''
|
||||
global _services
|
||||
assert _services is None
|
||||
|
||||
# XXX: this may open a root actor as well
|
||||
async with (
|
||||
tractor.open_root_actor(
|
||||
|
||||
# passed through to ``open_root_actor``
|
||||
arbiter_addr=_registry_addr,
|
||||
name=_root_dname,
|
||||
loglevel=loglevel,
|
||||
debug_mode=debug_mode,
|
||||
start_method=start_method,
|
||||
|
||||
# TODO: eventually we should be able to avoid
|
||||
# having the root have more then permissions to
|
||||
# spawn other specialized daemons I think?
|
||||
enable_modules=_root_modules,
|
||||
) as _,
|
||||
|
||||
tractor.open_nursery() as actor_nursery,
|
||||
):
|
||||
async with trio.open_nursery() as service_nursery:
|
||||
|
||||
# # setup service mngr singleton instance
|
||||
# async with AsyncExitStack() as stack:
|
||||
|
||||
# assign globally for future daemon/task creation
|
||||
_services = Services(
|
||||
actor_n=actor_nursery,
|
||||
service_n=service_nursery,
|
||||
debug_mode=debug_mode,
|
||||
)
|
||||
|
||||
yield _services
|
||||
|
||||
|
||||
@acm
|
||||
async def open_piker_runtime(
|
||||
name: str,
|
||||
enable_modules: list[str] = [],
|
||||
start_method: str = 'trio',
|
||||
loglevel: Optional[str] = None,
|
||||
|
||||
# XXX: you should pretty much never want debug mode
|
||||
# for data daemons when running in production.
|
||||
debug_mode: bool = False,
|
||||
|
||||
) -> Optional[tractor._portal.Portal]:
|
||||
'''
|
||||
Start a piker actor who's runtime will automatically
|
||||
sync with existing piker actors in local network
|
||||
based on configuration.
|
||||
|
||||
'''
|
||||
global _services
|
||||
assert _services is None
|
||||
|
||||
# XXX: this may open a root actor as well
|
||||
async with (
|
||||
tractor.open_root_actor(
|
||||
|
||||
# passed through to ``open_root_actor``
|
||||
arbiter_addr=_registry_addr,
|
||||
name=name,
|
||||
loglevel=loglevel,
|
||||
debug_mode=debug_mode,
|
||||
start_method=start_method,
|
||||
|
||||
# TODO: eventually we should be able to avoid
|
||||
# having the root have more then permissions to
|
||||
# spawn other specialized daemons I think?
|
||||
enable_modules=_root_modules,
|
||||
) as _,
|
||||
):
|
||||
yield tractor.current_actor()
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_runtime(
|
||||
loglevel: Optional[str] = None,
|
||||
**kwargs,
|
||||
|
||||
) -> None:
|
||||
"""
|
||||
Start the ``tractor`` runtime (a root actor) if none exists.
|
||||
|
||||
"""
|
||||
settings = _tractor_kwargs
|
||||
settings.update(kwargs)
|
||||
|
||||
if not tractor.current_actor(err_on_no_runtime=False):
|
||||
async with tractor.open_root_actor(
|
||||
loglevel=loglevel,
|
||||
**settings,
|
||||
):
|
||||
yield
|
||||
else:
|
||||
yield
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_open_pikerd(
|
||||
loglevel: Optional[str] = None,
|
||||
**kwargs,
|
||||
|
||||
) -> Union[tractor._portal.Portal, Services]:
|
||||
"""If no ``pikerd`` daemon-root-actor can be found start it and
|
||||
yield up (we should probably figure out returning a portal to self
|
||||
though).
|
||||
|
||||
"""
|
||||
if loglevel:
|
||||
get_console_log(loglevel)
|
||||
|
||||
# subtle, we must have the runtime up here or portal lookup will fail
|
||||
async with maybe_open_runtime(loglevel, **kwargs):
|
||||
|
||||
async with tractor.find_actor(_root_dname) as portal:
|
||||
# assert portal is not None
|
||||
if portal is not None:
|
||||
yield portal
|
||||
return
|
||||
|
||||
# presume pikerd role since no daemon could be found at
|
||||
# configured address
|
||||
async with open_pikerd(
|
||||
|
||||
loglevel=loglevel,
|
||||
debug_mode=kwargs.get('debug_mode', False),
|
||||
|
||||
) as _:
|
||||
# in the case where we're starting up the
|
||||
# tractor-piker runtime stack in **this** process
|
||||
# we return no portal to self.
|
||||
yield None
|
||||
|
||||
|
||||
# brokerd enabled modules
|
||||
_data_mods = [
|
||||
'piker.brokers.core',
|
||||
'piker.brokers.data',
|
||||
'piker.data',
|
||||
'piker.data.feed',
|
||||
'piker.data._sampling'
|
||||
]
|
||||
|
||||
|
||||
class Brokerd:
|
||||
locks = defaultdict(trio.Lock)
|
||||
|
||||
|
||||
@acm
|
||||
async def find_service(
|
||||
service_name: str,
|
||||
) -> Optional[tractor.Portal]:
|
||||
|
||||
log.info(f'Scanning for service `{service_name}`')
|
||||
# attach to existing daemon by name if possible
|
||||
async with tractor.find_actor(
|
||||
service_name,
|
||||
arbiter_sockaddr=_registry_addr,
|
||||
) as maybe_portal:
|
||||
yield maybe_portal
|
||||
|
||||
|
||||
async def check_for_service(
|
||||
service_name: str,
|
||||
|
||||
) -> bool:
|
||||
'''
|
||||
Service daemon "liveness" predicate.
|
||||
|
||||
'''
|
||||
async with tractor.query_actor(
|
||||
service_name,
|
||||
arbiter_sockaddr=_registry_addr,
|
||||
) as sockaddr:
|
||||
return sockaddr
|
||||
|
||||
|
||||
@acm
|
||||
async def maybe_spawn_daemon(
|
||||
|
||||
service_name: str,
|
||||
service_task_target: Callable,
|
||||
spawn_args: dict[str, Any],
|
||||
loglevel: Optional[str] = None,
|
||||
**kwargs,
|
||||
|
||||
) -> tractor.Portal:
|
||||
'''
|
||||
If no ``service_name`` daemon-actor can be found,
|
||||
spawn one in a local subactor and return a portal to it.
|
||||
|
||||
If this function is called from a non-pikerd actor, the
|
||||
spawned service will persist as long as pikerd does or
|
||||
it is requested to be cancelled.
|
||||
|
||||
This can be seen as a service starting api for remote-actor
|
||||
clients.
|
||||
|
||||
'''
|
||||
if loglevel:
|
||||
get_console_log(loglevel)
|
||||
|
||||
# serialize access to this section to avoid
|
||||
# 2 or more tasks racing to create a daemon
|
||||
lock = Brokerd.locks[service_name]
|
||||
await lock.acquire()
|
||||
|
||||
async with find_service(service_name) as portal:
|
||||
if portal is not None:
|
||||
lock.release()
|
||||
yield portal
|
||||
return
|
||||
|
||||
log.warning(f"Couldn't find any existing {service_name}")
|
||||
|
||||
# ask root ``pikerd`` daemon to spawn the daemon we need if
|
||||
    # pikerd is not live we now become the root of the
    # process tree
    async with maybe_open_pikerd(

        loglevel=loglevel,
        **kwargs,

    ) as pikerd_portal:

        if pikerd_portal is None:
            # we are the root and thus are `pikerd`
            # so spawn the target service directly by calling
            # the provided target routine.
            # XXX: this assumes that the target is well formed and will
            # do the right things to setup both a sub-actor **and** call
            # the ``_Services`` api from above to start the top level
            # service task for that actor.
            await service_task_target(**spawn_args)

        else:
            # tell the remote `pikerd` to start the target,
            # the target can't return a non-serializable value
            # since it is expected that service starting is
            # non-blocking and the target task will persist running
            # on `pikerd` after the client requesting its start
            # disconnects.
            await pikerd_portal.run(
                service_task_target,
                **spawn_args,
            )

    async with tractor.wait_for_actor(service_name) as portal:
        lock.release()
        yield portal
        await portal.cancel_actor()


async def spawn_brokerd(

    brokername: str,
    loglevel: Optional[str] = None,
    **tractor_kwargs,

) -> bool:

    log.info(f'Spawning {brokername} broker daemon')

    brokermod = get_brokermod(brokername)
    dname = f'brokerd.{brokername}'

    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
    tractor_kwargs.update(extra_tractor_kwargs)

    global _services
    assert _services

    # ask `pikerd` to spawn a new sub-actor and manage it under its
    # actor nursery
    modpath = brokermod.__name__
    broker_enable = [modpath]
    for submodname in getattr(
        brokermod,
        '__enable_modules__',
        [],
    ):
        subpath = f'{modpath}.{submodname}'
        broker_enable.append(subpath)

    portal = await _services.actor_n.start_actor(
        dname,
        enable_modules=_data_mods + broker_enable,
        loglevel=loglevel,
        debug_mode=_services.debug_mode,
        **tractor_kwargs
    )

    # non-blocking setup of brokerd service nursery
    from .data import _setup_persistent_brokerd

    await _services.start_service_task(
        dname,
        portal,
        _setup_persistent_brokerd,
        brokername=brokername,
    )
    return True


@acm
async def maybe_spawn_brokerd(

    brokername: str,
    loglevel: Optional[str] = None,
    **kwargs,

) -> tractor.Portal:
    '''
    Helper to spawn a brokerd service *from* a client
    who wishes to use the sub-actor-daemon.

    '''
    async with maybe_spawn_daemon(

        f'brokerd.{brokername}',
        service_task_target=spawn_brokerd,
        spawn_args={'brokername': brokername, 'loglevel': loglevel},
        loglevel=loglevel,
        **kwargs,

    ) as portal:
        yield portal


async def spawn_emsd(

    loglevel: Optional[str] = None,
    **extra_tractor_kwargs

) -> bool:
    """
    Start the clearing engine under ``pikerd``.

    """
    log.info('Spawning emsd')

    global _services
    assert _services

    portal = await _services.actor_n.start_actor(
        'emsd',
        enable_modules=[
            'piker.clearing._ems',
            'piker.clearing._client',
        ],
        loglevel=loglevel,
        debug_mode=_services.debug_mode,  # set by pikerd flag
        **extra_tractor_kwargs
    )

    # non-blocking setup of clearing service
    from .clearing._ems import _setup_persistent_emsd

    await _services.start_service_task(
        'emsd',
        portal,
        _setup_persistent_emsd,
    )
    return True


@acm
async def maybe_open_emsd(

    brokername: str,
    loglevel: Optional[str] = None,
    **kwargs,

) -> tractor._portal.Portal:  # noqa

    async with maybe_spawn_daemon(

        'emsd',
        service_task_target=spawn_emsd,
        spawn_args={'loglevel': loglevel},
        loglevel=loglevel,
        **kwargs,

    ) as portal:
        yield portal


# TODO: ideally we can start the tsdb "on demand" but it's
# probably going to require "rootless" docker, at least if we don't
# want to expect the user to start ``pikerd`` with root perms all the
# time.
# async def maybe_open_marketstored(
#     loglevel: Optional[str] = None,
#     **kwargs,

# ) -> tractor._portal.Portal:  # noqa

#     async with maybe_spawn_daemon(

#         'marketstored',
#         service_task_target=spawn_emsd,
#         spawn_args={'loglevel': loglevel},
#         loglevel=loglevel,
#         **kwargs,

#     ) as portal:
#         yield portal
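For context, a minimal client-side sketch of how the helpers above get used
(the broker name and loglevel are illustrative only, and the import path
matches the pre ``piker.service`` migration layout shown here):

.. code:: python

    import trio
    from piker._daemon import maybe_spawn_brokerd

    async def main():
        # attach to an existing ``brokerd.kraken`` sub-actor, or ask
        # ``pikerd`` to spawn one, and get back a portal to it
        async with maybe_spawn_brokerd(
            'kraken',
            loglevel='info',
        ) as portal:
            ...  # drive the daemon via the portal

    trio.run(main)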
@ -18,7 +18,10 @@
Profiling wrappers for internal libs.

"""
import os
import sys
import time
from time import perf_counter
from functools import wraps

# NOTE: you can pass a flag to enable this:
@ -44,3 +47,193 @@ def timeit(fn):
        return res

    return wrapper


# Modified version of ``pyqtgraph.debug.Profiler`` that
# core seems hesitant to land in:
# https://github.com/pyqtgraph/pyqtgraph/pull/2281
class Profiler(object):
    '''
    Simple profiler allowing measurement of multiple time intervals.

    By default, profilers are disabled. To enable profiling, set the
    environment variable `PYQTGRAPHPROFILE` to a comma-separated list of
    fully-qualified names of profiled functions.

    Calling a profiler registers a message (defaulting to an increasing
    counter) that contains the time elapsed since the last call. When the
    profiler is about to be garbage-collected, the messages are passed to the
    outer profiler if one is running, or printed to stdout otherwise.

    If `delayed` is set to False, messages are immediately printed instead.

    Example:
        def function(...):
            profiler = Profiler()
            ... do stuff ...
            profiler('did stuff')
            ... do other stuff ...
            profiler('did other stuff')
            # profiler is garbage-collected and flushed at function end

    If this function is a method of class C, setting `PYQTGRAPHPROFILE` to
    "C.function" (without the module name) will enable this profiler.

    For regular functions, use the qualified name of the function, stripping
    only the initial "pyqtgraph." prefix from the module.
    '''

    _profilers = os.environ.get("PYQTGRAPHPROFILE", None)
    _profilers = _profilers.split(",") if _profilers is not None else []

    _depth = 0

    # NOTE: without this defined at the class level
    # you won't see appropriately "nested" sub-profiler
    # instance calls.
    _msgs = []

    # set this flag to disable all or individual profilers at runtime
    disable = False

    class DisabledProfiler(object):
        def __init__(self, *args, **kwds):
            pass

        def __call__(self, *args):
            pass

        def finish(self):
            pass

        def mark(self, msg=None):
            pass

    _disabledProfiler = DisabledProfiler()

    def __new__(
        cls,
        msg=None,
        disabled='env',
        delayed=True,
        ms_threshold: float = 0.0,
    ):
        """Optionally create a new profiler based on caller's qualname.

        ``ms_threshold`` can be set to value in ms for which, if the
        total measured time of the lifetime of this profiler is **less
        than** this value, then no profiling messages will be printed.
        Setting ``delayed=False`` disables this feature since messages
        are emitted immediately.

        """
        if (
            disabled is True
            or (
                disabled == 'env'
                and len(cls._profilers) == 0
            )
        ):
            return cls._disabledProfiler

        # determine the qualified name of the caller function
        caller_frame = sys._getframe(1)
        try:
            caller_object_type = type(caller_frame.f_locals["self"])

        except KeyError:  # we are in a regular function
            qualifier = caller_frame.f_globals["__name__"].split(".", 1)[-1]

        else:  # we are in a method
            qualifier = caller_object_type.__name__
        func_qualname = qualifier + "." + caller_frame.f_code.co_name

        if disabled == 'env' and func_qualname not in cls._profilers:
            # don't do anything
            return cls._disabledProfiler

        cls._depth += 1
        obj = super(Profiler, cls).__new__(cls)
        obj._msgs = []

        # create an actual profiling object
        if cls._depth < 1:
            cls._msgs = []

        obj._name = msg or func_qualname
        obj._delayed = delayed
        obj._markCount = 0
        obj._finished = False
        obj._firstTime = obj._lastTime = perf_counter()
        obj._mt = ms_threshold
        obj._newMsg("> Entering " + obj._name)
        return obj

    def __call__(self, msg=None):
        """Register or print a new message with timing information.
        """
        if self.disable:
            return
        if msg is None:
            msg = str(self._markCount)

        self._markCount += 1
        newTime = perf_counter()
        tot_ms = (newTime - self._firstTime) * 1000
        ms = (newTime - self._lastTime) * 1000
        self._newMsg(
            f" {msg}: {ms:0.4f}, tot:{tot_ms:0.4f}"
        )

        self._lastTime = newTime

    def mark(self, msg=None):
        self(msg)

    def _newMsg(self, msg, *args):
        msg = " " * (self._depth - 1) + msg
        if self._delayed:
            self._msgs.append((msg, args))
        else:
            print(msg % args)

    def __del__(self):
        self.finish()

    def finish(self, msg=None):
        """Add a final message; flush the message list if no parent profiler.
        """
        if self._finished or self.disable:
            return

        self._finished = True
        if msg is not None:
            self(msg)

        tot_ms = (perf_counter() - self._firstTime) * 1000
        self._newMsg(
            "< Exiting %s, total time: %0.4f ms",
            self._name,
            tot_ms,
        )

        if tot_ms < self._mt:
            # print(f'{tot_ms} < {self._mt}, clearing')
            # NOTE: this list **must** be an instance var to avoid
            # deleting common messages during GC I think?
            self._msgs.clear()
        # else:
        #     print(f'{tot_ms} > {self._mt}, not clearing')

        # XXX: why is this needed?
        # don't we **want to show** nested profiler messages?
        if self._msgs:  # and self._depth < 1:

            # if self._msgs:
            print("\n".join([m[0] % m[1] for m in self._msgs]))

            # clear all entries
            self._msgs.clear()
            # type(self)._msgs = []

        type(self)._depth -= 1
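A usage sketch for the threshold feature described above; the class and
method names are hypothetical, and ``PYQTGRAPHPROFILE`` must be exported
before the module defining ``Profiler`` is imported since the env var is
read at class-definition time:

.. code:: python

    # in your shell: PYQTGRAPHPROFILE=Datafeed.process_quotes piker ...

    class Datafeed:
        def process_quotes(self):
            profiler = Profiler(
                ms_threshold=50.0,  # drop all msgs if total runtime < 50ms
            )
            ...  # do work
            profiler('normalized quotes')
            # remaining msgs flush when the profiler is garbage collected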
@ -20,30 +20,41 @@ Broker clients, daemons and general back end machinery.
from importlib import import_module
from types import ModuleType

# TODO: move to urllib3/requests once supported
import asks
asks.init('trio')

__brokers__ = [
    'binance',
    'questrade',
    'robinhood',
    'ib',
    'kraken',

    # broken but used to work
    # 'questrade',
    # 'robinhood',

    # TODO: we should get on these stat!
    # alpaca
    # wstrade
    # iex

    # deribit
    # kucoin
    # bitso
]


def get_brokermod(brokername: str) -> ModuleType:
    """Return the imported broker module by name.
    """
    '''
    Return the imported broker module by name.

    '''
    module = import_module('.' + brokername, 'piker.brokers')
    # we only allow monkeying because it's for internal keying
    module.name = module.__name__.split('.')[-1]
    return module


def iter_brokermods():
    """Iterate all built-in broker modules.
    """
    '''
    Iterate all built-in broker modules.

    '''
    for name in __brokers__:
        yield get_brokermod(name)
@ -33,15 +33,23 @@ import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor
from pydantic.dataclasses import dataclass
from pydantic import BaseModel
import wsproto

from .._cacheables import open_cached_client
from ._util import resproc, SymbolNotFound
from ..log import get_logger, get_console_log
from ..data import ShmArray
from ..data._web_bs import open_autorecon_ws, NoBsWs
from ._util import (
    resproc,
    SymbolNotFound,
    DataUnavailable,
)
from ..log import (
    get_logger,
    get_console_log,
)
from ..data.types import Struct
from ..data._web_bs import (
    open_autorecon_ws,
    NoBsWs,
)

log = get_logger(__name__)
@ -79,12 +87,14 @@ _show_wap_in_history = False


# https://binance-docs.github.io/apidocs/spot/en/#exchange-information
class Pair(BaseModel):
class Pair(Struct, frozen=True):
    symbol: str
    status: str

    baseAsset: str
    baseAssetPrecision: int
    cancelReplaceAllowed: bool
    allowTrailingStop: bool
    quoteAsset: str
    quotePrecision: int
    quoteAssetPrecision: int
@ -100,18 +110,21 @@ class Pair(BaseModel):
    isSpotTradingAllowed: bool
    isMarginTradingAllowed: bool

    defaultSelfTradePreventionMode: str
    allowedSelfTradePreventionModes: list[str]

    filters: list[dict[str, Union[str, int, float]]]
    permissions: list[str]


@dataclass
class OHLC:
    """Description of the flattened OHLC quote format.
class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
    https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

    """
    '''
    time: int

    open: float
@ -134,7 +147,9 @@ class OHLC:


# convert datetime obj timestamp to unixtime in milliseconds
def binance_timestamp(when):
def binance_timestamp(
    when: datetime
) -> int:
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))

@ -173,7 +188,7 @@ class Client:
        params = {}

        if sym is not None:
            sym = sym.upper()
            sym = sym.lower()
            params = {'symbol': sym}

        resp = await self._api(
@ -230,7 +245,7 @@ class Client:
    ) -> dict:

        if end_dt is None:
            end_dt = pendulum.now('UTC')
            end_dt = pendulum.now('UTC').add(minutes=1)

        if start_dt is None:
            start_dt = end_dt.start_of(
@ -260,6 +275,7 @@ class Client:
        for i, bar in enumerate(bars):

            bar = OHLC(*bar)
            bar.typecast()

            row = []
            for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):
@ -287,7 +303,7 @@ async def get_client() -> Client:


# validation type
class AggTrade(BaseModel):
class AggTrade(Struct):
    e: str  # Event type
    E: int  # Event time
    s: str  # Symbol
@ -341,7 +357,9 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

    elif msg.get('e') == 'aggTrade':

        # validate
        # NOTE: this is purely for a definition, ``msgspec.Struct``
        # does not runtime-validate until you decode/encode.
        # see: https://jcristharif.com/msgspec/structs.html#type-validation
        msg = AggTrade(**msg)

        # TODO: type out and require this quote format
@ -352,8 +370,8 @@ async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:
            'brokerd_ts': time.time(),
            'ticks': [{
                'type': 'trade',
                'price': msg.p,
                'size': msg.q,
                'price': float(msg.p),
                'size': float(msg.q),
                'broker_ts': msg.T,
            }],
        }
@ -384,41 +402,39 @@ async def open_history_client(
    async with open_cached_client('binance') as client:

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,
            timeframe: float,
            end_dt: datetime | None = None,
            start_dt: datetime | None = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            if timeframe != 60:
                raise DataUnavailable('Only 1m bars are supported')

            array = await client.bars(
                symbol,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            times = array['time']
            if (
                end_dt is None
            ):
                inow = round(time.time())
                if (inow - times[-1]) > 60:
                    await tractor.breakpoint()

            start_dt = pendulum.from_timestamp(times[0])
            end_dt = pendulum.from_timestamp(times[-1])

            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}


async def backfill_bars(
    sym: str,
    shm: ShmArray,  # type: ignore # noqa
    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
) -> None:
    """Fill historical bars into shared mem / storage afap.
    """
    with trio.CancelScope() as cs:
        async with open_cached_client('binance') as client:
            bars = await client.bars(symbol=sym)
            shm.push(bars)
            task_status.started(cs)


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
@ -448,12 +464,20 @@ async def stream_quotes(
            d = cache[sym.upper()]
            syminfo = Pair(**d)  # validation

            si = sym_infos[sym] = syminfo.dict()
            si = sym_infos[sym] = syminfo.to_dict()
            filters = {}
            for entry in syminfo.filters:
                ftype = entry['filterType']
                filters[ftype] = entry

            # XXX: after manually inspecting the response format we
            # just directly pick out the info we need
            si['price_tick_size'] = float(syminfo.filters[0]['tickSize'])
            si['lot_tick_size'] = float(syminfo.filters[2]['stepSize'])
            si['price_tick_size'] = float(
                filters['PRICE_FILTER']['tickSize']
            )
            si['lot_tick_size'] = float(
                filters['LOT_SIZE']['stepSize']
            )
            si['asset_type'] = 'crypto'

        symbol = symbols[0]
@ -495,14 +519,15 @@ async def stream_quotes(
            subs.append(f"{sym}@bookTicker")

            # unsub from all pairs on teardown
            await ws.send_msg({
                "method": "UNSUBSCRIBE",
                "params": subs,
                "id": uid,
            })
            if ws.connected():
                await ws.send_msg({
                    "method": "UNSUBSCRIBE",
                    "params": subs,
                    "id": uid,
                })

                # XXX: do we need to ack the unsub?
                # await ws.recv_msg()
            # XXX: do we need to ack the unsub?
            # await ws.recv_msg()

    async with open_autorecon_ws(
        'wss://stream.binance.com/ws',
@ -29,8 +29,15 @@ import tractor
from ..cli import cli
from .. import watchlists as wl
from ..log import get_console_log, colorize_json, get_logger
from .._daemon import maybe_spawn_brokerd, maybe_open_pikerd
from ..brokers import core, get_brokermod, data
from ..service import (
    maybe_spawn_brokerd,
    maybe_open_pikerd,
)
from ..brokers import (
    core,
    get_brokermod,
    data,
)

log = get_logger('cli')
DEFAULT_BROKER = 'questrade'
@ -39,6 +46,148 @@ _config_dir = click.get_app_dir('piker')
_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')


OK = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'


def print_ok(s: str, **kwargs):
    print(OK + s + ENDC, **kwargs)


def print_error(s: str, **kwargs):
    print(FAIL + s + ENDC, **kwargs)


def get_method(client, meth_name: str):
    print(f'checking client for method \'{meth_name}\'...', end='', flush=True)
    method = getattr(client, meth_name, None)
    assert method
    print_ok('found!')
    return method


async def run_method(client, meth_name: str, **kwargs):
    method = get_method(client, meth_name)
    print('running...', end='', flush=True)
    result = await method(**kwargs)
    print_ok(f'done! result: {type(result)}')
    return result


async def run_test(broker_name: str):
    brokermod = get_brokermod(broker_name)
    total = 0
    passed = 0
    failed = 0

    print('getting client...', end='', flush=True)
    if not hasattr(brokermod, 'get_client'):
        print_error('fail! no \'get_client\' context manager found.')
        return

    async with brokermod.get_client(is_brokercheck=True) as client:
        print_ok('done! inside client context.')

        # check for methods present on brokermod
        method_list = [
            'backfill_bars',
            'get_client',
            'trades_dialogue',
            'open_history_client',
            'open_symbol_search',
            'stream_quotes',

        ]

        for method in method_list:
            print(
                f'checking brokermod for method \'{method}\'...',
                end='', flush=True)
            if not hasattr(brokermod, method):
                print_error(f'fail! method \'{method}\' not found.')
                failed += 1
            else:
                print_ok('done!')
                passed += 1

            total += 1

        # check for methods present on brokermod.Client and their
        # results

        # for private methods only check is present
        method_list = [
            'get_balances',
            'get_assets',
            'get_trades',
            'get_xfers',
            'submit_limit',
            'submit_cancel',
            'search_symbols',
        ]

        for method_name in method_list:
            try:
                get_method(client, method_name)
                passed += 1

            except AssertionError:
                print_error(f'fail! method \'{method_name}\' not found.')
                failed += 1

            total += 1

        # check for methods present on brokermod.Client and their
        # results

        syms = await run_method(client, 'symbol_info')
        total += 1

        if len(syms) == 0:
            raise BaseException('Empty Symbol list?')

        passed += 1

        first_sym = tuple(syms.keys())[0]

        method_list = [
            ('cache_symbols', {}),
            ('search_symbols', {'pattern': first_sym[:-1]}),
            ('bars', {'symbol': first_sym})
        ]

        for method_name, method_kwargs in method_list:
            try:
                await run_method(client, method_name, **method_kwargs)
                passed += 1

            except AssertionError:
                print_error(f'fail! method \'{method_name}\' not found.')
                failed += 1

            total += 1

        print(f'total: {total}, passed: {passed}, failed: {failed}')


@cli.command()
@click.argument('broker', nargs=1, required=True)
@click.pass_obj
def brokercheck(config, broker):
    '''
    Test broker apis for completeness.

    '''
    async def bcheck_main():
        async with maybe_spawn_brokerd(broker) as portal:
            await portal.run(run_test, broker)
            await portal.cancel_actor()

    trio.run(run_test, broker)


@cli.command()
@click.option('--keys', '-k', multiple=True,
              help='Return results only for these keys')
@ -28,7 +28,7 @@ import trio

from ..log import get_logger
from . import get_brokermod
from .._daemon import maybe_spawn_brokerd
from ..service import maybe_spawn_brokerd
from .._cacheables import open_cached_client

@ -227,26 +227,28 @@ async def get_cached_feed(

@tractor.stream
async def start_quote_stream(
    ctx: tractor.Context,  # marks this as a streaming func
    stream: tractor.Context,  # marks this as a streaming func
    broker: str,
    symbols: List[Any],
    feed_type: str = 'stock',
    rate: int = 3,
) -> None:
    """Handle per-broker quote stream subscriptions using a "lazy" pub-sub
    '''
    Handle per-broker quote stream subscriptions using a "lazy" pub-sub
    pattern.

    Spawns new quoter tasks for each broker backend on-demand.
    Since most brokers seem to support batch quote requests we
    limit to one task per process (for now).
    """

    '''
    # XXX: why do we need this again?
    get_console_log(tractor.current_actor().loglevel)

    # pull global vars from local actor
    symbols = list(symbols)
    log.info(
        f"{ctx.chan.uid} subscribed to {broker} for symbols {symbols}")
        f"{stream.chan.uid} subscribed to {broker} for symbols {symbols}")
    # another actor task may have already created it
    async with get_cached_feed(broker) as feed:

@ -290,13 +292,13 @@ async def start_quote_stream(
            assert fquote['displayable']
            payload[sym] = fquote

        await ctx.send_yield(payload)
        await stream.send_yield(payload)

    await stream_poll_requests(

        # ``trionics.msgpub`` required kwargs
        task_name=feed_type,
        ctx=ctx,
        ctx=stream,
        topics=symbols,
        packetizer=feed.mod.packetizer,

@ -319,9 +321,11 @@ async def call_client(


class DataFeed:
    """Data feed client for streaming symbol data from and making API client calls
    to a (remote) ``brokerd`` daemon.
    """
    '''
    Data feed client for streaming symbol data from and making API
    client calls to a (remote) ``brokerd`` daemon.

    '''
    _allowed = ('stock', 'option')

    def __init__(self, portal, brokermod):
@ -0,0 +1,70 @@
``deribit`` backend
-------------------
pretty good liquidity crypto derivatives, uses custom json rpc over ws for
client methods, then `cryptofeed` for data streams.

status
******
- supports option charts
- no order support yet


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [deribit]
    key_id = 'XXXXXXXX'
    key_secret = 'Xx_XxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXxXx'

To obtain an api id and secret you need to create an account, which can be a
real market account over at:

- deribit.com (requires KYC for deposit address)

Or a testnet account over at:

- test.deribit.com

For testnet once the account is created here is how you deposit fake crypto to
try it out:

1) Go to Wallet:

.. figure:: assets/0_wallet.png
    :align: center
    :target: assets/0_wallet.png
    :alt: wallet page

2) Then click on the ellipsis menu and select deposit

.. figure:: assets/1_wallet_select_deposit.png
    :align: center
    :target: assets/1_wallet_select_deposit.png
    :alt: wallet deposit page

3) This will take you to the deposit address page

.. figure:: assets/2_gen_deposit_addr.png
    :align: center
    :target: assets/2_gen_deposit_addr.png
    :alt: generate deposit address page

4) After clicking generate you should see the address, copy it and go to the
   `coin faucet <https://test.deribit.com/dericoin/BTC/deposit>`_ and send fake
   coins to that address.

.. figure:: assets/3_deposit_address.png
    :align: center
    :target: assets/3_deposit_address.png
    :alt: generated address

5) Back in the deposit address page you should see the deposit in your history

.. figure:: assets/4_wallet_deposit_history.png
    :align: center
    :target: assets/4_wallet_deposit_history.png
    :alt: wallet deposit history
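As a quick illustration of the json rpc over ws layer, here is a sketch of
querying instruments against the testnet; it leans on the
``open_jsonrpc_session`` helper from ``piker.data._web_bs`` used by this
backend, the module path for ``JSONRPCResult`` is assumed from the backend
layout below, and exact helper signatures may differ:

.. code:: python

    import trio
    from piker.data._web_bs import open_jsonrpc_session
    # NOTE: import path assumed from the backend modules shown below
    from piker.brokers.deribit.api import JSONRPCResult

    async def list_btc_options() -> dict:
        # testnet endpoint; ``public/*`` methods need no auth
        async with open_jsonrpc_session(
            'wss://test.deribit.com/ws/api/v2',
            dtype=JSONRPCResult,
        ) as json_rpc:
            resp = await json_rpc(
                'public/get_instruments',
                params={
                    'currency': 'BTC',
                    'kind': 'option',
                    'expired': 'false',
                },
            )
            return resp.result

    trio.run(list_btc_options)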
@ -0,0 +1,65 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
    backfill_bars
)
# from .broker import (
#     trades_dialogue,
#     norm_trade_records,
# )

__all__ = [
    'get_client',
    # 'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    # 'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    # 'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True
@ -0,0 +1,672 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''
import json
import time
import asyncio

from contextlib import asynccontextmanager as acm, AsyncExitStack
from functools import partial
from datetime import datetime
from typing import Any, Optional, Iterable, Callable

import pendulum
import asks
import trio
from trio_typing import Nursery, TaskStatus
from fuzzywuzzy import process as fuzzy
import numpy as np

from piker.data.types import Struct
from piker.data._web_bs import (
    NoBsWs,
    open_autorecon_ws,
    open_jsonrpc_session
)

from .._util import resproc

from piker import config
from piker.log import get_logger

from tractor.trionics import (
    broadcast_receiver,
    BroadcastReceiver,
    maybe_open_context
)
from tractor import to_asyncio

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT,
    L1_BOOK, TRADES,
    OPTION, CALL, PUT,
    # NOTE: referenced by the (not yet wired up) order feed relay
    # below; added here so the module imports cleanly.
    FILLS, ORDER_INFO,
)
from cryptofeed.symbols import Symbol

log = get_logger(__name__)
_spawn_kwargs = {
    'infect_asyncio': True,
}


_url = 'https://www.deribit.com'
_ws_url = 'wss://www.deribit.com/ws/api/v2'
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'


# Broker specific ohlc schema (rest)
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),  # will be zeroed by sampler if not filled
]


class JSONRPCResult(Struct):
    # NOTE: fields without defaults must precede defaulted fields in
    # ``msgspec.Struct`` definitions; ordered accordingly.
    id: int
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
    jsonrpc: str = '2.0'
    result: Optional[dict] = None
    error: Optional[dict] = None


class JSONRPCChannel(Struct):
    method: str
    params: dict
    jsonrpc: str = '2.0'


class KLinesResult(Struct):
    close: list[float]
    cost: list[float]
    high: list[float]
    low: list[float]
    open: list[float]
    status: str
    ticks: list[int]
    volume: list[float]


class Trade(Struct):
    trade_seq: int
    trade_id: str
    timestamp: int
    tick_direction: int
    price: float
    mark_price: float
    iv: float
    instrument_name: str
    index_price: float
    direction: str
    amount: float
    combo_trade_id: Optional[int] = 0
    combo_id: Optional[str] = ''


class LastTradesResult(Struct):
    trades: list[Trade]
    has_more: bool


# convert datetime obj timestamp to unixtime in milliseconds
def deribit_timestamp(when):
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))
def str_to_cb_sym(name: str) -> Symbol:
    base, strike_price, expiry_date, option_type = name.split('-')

    quote = base

    if option_type == 'put':
        option_type = PUT
    elif option_type == 'call':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date,
        expiry_normalize=False)


def piker_sym_to_cb_sym(name: str) -> Symbol:
    # e.g. 'btc-13jan23-20000-c' -> ('BTC', '13JAN23', '20000', 'C')
    base, expiry_date, strike_price, option_type = tuple(
        name.upper().split('-'))

    quote = base

    if option_type == 'P':
        option_type = PUT
    elif option_type == 'C':
        option_type = CALL
    else:
        raise Exception("Couldn't parse option type")

    return Symbol(
        base, quote,
        type=OPTION,
        strike_price=strike_price,
        option_type=option_type,
        expiry_date=expiry_date.upper())


def cb_sym_to_deribit_inst(sym: Symbol):
    # cryptofeed normalized month codes
    cb_norm = ['F', 'G', 'H', 'J', 'K', 'M', 'N', 'Q', 'U', 'V', 'X', 'Z']

    # deribit specific month names
    months = [
        'JAN', 'FEB', 'MAR', 'APR', 'MAY', 'JUN',
        'JUL', 'AUG', 'SEP', 'OCT', 'NOV', 'DEC',
    ]

    exp = sym.expiry_date

    # YYMDD
    # 01234
    # e.g. '23F13' -> year '23', code 'F' -> 'JAN', day '13'
    year, month, day = (
        exp[:2], months[cb_norm.index(exp[2:3])], exp[3:])

    otype = 'C' if sym.option_type == CALL else 'P'

    return f'{sym.base}-{day}{month}{year}-{sym.strike_price}-{otype}'


def get_config() -> dict[str, Any]:

    conf, path = config.load()

    section = conf.get('deribit')

    # TODO: document why we send this, basically because
    # logging params for cryptofeed
    conf['log'] = {}
    conf['log']['disabled'] = True

    if section is None:
        log.warning(f'No config section found for deribit in {path}')

    return conf
class Client:

    def __init__(self, json_rpc: Callable) -> None:
        self._pairs: dict[str, Any] = None

        config = get_config().get('deribit', {})

        if ('key_id' in config) and ('key_secret' in config):
            self._key_id = config['key_id']
            self._key_secret = config['key_secret']

        else:
            self._key_id = None
            self._key_secret = None

        self.json_rpc = json_rpc

    @property
    def currencies(self):
        return ['btc', 'eth', 'sol', 'usd']

    async def get_balances(self, kind: str = 'option') -> dict[str, float]:
        """Return the set of positions for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_positions', params={
                    'currency': currency.upper(),
                    'kind': kind})

            balances[currency] = resp.result

        return balances

    async def get_assets(self) -> dict[str, float]:
        """Return the set of asset balances for this account
        by symbol.
        """
        balances = {}

        for currency in self.currencies:
            resp = await self.json_rpc(
                'private/get_account_summary', params={
                    'currency': currency.upper()})

            balances[currency] = resp.result['balance']

        return balances

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float
    ) -> dict:
        """Place an order
        """
        params = {
            'instrument_name': symbol.upper(),
            'amount': size,
            'type': 'limit',
            'price': price,
        }
        resp = await self.json_rpc(
            f'private/{action}', params)

        return resp.result

    async def submit_cancel(self, oid: str):
        """Send cancel request for order id
        """
        resp = await self.json_rpc(
            'private/cancel', {'order_id': oid})
        return resp.result

    async def symbol_info(
        self,
        instrument: Optional[str] = None,
        currency: str = 'btc',  # BTC, ETH, SOL, USDC
        kind: str = 'option',
        expired: bool = False
    ) -> dict[str, Any]:
        """Get symbol info for the exchange.

        """
        if self._pairs:
            return self._pairs

        # will retrieve all symbols by default
        params = {
            'currency': currency.upper(),
            'kind': kind,
            'expired': str(expired).lower()
        }

        resp = await self.json_rpc('public/get_instruments', params)
        results = resp.result

        instruments = {
            item['instrument_name'].lower(): item
            for item in results
        }

        if instrument is not None:
            return instruments[instrument]
        else:
            return instruments

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = 30,
    ) -> dict[str, Any]:
        data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=35,
            limit=limit
        )
        # repack in dict form
        return {item[0]['instrument_name'].lower(): item[0]
                for item in matches}

    async def bars(
        self,
        symbol: str,
        start_dt: Optional[datetime] = None,
        end_dt: Optional[datetime] = None,
        limit: int = 1000,
        as_np: bool = True,
    ) -> dict:
        instrument = symbol

        if end_dt is None:
            end_dt = pendulum.now('UTC')

        if start_dt is None:
            start_dt = end_dt.start_of(
                'minute').subtract(minutes=limit)

        start_time = deribit_timestamp(start_dt)
        end_time = deribit_timestamp(end_dt)

        # https://docs.deribit.com/#public-get_tradingview_chart_data
        resp = await self.json_rpc(
            'public/get_tradingview_chart_data',
            params={
                'instrument_name': instrument.upper(),
                'start_timestamp': start_time,
                'end_timestamp': end_time,
                'resolution': '1'
            })

        result = KLinesResult(**resp.result)
        new_bars = []
        for i in range(len(result.close)):

            row = [
                (start_time + (i * (60 * 1000))) / 1000.0,  # time
                result.open[i],
                result.high[i],
                result.low[i],
                result.close[i],
                result.volume[i],
                0
            ]

            new_bars.append((i,) + tuple(row))

        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else new_bars
        return array

    async def last_trades(
        self,
        instrument: str,
        count: int = 10
    ):
        resp = await self.json_rpc(
            'public/get_last_trades_by_instrument',
            params={
                'instrument_name': instrument,
                'count': count
            })

        return LastTradesResult(**resp.result)
@acm
async def get_client(
    is_brokercheck: bool = False
) -> Client:

    async with (
        trio.open_nursery() as n,
        open_jsonrpc_session(
            _testnet_ws_url, dtype=JSONRPCResult) as json_rpc
    ):
        client = Client(json_rpc)

        _refresh_token: Optional[str] = None
        _access_token: Optional[str] = None

        async def _auth_loop(
            task_status: TaskStatus = trio.TASK_STATUS_IGNORED
        ):
            """Background task that acquires a first access token and
            then refreshes the access token while the nursery isn't
            cancelled.

            https://docs.deribit.com/?python#authentication-2
            """
            renew_time = 10
            access_scope = 'trade:read_write'
            _expiry_time = time.time()
            got_access = False
            nonlocal _refresh_token
            nonlocal _access_token

            while True:
                if time.time() - _expiry_time < renew_time:
                    # if we are close to token expiry time

                    if _refresh_token is not None:
                        # if we have a refresh token already dont need to
                        # send secret
                        params = {
                            'grant_type': 'refresh_token',
                            'refresh_token': _refresh_token,
                            'scope': access_scope
                        }

                    else:
                        # we don't have refresh token, send secret to
                        # initialize
                        params = {
                            'grant_type': 'client_credentials',
                            'client_id': client._key_id,
                            'client_secret': client._key_secret,
                            'scope': access_scope
                        }

                    resp = await json_rpc('public/auth', params)
                    result = resp.result

                    _expiry_time = time.time() + result['expires_in']
                    _refresh_token = result['refresh_token']

                    if 'access_token' in result:
                        _access_token = result['access_token']

                    if not got_access:
                        # first time this loop runs we must indicate task is
                        # started, we have auth
                        got_access = True
                        task_status.started()

                else:
                    await trio.sleep(renew_time / 2)

        # if we have client creds launch auth loop
        if client._key_id is not None:
            await n.start(_auth_loop)

        await client.cache_symbols()
        yield client
        n.cancel_scope.cancel()
@acm
async def open_feed_handler():
    fh = FeedHandler(config=get_config())
    yield fh
    await to_asyncio.run_task(fh.stop_async)


@acm
async def maybe_open_feed_handler() -> trio.abc.ReceiveStream:
    async with maybe_open_context(
        acm_func=open_feed_handler,
        key='feedhandler',
    ) as (cache_hit, fh):
        yield fh


async def aio_price_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    async def _trade(data: dict, receipt_timestamp):
        to_trio.send_nowait(('trade', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'last': data,
            'broker_ts': time.time(),
            'data': data.to_dict(),
            'receipt': receipt_timestamp
        }))

    async def _l1(data: dict, receipt_timestamp):
        to_trio.send_nowait(('l1', {
            'symbol': cb_sym_to_deribit_inst(
                str_to_cb_sym(data.symbol)).lower(),
            'ticks': [
                {'type': 'bid',
                 'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'bsize',
                 'price': float(data.bid_price), 'size': float(data.bid_size)},
                {'type': 'ask',
                 'price': float(data.ask_price), 'size': float(data.ask_size)},
                {'type': 'asize',
                 'price': float(data.ask_price), 'size': float(data.ask_size)}
            ]
        }))

    fh.add_feed(
        DERIBIT,
        channels=[TRADES, L1_BOOK],
        symbols=[piker_sym_to_cb_sym(instrument)],
        callbacks={
            TRADES: _trade,
            L1_BOOK: _l1
        })

    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    await asyncio.sleep(float('inf'))


@acm
async def open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:
    async with maybe_open_feed_handler() as fh:
        async with to_asyncio.open_channel_from(
            partial(
                aio_price_feed_relay,
                fh,
                instrument
            )
        ) as (first, chan):
            yield chan


@acm
async def maybe_open_price_feed(
    instrument: str
) -> trio.abc.ReceiveStream:

    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_price_feed,
        kwargs={
            'instrument': instrument
        },
        key=f'{instrument}-price',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed
async def aio_order_feed_relay(
    fh: FeedHandler,
    instrument: Symbol,
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    async def _fill(data: dict, receipt_timestamp):
        breakpoint()

    async def _order_info(data: dict, receipt_timestamp):
        breakpoint()

    fh.add_feed(
        DERIBIT,
        channels=[FILLS, ORDER_INFO],
        symbols=[instrument.upper()],
        callbacks={
            FILLS: _fill,
            ORDER_INFO: _order_info,
        })

    if not fh.running:
        fh.run(
            start_loop=False,
            install_signal_handlers=False)

    # sync with trio
    to_trio.send_nowait(None)

    await asyncio.sleep(float('inf'))


@acm
async def open_order_feed(
    instrument: list[str]
) -> trio.abc.ReceiveStream:
    async with maybe_open_feed_handler() as fh:
        async with to_asyncio.open_channel_from(
            partial(
                aio_order_feed_relay,
                fh,
                instrument
            )
        ) as (first, chan):
            yield chan


@acm
async def maybe_open_order_feed(
    instrument: str
) -> trio.abc.ReceiveStream:

    # TODO: add a predicate to maybe_open_context
    async with maybe_open_context(
        acm_func=open_order_feed,
        kwargs={
            'instrument': instrument,
        },
        key=f'{instrument}-order',
    ) as (cache_hit, feed):
        if cache_hit:
            yield broadcast_receiver(feed, 10)
        else:
            yield feed
(five new binary image assets added under ``assets/``: the wallet
walkthrough screenshots referenced by the deribit README figures above)
@ -0,0 +1,185 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Deribit backend.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import Any, Optional, Callable
import time

import trio
from trio_typing import TaskStatus
import pendulum
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor

from piker._cacheables import open_cached_client
from piker.log import get_logger, get_console_log
from piker.data import ShmArray
from piker.brokers._util import (
    BrokerError,
    DataUnavailable,
)

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
)
from cryptofeed.symbols import Symbol

from .api import (
    Client, Trade,
    get_config,
    str_to_cb_sym, piker_sym_to_cb_sym, cb_sym_to_deribit_inst,
    maybe_open_price_feed
)

_spawn_kwargs = {
    'infect_asyncio': True,
}


log = get_logger(__name__)


@acm
async def open_history_client(
    instrument: str,
) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('deribit') as client:

        async def get_ohlc(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            array = await client.bars(
                instrument,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            if len(array) == 0:
                raise DataUnavailable

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])

            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    sym = symbols[0]

    async with (
        open_cached_client('deribit') as client,
        send_chan as send_chan
    ):

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            sym: {
                'symbol_info': {
                    'asset_type': 'option',
                    'price_tick_size': 0.0005
                },
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        nsym = piker_sym_to_cb_sym(sym)

        async with maybe_open_price_feed(sym) as stream:

            cache = await client.cache_symbols()

            last_trades = (await client.last_trades(
                cb_sym_to_deribit_inst(nsym), count=1)).trades

            if len(last_trades) == 0:
                last_trade = None
                async for typ, quote in stream:
                    if typ == 'trade':
                        last_trade = Trade(**(quote['data']))
                        break

            else:
                last_trade = Trade(**(last_trades[0]))

            first_quote = {
                'symbol': sym,
                'last': last_trade.price,
                'brokerd_ts': last_trade.timestamp,
                'ticks': [{
                    'type': 'trade',
                    'price': last_trade.price,
                    'size': last_trade.amount,
                    'broker_ts': last_trade.timestamp
                }]
            }
            task_status.started((init_msgs, first_quote))

            feed_is_live.set()

            async for typ, quote in stream:
                topic = quote['symbol']
                await send_chan.send({topic: quote})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> Client:
    async with open_cached_client('deribit') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started()

        async with ctx.open_stream() as stream:

            async for pattern in stream:
                # repack in dict form
                await stream.send(
                    await client.search_symbols(pattern))
@ -0,0 +1,134 @@
``ib`` backend
--------------
more or less the "everything broker" for traditional and international
markets. they are the "go to" provider for automatic retail trading
and we interface to their APIs using the `ib_insync` project.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.

currently there is not yet full support for:
- options charting and trading
- paxos based crypto rt feeds and trading


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [ib]
    hosts = [
        "127.0.0.1",
    ]
    # TODO: when we eventually spawn gateways in our
    # container, we can just dynamically allocate these
    # using IBC.
    ports = [
        4002,
        4003,
        4006,
        4001,
        7497,
    ]

    # XXX: for a paper account the flex web query service
    # is not supported so you have to manually download
    # an XML report and put it in a location that can be
    # accessed by the ``brokerd.ib`` backend code for parsing.
    flex_token = '1111111111111111'
    flex_trades_query_id = '6969696'  # live accounts only?

    # 3rd party web-api token
    # (XXX: not sure if this works yet)
    trade_log_token = '111111111111111'

    # when clients are being scanned this determines
    # which clients are preferred to be used for data feeds
    # based on account names which are detected as active
    # on each client.
    prefer_data_account = [
        # this has to be first in order to make data work with dual paper + live
        'main',
        'algopaper',
    ]

    [ib.accounts]
    main = 'U69696969'
    algopaper = 'DU9696969'


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <piker_conf_dir>/ledgers/trades_ib_algopaper.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    ["0000e1a7.630f5e5a.01.01"]
    secType = "FUT"
    conId = 515416577
    symbol = "MNQ"
    lastTradeDateOrContractMonth = "20221216"
    strike = 0.0
    right = ""
    multiplier = "2"
    exchange = "GLOBEX"
    primaryExchange = ""
    currency = "USD"
    localSymbol = "MNQZ2"
    tradingClass = "MNQ"
    includeExpired = false
    secIdType = ""
    secId = ""
    comboLegsDescrip = ""
    comboLegs = []
    execId = "0000e1a7.630f5e5a.01.01"
    time = 1661972086.0
    acctNumber = "DU69696969"
    side = "BOT"
    shares = 1.0
    price = 12372.75
    permId = 441472655
    clientId = 6116
    orderId = 985
    liquidation = 0
    cumQty = 1.0
    avgPrice = 12372.75
    orderRef = ""
    evRule = ""
    evMultiplier = 0.0
    modelCode = ""
    lastLiquidity = 1
    broker_time = 1661972086.0
    name = "ib"
    commission = 0.57
    realizedPNL = 243.41
    yield_ = 0.0
    yieldRedemptionDate = 0
    listingExchange = "GLOBEX"
    date = "2022-08-31T18:54:46+00:00"


your ``pps.toml`` file will have position entries like,

.. code:: toml

    [ib.algopaper."mnq.globex.20221216"]
    size = -1.0
    ppu = 12423.630576923071
    bsuid = 515416577
    expiry = "2022-12-16T00:00:00+00:00"
    clears = [
        { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
    ]
@ -20,15 +20,10 @@ Interactive Brokers API backend.
Sub-modules within break into the core functionalities:

- ``broker.py`` part for orders / trading endpoints
- ``data.py`` for real-time data feed endpoints

- ``client.py`` for the core API machinery which is ``trio``-ized
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
  wrapping around ``ib_insync``.

- ``report.py`` for the hackery to build manual pp calcs
  to avoid ib's absolute bullshit FIFO style position
  tracking..

"""
from .api import (
    get_client,

@ -38,7 +33,10 @@ from .feed import (
    open_symbol_search,
    stream_quotes,
)
from .broker import trades_dialogue
from .broker import (
    trades_dialogue,
    norm_trade_records,
)

__all__ = [
    'get_client',
@@ -0,0 +1,187 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
``ib`` utilities and hacks suitable for use in the backend and/or as
runnable script-programs.

'''
from typing import Literal
import subprocess

import tractor

from piker.log import get_logger

# ``log`` is used by the timeout handler at the bottom of this module;
# mirror the logger setup used by the other backend packages.
log = get_logger(__name__)


_reset_tech: Literal[
    'vnc',
    'i3ipc_xdotool',

    # TODO: in theory we can use a different linux DE API or
    # some other type of similar window scanning/mgmt client
    # (on other OSs) to do the same.

] = 'vnc'


async def data_reset_hack(
    reset_type: str = 'data',

) -> bool:
    '''
    Run key combos for resetting data feeds and yield back to caller
    when complete.

    NOTE: this is a linux-only hack around!

    There are multiple "techs" you can use depending on your infra setup:

    - if running ib-gw in a container with a VNC server running the most
      performant method is the `'vnc'` option.

    - if running ib-gw/tws locally, and you are using `i3` you can use
      the ``i3ipc`` lib and ``xdotool`` to send the appropriate click
      and key-combos automatically to your local desktop's java X-apps.

    https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations

    TODOs:
    - a return type that hopefully determines if the hack was
      successful.
    - other OS support?
    - integration with ``ib-gw`` run in docker + Xorg?
    - is it possible to offer a local server that can be accessed by
      a client? Would sure be handy for running native java blobs
      that need to be wrangled.

    '''
    global _reset_tech

    match _reset_tech:
        case 'vnc':
            try:
                await tractor.to_asyncio.run_task(vnc_click_hack)
            except OSError:
                _reset_tech = 'i3ipc_xdotool'
                try:
                    i3ipc_xdotool_manual_click_hack()
                    return True
                except OSError:
                    return False

        case 'i3ipc_xdotool':
            i3ipc_xdotool_manual_click_hack()

        case _ as tech:
            raise RuntimeError(f'{tech} is not supported for reset tech!?')

    # we don't really need the ``xdotool`` approach any more B)
    return True


async def vnc_click_hack(
    reset_type: str = 'data'
) -> None:
    '''
    Reset the data or network connection for the VNC attached
    ib gateway using magic combos.

    '''
    key = {'data': 'f', 'connection': 'r'}[reset_type]

    import asyncvnc

    async with asyncvnc.connect(
        'localhost',
        port=3003,
        # password='ibcansmbz',
    ) as client:

        # move to middle of screen
        # 640x1800
        client.mouse.move(
            x=500,
            y=500,
        )
        client.mouse.click()
        client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked


def i3ipc_xdotool_manual_click_hack() -> None:
    import i3ipc

    i3 = i3ipc.Connection()
    t = i3.get_tree()

    orig_win_id = t.find_focused().window

    # for tws
    win_names: list[str] = [
        'Interactive Brokers',  # tws running in i3
        'IB Gateway',  # gw running in i3
        # 'IB',  # gw running in i3 (newer version?)
    ]

    for name in win_names:
        results = t.find_titled(name)
        print(f'results for {name}: {results}')
        if results:
            con = results[0]
            print(f'Resetting data feed for {name}')
            win_id = str(con.window)
            w, h = con.rect.width, con.rect.height

            # TODO: seems to be a few libs for python but not sure
            # if they support all the sub commands we need, order of
            # most recent commit history:
            # https://github.com/rr-/pyxdotool
            # https://github.com/ShaneHutter/pyxdotool
            # https://github.com/cphyc/pyxdotool

            # TODO: only run the reconnect (2nd) kc on a detected
            # disconnect?
            for key_combo, timeout in [
                # only required if we need a connection reset.
                # ('ctrl+alt+r', 12),
                # data feed reset.
                ('ctrl+alt+f', 6)
            ]:
                subprocess.call([
                    'xdotool',
                    'windowactivate', '--sync', win_id,

                    # move mouse to bottom left of window (where there should
                    # be nothing to click).
                    'mousemove_relative', '--sync', str(w-4), str(h-4),

                    # NOTE: we may need to stick a `--retry 3` in here..
                    'click', '--window', win_id,
                    '--repeat', '3', '1',

                    # hackzorzes
                    'key', key_combo,
                    ],
                    timeout=timeout,
                )

    # re-activate and focus original window
    try:
        subprocess.call([
            'xdotool',
            'windowactivate', '--sync', str(orig_win_id),
            'click', '--window', str(orig_win_id), '1',
        ])
    except subprocess.TimeoutExpired:
        log.exception('xdotool timed out?')
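As a rough caller sketch (everything here is hypothetical glue, not
piker's actual feed code; the real backend invokes this reset on
detected feed hangs / pacing violations):

.. code:: python

    # hypothetical retry loop around a bars request which triggers the
    # gateway reset combo when no data comes back.
    import trio

    async def bars_with_reset(client, fqsn: str):
        for _attempt in range(3):
            bars = await client.bars(fqsn)
            if bars is not None:
                return bars
            # stale/hung feed? ask ib-gw to reset its data connections
            await data_reset_hack(reset_type='data')
            await trio.sleep(2)
        raise RuntimeError(f'no bars for {fqsn} after feed resets')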
(4 file diffs suppressed because they are too large)
@@ -0,0 +1,64 @@
``kraken`` backend
------------------
though they don't have the most liquidity of all the cexes they sure are
accommodating to those of us who appreciate a little ``xmr``.

status
******
current support is *production grade* and both real-time data and order
management should be correct and fast. this backend is used by core devs
for live trading.


config
******
In order to get order mode support your ``brokers.toml``
needs to have something like the following:

.. code:: toml

    [kraken]
    accounts.spot = 'spot'
    key_descr = "spot"
    api_key = "69696969696969696696969696969696969696969696969696969696"
    secret = "BOOBSBOOBSBOOBSBOOBSBOOBSSMBZ69696969696969669969696969696"


If everything works correctly you should see any current positions
loaded in the pps pane on chart load and you should also be able to
check your trade records in the file::

    <pikerk_conf_dir>/ledgers/trades_kraken_spot.toml


An example ledger file will have entries written verbatim from the
trade events schema:

.. code:: toml

    [TFJBKK-SMBZS-VJ4UWS]
    ordertxid = "SMBZSA-7CNQU-3HWLNJ"
    postxid = "SMBZSE-M7IF5-CFI7LT"
    pair = "XXMRZEUR"
    time = 1655691993.4133966
    type = "buy"
    ordertype = "limit"
    price = "103.97000000"
    cost = "499.99999977"
    fee = "0.80000000"
    vol = "4.80907954"
    margin = "0.00000000"
    misc = ""


your ``pps.toml`` file will have position entries like:

.. code:: toml

    [kraken.spot."xmreur.kraken"]
    size = 4.80907954
    ppu = 103.97000000
    bsuid = "XXMRZEUR"
    clears = [
        { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
    ]
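those two files tie together: the position's ``ppu`` (price-per-unit)
is just the per-unit cost basis implied by the ledger fill above. a
quick illustrative check in python (numbers copied from the example
entries, nothing here is part of the piker API):

.. code:: python

    # per-unit cost basis from the kraken ledger entry above
    cost = 499.99999977  # quote (EUR) cost of the fill
    vol = 4.80907954     # base (XMR) volume filled

    ppu = cost / vol
    print(round(ppu, 2))  # -> 103.97, the ``ppu`` in pps.toml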
@@ -0,0 +1,61 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Kraken backend.

Sub-modules within break into the core functionalities:

- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery wrapping the kraken
  rest and websocket endpoints in ``trio``-ized callables.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import (
    trades_dialogue,
    norm_trade_records,
)


__all__ = [
    'get_client',
    'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    'norm_trade_records',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]
@@ -0,0 +1,621 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Kraken web API wrapping.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
import itertools
from typing import (
    Any,
    Optional,
)
import time

from bidict import bidict
import pendulum
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import urllib.parse
import hashlib
import hmac
import base64
import trio

from piker import config
from piker.data.types import Struct
from piker.data._source import Symbol
from piker.brokers._util import (
    resproc,
    SymbolNotFound,
    BrokerError,
    DataThrottle,
)
from piker.pp import Transaction
from . import log

# <uri>/<version>/
_url = 'https://api.kraken.com/0'


# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('count', int),
    ('bar_wap', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
    'tick_decimals': 'pair_decimals',
}


def get_config() -> dict[str, Any]:

    conf, path = config.load()
    section = conf.get('kraken')

    if section is None:
        log.warning(f'No config section found for kraken in {path}')
        return {}

    return section


def get_kraken_signature(
    urlpath: str,
    data: dict[str, Any],
    secret: str
) -> str:
    postdata = urllib.parse.urlencode(data)
    encoded = (str(data['nonce']) + postdata).encode()
    message = urlpath.encode() + hashlib.sha256(encoded).digest()

    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
    sigdigest = base64.b64encode(mac.digest())
    return sigdigest.decode()


class InvalidKey(ValueError):
    '''
    EAPI:Invalid key
    This error is returned when the API key used for the call is
    either expired or disabled, please review the API key in your
    Settings -> API tab of account management or generate a new one
    and update your application.

    '''


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    cost_decimals: int
    costmin: float
    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
    tick_size: float  # min price step size
    status: str

    short_position_limit: float = 0
    long_position_limit: float = float('inf')


class Client:

    # global symbol normalization table
    _ntable: dict[str, str] = {}
    _atable: bidict[str, str] = bidict()
    _pairs: dict[str, Pair] = {}

    def __init__(
        self,
        config: dict[str, str],
        name: str = '',
        api_key: str = '',
        secret: str = ''
    ) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
        self.conf: dict[str, str] = config
        self._name = name
        self._api_key = api_key
        self._secret = secret

    @property
    def pairs(self) -> dict[str, Pair]:
        if self._pairs is None:
            raise RuntimeError(
                "Make sure to run `cache_symbols()` on startup!"
            )
        # retrieve and cache all symbols

        return self._pairs

    async def _public(
        self,
        method: str,
        data: dict,
    ) -> dict[str, Any]:
        resp = await self._sesh.post(
            path=f'/public/{method}',
            json=data,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def _private(
        self,
        method: str,
        data: dict,
        uri_path: str
    ) -> dict[str, Any]:
        headers = {
            'Content-Type':
                'application/x-www-form-urlencoded',
            'API-Key':
                self._api_key,
            'API-Sign':
                get_kraken_signature(uri_path, data, self._secret)
        }
        resp = await self._sesh.post(
            path=f'/private/{method}',
            data=data,
            headers=headers,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def endpoint(
        self,
        method: str,
        data: dict[str, Any]

    ) -> dict[str, Any]:
        uri_path = f'/0/private/{method}'
        data['nonce'] = str(int(1000*time.time()))
        return await self._private(method, data, uri_path)

    async def get_balances(
        self,
    ) -> dict[str, float]:
        '''
        Return the set of asset balances for this account
        by symbol.

        '''
        resp = await self.endpoint(
            'Balance',
            {},
        )
        by_bsuid = resp['result']
        return {
            self._atable[sym].lower(): float(bal)
            for sym, bal in by_bsuid.items()
        }

    async def get_assets(self) -> dict[str, dict]:
        resp = await self._public('Assets', {})
        return resp['result']

    async def cache_assets(self) -> None:
        assets = self.assets = await self.get_assets()
        for bsuid, info in assets.items():
            self._atable[bsuid] = info['altname']

    async def get_trades(
        self,
        fetch_limit: int | None = None,

    ) -> dict[str, Any]:
        '''
        Get the trades (aka cleared orders) history from the rest endpoint:
        https://docs.kraken.com/rest/#operation/getTradeHistory

        '''
        ofs = 0
        trades_by_id: dict[str, Any] = {}

        for i in itertools.count():
            if (
                fetch_limit
                and i >= fetch_limit
            ):
                break

            # increment 'ofs' pagination offset
            ofs = i*50

            resp = await self.endpoint(
                'TradesHistory',
                {'ofs': ofs},
            )
            by_id = resp['result']['trades']
            trades_by_id.update(by_id)

            # can get up to 50 results per query, see:
            # https://docs.kraken.com/rest/#tag/User-Data/operation/getTradeHistory
            if (
                len(by_id) < 50
            ):
                err = resp.get('error')
                if err:
                    raise BrokerError(err)

                # we received fewer than the max amount of trade
                # results per query so this must be the end of the
                # history; grab the total count and stop paginating.
                count = resp['result']['count']
                break

        # sanity check on update
        assert count == len(trades_by_id.values())
        return trades_by_id

    async def get_xfers(
        self,
        asset: str,
        src_asset: str = '',

    ) -> dict[str, Transaction]:
        '''
        Get asset balance transfer transactions.

        Currently only withdrawals are supported.

        '''
        xfers: list[dict] = (await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
        ))['result']

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
        #     'XXBT', 'refid': 'AGBJRMB-JHD2M4-NDI3NR', 'txid':
        #     'b95d66d3bb6fd76cbccb93f7639f99a505cb20752c62ea0acc093a0e46547c44',
        #     'info': 'bc1qc8enqjekwppmw3g80p56z5ns7ze3wraqk5rl9z',
        #     'amount': '0.00300726', 'fee': '0.00001000', 'time':
        #     1658347714, 'status': 'Success'}]}

        trans: dict[str, Transaction] = {}
        for entry in xfers:

            # look up the normalized name and asset info
            asset_key = entry['asset']
            asset_info = self.assets[asset_key]
            asset = self._atable[asset_key].lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commissions cost necessarily..)
            cost = float(entry['fee'])

            fqsn = asset + '.kraken'
            pairinfo = Symbol.from_fqsn(
                fqsn,
                info={
                    'asset_type': 'crypto',
                    'lot_tick_size': asset_info['decimals'],
                },
            )

            tran = Transaction(
                fqsn=fqsn,
                sym=pairinfo,
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
                bsuid=f'{asset}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +
                    cost
                ),
                # since this will be treated as a "sell" it
                # shouldn't be needed to compute the be price.
                price='NaN',

                # XXX: see note above
                cost=cost,
            )
            trans[tran.tid] = tran

        return trans

    async def submit_limit(
        self,
        symbol: str,
        price: float,
        action: str,
        size: float,
        reqid: str = None,
        validate: bool = False  # set True test call without a real submission

    ) -> dict:
        '''
        Place an order and return integer request id provided by client.

        '''
        # Build common data dict for common keys from both endpoints
        data = {
            "pair": symbol,
            "price": str(price),
            "validate": validate
        }
        if reqid is None:
            # Build order data for kraken api
            data |= {
                "ordertype": "limit",
                "type": action,
                "volume": str(size),
            }
            return await self.endpoint('AddOrder', data)

        else:
            # Edit order data for kraken api
            data["txid"] = reqid
            return await self.endpoint('EditOrder', data)

    async def submit_cancel(
        self,
        reqid: str,
    ) -> dict:
        '''
        Send cancel request for order id ``reqid``.

        '''
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})

    async def symbol_info(
        self,
        pair: Optional[str] = None,

    ) -> dict[str, Pair] | Pair:

        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return Pair(**data)
        else:
            return {key: Pair(**data) for key, data in pairs.items()}

    async def cache_symbols(self) -> dict:
        '''
        Load all market pair info build and cache it for downstream use.

        A ``._ntable: dict[str, str]`` is available for mapping the
        websocket pair name-keys and their http endpoint API (smh)
        equivalents to the "alternative name" which is generally the one
        we actually want to use XD

        '''
        if not self._pairs:
            self._pairs.update(await self.symbol_info())

            # table of all ws and rest keys to their alt-name values.
            ntable: dict[str, str] = {}

            for rest_key in list(self._pairs.keys()):

                pair: Pair = self._pairs[rest_key]
                altname = pair.altname
                wsname = pair.wsname
                ntable[rest_key] = ntable[wsname] = altname

                # register the pair under all monikers, a giant flat
                # surjection of all possible names to each info obj.
                self._pairs[altname] = self._pairs[wsname] = pair

            self._ntable.update(ntable)

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,

    ) -> dict[str, Any]:
        '''
        Search for a symbol by "alt name"..

        It is expected that the ``Client._pairs`` table
        gets populated before conducting the underlying fuzzy-search
        over the pair-key set.

        '''
        if not len(self._pairs):
            await self.cache_symbols()
            assert self._pairs, '`Client.cache_symbols()` was never called!?'

        matches = fuzzy.extractBests(
            pattern,
            self._pairs,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0].altname: item[0] for item in matches}

    async def bars(
        self,
        symbol: str = 'XBTUSD',

        # UTC 2017-07-02 12:53:20
        since: int | datetime | None = None,
        count: int = 720,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if since is None:
            since = pendulum.now('UTC').start_of('minute').subtract(
                minutes=count).timestamp()

        elif isinstance(since, int):
            since = pendulum.from_timestamp(since).timestamp()

        else:  # presumably a pendulum datetime
            since = since.timestamp()

        # UTC 2017-07-02 12:53:20 is oldest seconds value
        since = str(max(1499000000, int(since)))
        json = await self._public(
            'OHLC',
            data={
                'pair': symbol,
                'since': since,
            },
        )
        try:
            res = json['result']
            res.pop('last')
            bars = next(iter(res.values()))

            new_bars = []

            first = bars[0]
            last_nz_vwap = first[-3]
            if last_nz_vwap == 0:
                # use close if vwap is zero
                last_nz_vwap = first[-4]

            # convert all fields to native types
            for i, bar in enumerate(bars):
                # normalize weird zero-ed vwap values..cmon kraken..
                # indicates vwap didn't change since last bar
                vwap = float(bar.pop(-3))
                if vwap != 0:
                    last_nz_vwap = vwap
                if vwap == 0:
                    vwap = last_nz_vwap

                # re-insert vwap as the last of the fields
                bar.append(vwap)

                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            return array
        except KeyError:
            errmsg = json['error'][0]

            if 'not found' in errmsg:
                raise SymbolNotFound(errmsg + f': {symbol}')

            elif 'Too many requests' in errmsg:
                raise DataThrottle(f'{symbol}')

            else:
                raise BrokerError(errmsg)

    @classmethod
    def normalize_symbol(
        cls,
        ticker: str
    ) -> tuple[str, Pair]:
        '''
        Normalize symbol names to a 3x3 pair from the global
        definition map which we build out from the data retrieved from
        the 'AssetPairs' endpoint, see methods above.

        '''
        ticker = cls._ntable[ticker]
        return ticker.lower(), cls._pairs[ticker]


@acm
async def get_client() -> Client:

    conf = get_config()
    if conf:
        client = Client(
            conf,
            name=conf['key_descr'],
            api_key=conf['api_key'],
            secret=conf['secret']
        )
    else:
        client = Client({})

    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as nurse:
        nurse.start_soon(client.cache_assets)
        await client.cache_symbols()

    yield client
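As a rough usage sketch of the client above (illustrative only; it
assumes a piker install where this module lands at
``piker.brokers.kraken.api``):

.. code:: python

    # pull the kraken client and request the most recent 1m bars;
    # public endpoints work even without api creds in ``brokers.toml``.
    import trio
    from piker.brokers.kraken.api import get_client

    async def main() -> None:
        async with get_client() as client:
            bars = await client.bars('XBTUSD', count=720)
            print(bars[-1])  # latest (index, time, ohlc, ...) row

    trio.run(main)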
(file diff suppressed because it is too large)
@@ -0,0 +1,459 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Real-time and historical data feed endpoints.

'''
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import (
    Any,
    Optional,
    Callable,
)
import time

from async_generator import aclosing
from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
import tractor
import trio

from piker._cacheables import open_cached_client
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
from piker.log import get_console_log
from piker.data.types import Struct
from piker.data._web_bs import open_autorecon_ws, NoBsWs
from . import log
from .api import (
    Client,
    Pair,
)


class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
    https://docs.kraken.com/websockets/#message-ohlc

    '''
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1 (name-interval)
    pair: str  # fx pair
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
    high: float  # High price within interval
    low: float  # Low price within interval
    close: float  # Close price of interval
    vwap: float  # Volume weighted average price within interval
    volume: float  # Accumulated volume **within interval**
    count: int  # Number of trades within interval
    # (sampled) generated tick data
    ticks: list[Any] = []


async def stream_messages(
    ws: NoBsWs,
):
    '''
    Message stream parser and heartbeat handler.

    Deliver ws subscription messages as well as handle heartbeat logic
    through a single async generator.

    '''
    too_slow_count = last_hb = 0

    while True:

        with trio.move_on_after(5) as cs:
            msg = await ws.recv_msg()

        # trigger reconnection if heartbeat is laggy
        if cs.cancelled_caught:

            too_slow_count += 1

            if too_slow_count > 20:
                log.warning(
                    "Heartbeat is too slow, resetting ws connection")

                await ws._connect()
                too_slow_count = 0
                continue

        match msg:
            case {'event': 'heartbeat'}:
                now = time.time()
                delay = now - last_hb
                last_hb = now

                # XXX: why tf is this not printing without --tl flag?
                log.debug(f"Heartbeat after {delay}")
                # print(f"Heartbeat after {delay}")

                continue

            case _:
                # passthrough sub msgs
                yield msg


async def process_data_feed_msgs(
    ws: NoBsWs,
):
    '''
    Parse and pack data feed messages.

    '''
    async for msg in stream_messages(ws):
        match msg:
            case {
                'errorMessage': errmsg
            }:
                raise BrokerError(errmsg)

            case {
                'event': 'subscriptionStatus',
            } as sub:
                log.info(
                    'WS subscription is active:\n'
                    f'{sub}'
                )
                continue

            case [
                chan_id,
                *payload_array,
                chan_name,
                pair
            ]:
                if 'ohlc' in chan_name:
                    ohlc = OHLC(
                        chan_id,
                        chan_name,
                        pair,
                        *payload_array[0]
                    )
                    ohlc.typecast()
                    yield 'ohlc', ohlc

                elif 'spread' in chan_name:

                    bid, ask, ts, bsize, asize = map(
                        float, payload_array[0])

                    # TODO: really makes you think IB has a horrible API...
                    quote = {
                        'symbol': pair.replace('/', ''),
                        'ticks': [
                            {'type': 'bid', 'price': bid, 'size': bsize},
                            {'type': 'bsize', 'price': bid, 'size': bsize},

                            {'type': 'ask', 'price': ask, 'size': asize},
                            {'type': 'asize', 'price': ask, 'size': asize},
                        ],
                    }
                    yield 'l1', quote

                # elif 'book' in msg[-2]:
                #     chan_id, *payload_array, chan_name, pair = msg
                #     print(msg)

            case _:
                print(f'UNHANDLED MSG: {msg}')
                # yield msg


def normalize(
    ohlc: OHLC,

) -> dict:
    quote = ohlc.to_dict()
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

    # seriously eh? what's with this non-symmetry everywhere
    # in subscription systems...
    # XXX: piker style is always lowercases symbols.
    topic = quote['pair'].replace('/', '').lower()

    # print(quote)
    return topic, quote


@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:

        # lol, kraken won't send any more than the "last"
        # 720 1m bars.. so we have to just ignore further
        # requests of this type..
        queries: int = 0

        async def get_ohlc(
            timeframe: float,
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:

            nonlocal queries
            if (
                queries > 0
                or timeframe != 60
            ):
                raise DataUnavailable(
                    'Only a single query for 1m bars supported')

            count = 0
            while count <= 3:
                try:
                    array = await client.bars(
                        symbol,
                        since=end_dt,
                    )
                    count += 1
                    queries += 1
                    break
                except DataThrottle:
                    log.warning(f'kraken OHLC throttle for {symbol}')
                    await trio.sleep(1)

            start_dt = pendulum.from_timestamp(array[0]['time'])
            end_dt = pendulum.from_timestamp(array[-1]['time'])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 1, 'rate': 1}


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # backend specific
    sub_type: str = 'ohlc',

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Subscribe for ohlc stream of quotes for ``pairs``.

    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    ws_pairs = {}
    sym_infos = {}

    async with open_cached_client('kraken') as client, send_chan as send_chan:

        # keep client cached for real-time section
        for sym in symbols:

            # transform to upper since piker style is always lower
            sym = sym.upper()
            si: Pair = await client.symbol_info(sym)
            # try:
            #     si = Pair(**sym_info)  # validation
            # except TypeError:
            #     fields_diff = set(sym_info) - set(Pair.__struct_fields__)
            #     raise TypeError(
            #         f'Missing msg fields {fields_diff}'
            #     )
            syminfo = si.to_dict()
            syminfo['price_tick_size'] = 1. / 10**si.pair_decimals
            syminfo['lot_tick_size'] = 1. / 10**si.lot_decimals
            syminfo['asset_type'] = 'crypto'
            sym_infos[sym] = syminfo
            ws_pairs[sym] = si.wsname

        symbol = symbols[0].lower()

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: NoBsWs):

            # XXX: setup subs
            # https://docs.kraken.com/websockets/#message-subscribe
            # specific logic for this in kraken's sync client:
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
            ohlc_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'ohlc',
                    'interval': 1,
                },
            }

            # TODO: we want to eventually allow unsubs which should
            # be completely fine to request from a separate task
            # since internally the ws methods appear to be FIFO
            # locked.
            await ws.send_msg(ohlc_sub)

            # trade data (aka L1)
            l1_sub = {
                'event': 'subscribe',
                'pair': list(ws_pairs.values()),
                'subscription': {
                    'name': 'spread',
                    # 'depth': 10}
                },
            }

            # pull a first quote and deliver
            await ws.send_msg(l1_sub)

            yield

            # unsub from all pairs on teardown
            if ws.connected():
                await ws.send_msg({
                    'pair': list(ws_pairs.values()),
                    'event': 'unsubscribe',
                    'subscription': ['ohlc', 'spread'],
                })

                # XXX: do we need to ack the unsub?
                # await ws.recv_msg()

        # see the tips on reconnection logic:
        # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
        ws: NoBsWs
        async with (
            open_autorecon_ws(
                'wss://ws.kraken.com/',
                fixture=subscribe,
            ) as ws,
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
        ):
            # pull a first quote and deliver
            typ, ohlc_last = await anext(msg_gen)
            topic, quote = normalize(ohlc_last)

            task_status.started((init_msgs, quote))

            # lol, only "closes" when they're margin squeezing clients ;P
            feed_is_live.set()

            # keep start of last interval for volume tracking
            last_interval_start = ohlc_last.etime

            # start streaming
            async for typ, ohlc in msg_gen:

                if typ == 'ohlc':

                    # TODO: can get rid of all this by using
                    # ``trades`` subscription...

                    # generate tick values to match time & sales pane:
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
                    volume = ohlc.volume

                    # new OHLC sample interval
                    if ohlc.etime > last_interval_start:
                        last_interval_start = ohlc.etime
                        tick_volume = volume

                    else:
                        # this is the tick volume *within the interval*
                        tick_volume = volume - ohlc_last.volume

                    ohlc_last = ohlc
                    last = ohlc.close

                    if tick_volume:
                        ohlc.ticks.append({
                            'type': 'trade',
                            'price': last,
                            'size': tick_volume,
                        })

                    topic, quote = normalize(ohlc)

                elif typ == 'l1':
                    quote = ohlc
                    topic = quote['symbol'].lower()

                await send_chan.send({topic: quote})


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send({
                    pair[0].altname: pair[0]
                    for pair in matches
                })
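The ``subscribe`` fixture above is just the documented kraken ws
handshake, so you can exercise the same ``ohlc`` subscription outside
piker with any ws client. A minimal sketch, assuming the third-party
``trio-websocket`` lib (piker itself uses its own ``NoBsWs`` wrapper):

.. code:: python

    # subscribe for 1m ohlc updates on XBT/USD and print a few msgs.
    import json
    import trio
    from trio_websocket import open_websocket_url

    async def main() -> None:
        async with open_websocket_url('wss://ws.kraken.com/') as ws:
            await ws.send_message(json.dumps({
                'event': 'subscribe',
                'pair': ['XBT/USD'],
                'subscription': {'name': 'ohlc', 'interval': 1},
            }))
            for _ in range(5):
                print(json.loads(await ws.get_message()))

    trio.run(main)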
@@ -18,3 +18,9 @@
Market machinery for order executions, book, management.

"""
from ._client import open_ems


__all__ = [
    'open_ems',
]
@@ -22,54 +22,10 @@ from enum import Enum
from typing import Optional

from bidict import bidict
from pydantic import BaseModel, validator

from ..data._source import Symbol
from ._messages import BrokerdPosition, Status


class Position(BaseModel):
    '''
    Basic pp (personal position) model with attached fills history.

    This type should be IPC wire ready?

    '''
    symbol: Symbol

    # last size and avg entry price
    size: float
    avg_price: float  # TODO: contextual pricing

    # ordered record of known constituent trade messages
    fills: list[Status] = []

    def update_from_msg(
        self,
        msg: BrokerdPosition,

    ) -> None:

        # XXX: better place to do this?
        symbol = self.symbol

        lot_size_digits = symbol.lot_size_digits
        avg_price, size = (
            round(msg['avg_price'], ndigits=symbol.tick_size_digits),
            round(msg['size'], ndigits=lot_size_digits),
        )

        self.avg_price = avg_price
        self.size = size

    @property
    def dsize(self) -> float:
        '''
        The "dollar" size of the pp, normally in trading (fiat) unit
        terms.

        '''
        return self.avg_price * self.size
from ..data.types import Struct
from ..pp import Position


_size_units = bidict({

@@ -84,34 +40,9 @@ SizeUnit = Enum(
)


class Allocator(BaseModel):

    class Config:
        validate_assignment = True
        copy_on_model_validation = False
        arbitrary_types_allowed = True

        # required to get the account validator lookup working?
        extra = 'allow'
        underscore_attrs_are_private = False
class Allocator(Struct):

    symbol: Symbol
    account: Optional[str] = 'paper'
    # TODO: for enums this clearly doesn't fucking work, you can't set
    # a default at startup by passing in a `dict` but yet you can set
    # that value through assignment..for wtv cucked reason.. honestly, pure
    # unintuitive garbage.
    size_unit: str = 'currency'
    _size_units: dict[str, Optional[str]] = _size_units

    @validator('size_unit', pre=True)
    def maybe_lookup_key(cls, v):
        # apply the corresponding enum key for the text "description" value
        if v not in _size_units:
            return _size_units.inverse[v]

        assert v in _size_units
        return v

    # TODO: if we ever want to support non-uniform entry-slot-proportion
    # "sizes"

@@ -120,6 +51,28 @@ class Allocator(BaseModel):
    units_limit: float
    currency_limit: float
    slots: int
    account: Optional[str] = 'paper'

    _size_units: bidict[str, Optional[str]] = _size_units

    # TODO: for enums this clearly doesn't fucking work, you can't set
    # a default at startup by passing in a `dict` but yet you can set
    # that value through assignment..for wtv cucked reason.. honestly, pure
    # unintuitive garbage.
    _size_unit: str = 'currency'

    @property
    def size_unit(self) -> str:
        return self._size_unit

    @size_unit.setter
    def size_unit(self, v: str) -> Optional[str]:
        if v not in _size_units:
            v = _size_units.inverse[v]

        assert v in _size_units
        self._size_unit = v
        return v

    def step_sizes(
        self,

@@ -140,10 +93,13 @@ class Allocator(BaseModel):
        else:
            return self.units_limit

    def limit_info(self) -> tuple[str, float]:
        return self.size_unit, self.limit()

    def next_order_info(
        self,

        # we only need a startup size for exit calcs, we can the
        # we only need a startup size for exit calcs, we can then
        # determine how large slots should be if the initial pp size was
        # larger then the current live one, and the live one is smaller
        # then the initial config settings.

@@ -173,7 +129,7 @@ class Allocator(BaseModel):
            l_sub_pp = self.units_limit - abs_live_size

        elif size_unit == 'currency':
            live_cost_basis = abs_live_size * live_pp.avg_price
            live_cost_basis = abs_live_size * live_pp.ppu
            slot_size = currency_per_slot / price
            l_sub_pp = (self.currency_limit - live_cost_basis) / price

@@ -184,12 +140,14 @@ class Allocator(BaseModel):

        # an entry (adding-to or starting a pp)
        if (
            action == 'buy' and live_size > 0 or
            action == 'sell' and live_size < 0 or
            live_size == 0
            or (action == 'buy' and live_size > 0)
            or action == 'sell' and live_size < 0
        ):

            order_size = min(slot_size, l_sub_pp)
            order_size = min(
                slot_size,
                max(l_sub_pp, 0),
            )

        # an exit (removing-from or going to net-zero pp)
        else:

@@ -205,7 +163,7 @@ class Allocator(BaseModel):
            if size_unit == 'currency':
                # compute the "projected" limit's worth of units at the
                # current pp (weighted) price:
                slot_size = currency_per_slot / live_pp.avg_price
                slot_size = currency_per_slot / live_pp.ppu

            else:
                slot_size = u_per_slot

@@ -244,7 +202,12 @@ class Allocator(BaseModel):
        if order_size < slot_size:
            # compute a fractional slots size to display
            slots_used = self.slots_used(
                Position(symbol=sym, size=order_size, avg_price=price)
                Position(
                    symbol=sym,
                    size=order_size,
                    ppu=price,
                    bsuid=sym,
                )
            )

        return {

@@ -271,8 +234,8 @@ class Allocator(BaseModel):
        abs_pp_size = abs(pp.size)

        if self.size_unit == 'currency':
            # live_currency_size = size or (abs_pp_size * pp.avg_price)
            live_currency_size = abs_pp_size * pp.avg_price
            # live_currency_size = size or (abs_pp_size * pp.ppu)
            live_currency_size = abs_pp_size * pp.ppu
            prop = live_currency_size / self.currency_limit

        else:

@@ -284,14 +247,6 @@ class Allocator(BaseModel):
        return round(prop * self.slots)


_derivs = (
    'future',
    'continuous_future',
    'option',
    'futures_option',
)


def mk_allocator(

    symbol: Symbol,

@@ -300,7 +255,7 @@ def mk_allocator(
    # default allocation settings
    defaults: dict[str, float] = {
        'account': None,  # select paper by default
        'size_unit': 'currency',
        # 'size_unit': 'currency',
        'units_limit': 400,
        'currency_limit': 5e3,
        'slots': 4,

@@ -318,42 +273,9 @@ def mk_allocator(
        'currency_limit': 6e3,
        'slots': 6,
    }

    defaults.update(user_def)

    alloc = Allocator(
    return Allocator(
        symbol=symbol,
        **defaults,
    )

    asset_type = symbol.type_key

    # specific configs by asset class / type

    if asset_type in _derivs:
        # since it's harder to know how currency "applies" in this case
        # given leverage properties
        alloc.size_unit = '# units'

        # set units limit to slots size thus making the next
        # entry step 1.0
        alloc.units_limit = alloc.slots

    # if the current position is already greater than the limit
    # settings, increase the limit to the current position
    if alloc.size_unit == 'currency':
        startup_size = startup_pp.size * startup_pp.avg_price

        if startup_size > alloc.currency_limit:
            alloc.currency_limit = round(startup_size, ndigits=2)

    else:
        startup_size = abs(startup_pp.size)

        if startup_size > alloc.units_limit:
            alloc.units_limit = startup_size

    if asset_type in _derivs:
        alloc.slots = alloc.units_limit

    return alloc
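The slot sizing above is simple arithmetic: in currency mode each entry
order is the per-slot currency allotment divided by the current price.
An illustrative check using the ``currency_limit`` of 5e3 and 4
``slots`` from the defaults in this diff (the price is made up):

.. code:: python

    # worked example of the allocator's currency-unit slot sizing
    currency_limit = 5e3  # total currency allocatable to this symbol
    slots = 4             # number of entry steps
    price = 2_500.0       # hypothetical last price

    currency_per_slot = currency_limit / slots  # -> 1250.0
    slot_size = currency_per_slot / price       # -> 0.5 units per entry
    print(currency_per_slot, slot_size)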
@@ -18,26 +18,35 @@
Orders and execution client API.

"""
from __future__ import annotations
from contextlib import asynccontextmanager as acm
from typing import Dict
from pprint import pformat
from dataclasses import dataclass, field
from typing import TYPE_CHECKING

import trio
import tractor
from tractor.trionics import broadcast_receiver

from ..log import get_logger
from ._ems import _emsd_main
from .._daemon import maybe_open_emsd
from ._messages import Order, Cancel
from ..data.types import Struct
from ..service import maybe_open_emsd
from ._messages import (
    Order,
    Cancel,
)
from ..brokers import get_brokermod

if TYPE_CHECKING:
    from ._messages import (
        BrokerdPosition,
        Status,
    )


log = get_logger(__name__)


@dataclass
class OrderBook:
class OrderBook(Struct):
    '''EMS-client-side order book ctl and tracking.

    A style similar to "model-view" is used here where this api is

@@ -52,20 +61,18 @@ class OrderBook:
    # mem channels used to relay order requests to the EMS daemon
    _to_ems: trio.abc.SendChannel
    _from_order_book: trio.abc.ReceiveChannel

    _sent_orders: Dict[str, Order] = field(default_factory=dict)
    _ready_to_receive: trio.Event = trio.Event()
    _sent_orders: dict[str, Order] = {}

    def send(
        self,
        msg: Order,
        msg: Order | dict,

    ) -> dict:
        self._sent_orders[msg.oid] = msg
        self._to_ems.send_nowait(msg.dict())
        self._to_ems.send_nowait(msg)
        return msg

    def update(
    def send_update(
        self,

        uuid: str,

@@ -73,9 +80,8 @@ class OrderBook:

    ) -> dict:
        cmd = self._sent_orders[uuid]
        msg = cmd.dict()
        msg.update(data)
        self._sent_orders[uuid] = Order(**msg)
        msg = cmd.copy(update=data)
        self._sent_orders[uuid] = msg
        self._to_ems.send_nowait(msg)
        return cmd

@@ -83,12 +89,18 @@ class OrderBook:
        """Cancel an order (or alert) in the EMS.

        """
        cmd = self._sent_orders[uuid]
        cmd = self._sent_orders.get(uuid)
        if not cmd:
            log.error(
                f'Unknown order {uuid}!?\n'
                f'Maybe there is a stale entry or line?\n'
                f'You should report this as a bug!'
            )
        msg = Cancel(
            oid=uuid,
            symbol=cmd.symbol,
        )
        self._to_ems.send_nowait(msg.dict())
        self._to_ems.send_nowait(msg)


_orders: OrderBook = None

@@ -149,21 +161,36 @@ async def relay_order_cmds_from_sync_code(
    book = get_orders()
    async with book._from_order_book.subscribe() as orders_stream:
        async for cmd in orders_stream:
            if cmd['symbol'] == symbol_key:
                log.info(f'Send order cmd:\n{pformat(cmd)}')
            sym = cmd.symbol
            msg = pformat(cmd)
            if sym == symbol_key:
                log.info(f'Send order cmd:\n{msg}')
                # send msg over IPC / wire
                await to_ems_stream.send(cmd)
            else:
                log.warning(
                    f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
                    f'\n{msg}'
                )


@acm
async def open_ems(
    fqsn: str,
    mode: str = 'live',
    loglevel: str = 'error',

) -> (
) -> tuple[
    OrderBook,
    tractor.MsgStream,
    dict,
):
    dict[
        # brokername, acctid
        tuple[str, str],
        list[BrokerdPosition],
    ],
    list[str],
    dict[str, Status],
]:
    '''
    Spawn an EMS daemon and begin sending orders and receiving
    alerts.

@@ -206,18 +233,36 @@ async def open_ems(

    async with maybe_open_emsd(broker) as portal:

        mod = get_brokermod(broker)
        if (
            not getattr(mod, 'trades_dialogue', None)
            or mode == 'paper'
        ):
            mode = 'paper'

        from ._ems import _emsd_main
        async with (
            # connect to emsd
            portal.open_context(

                _emsd_main,
                fqsn=fqsn,
                exec_mode=mode,
                loglevel=loglevel,

            ) as (ctx, (positions, accounts)),
            ) as (
                ctx,
                (
                    positions,
                    accounts,
                    dialogs,
                )
            ),

            # open 2-way trade command stream
            ctx.open_stream() as trades_stream,
        ):
            # start sync code order msg delivery task
            async with trio.open_nursery() as n:
                n.start_soon(
                    relay_order_cmds_from_sync_code,

@@ -225,4 +270,10 @@ async def open_ems(
                    trades_stream
                )

                yield book, trades_stream, positions, accounts
                yield (
                    book,
                    trades_stream,
                    positions,
                    accounts,
                    dialogs,
                )
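With the widened return signature above, callers now unpack five values
from ``open_ems`` instead of four. A rough consumer sketch (the fqsn is
a made-up example; order submission elided):

.. code:: python

    # sketch: open an EMS session and dump preloaded pp/account state.
    from piker.clearing._client import open_ems

    async def watch(fqsn: str = 'xbtusd.kraken') -> None:
        async with open_ems(fqsn) as (
            book,           # OrderBook: client-side order ctl
            trades_stream,  # tractor.MsgStream of ems status msgs
            positions,      # {(broker, acctid): [BrokerdPosition, ..]}
            accounts,       # list[str] of account names
            dialogs,        # {oid: Status} pre-existing order dialogs
        ):
            print(accounts, positions)
            async for msg in trades_stream:
                print(msg)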
(file diff suppressed because it is too large)
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@@ -15,108 +15,162 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Clearing system messaging types and protocols.
Clearing sub-system messages and protocols.

"""
from typing import Optional, Union
# from collections import (
#     ChainMap,
#     deque,
# )
from typing import (
    Optional,
    Literal,
)

# TODO: try out just encoding/send direction for now?
# import msgspec
from pydantic import BaseModel
from msgspec import field

from ..data._source import Symbol
from ..data.types import Struct


# TODO: a composite for tracking msg flow on 2-legged
# dialogs.
# class Dialog(ChainMap):
#     '''
#     Msg collection abstraction to easily track the state changes of
#     a msg flow in one high level, query-able and immutable construct.

#     The main use case is to query data from a (long-running)
#     msg-transaction-sequence


#     '''
#     def update(
#         self,
#         msg,
#     ) -> None:
#         self.maps.insert(0, msg.to_dict())

#     def flatten(self) -> dict:
#         return dict(self)


# TODO: ``msgspec`` stuff worth paying attention to:
# - schema evolution:
#   https://jcristharif.com/msgspec/usage.html#schema-evolution
# - for eg. ``BrokerdStatus``, instead just have separate messages?
# - use literals for a common msg determined by diff keys?
#   - https://jcristharif.com/msgspec/usage.html#literal

# --------------
# Client -> emsd
# --------------

class Order(Struct):

class Cancel(BaseModel):
    '''Cancel msg for removing a dark (ems triggered) or
    broker-submitted (live) trigger/order.

    '''
    action: str = 'cancel'
    oid: str  # uuid4
    symbol: str


class Order(BaseModel):

    action: str  # {'buy', 'sell', 'alert'}
    # internal ``emsd`` unique "order id"
    oid: str  # uuid4
    symbol: Union[str, Symbol]
    account: str  # should we set a default as '' ?

    price: float
    size: float
    brokers: list[str]

    # Assigned once initial ack is received
    # ack_time_ns: Optional[int] = None
    # TODO: ideally we can combine these 2 fields into
    # 1 and just use the size polarity to determine a buy/sell.
    # i would like to see this become more like
    # https://jcristharif.com/msgspec/usage.html#literal
    # action: Literal[
    #     'live',
    #     'dark',
    #     'alert',
    # ]

    action: Literal[
        'buy',
        'sell',
        'alert',
    ]
    # determines whether the order (create request)
    # will be submitted to the ems or directly to
    # the backend broker
    exec_mode: str  # {'dark', 'live', 'paper'}
    exec_mode: Literal[
        'dark',
        'live',
        # 'paper', no right?
    ]

    class Config:
        # just for pre-loading a ``Symbol`` when used
        # in the order mode staging process
        arbitrary_types_allowed = True
        # don't copy this model instance when used in
        # a recursive model
        copy_on_model_validation = False
    # internal ``emsd`` unique "order id"
    oid: str  # uuid4
    symbol: str | Symbol
    account: str  # should we set a default as '' ?

    price: float
    size: float  # -ve is "sell", +ve is "buy"

    brokers: list[str] = []


class Cancel(Struct):
    '''
    Cancel msg for removing a dark (ems triggered) or
    broker-submitted (live) trigger/order.

    '''
    oid: str  # uuid4
    symbol: str
    action: str = 'cancel'


# --------------
# Client <- emsd
# --------------
# update msgs from ems which relay state change info
# from the active clearing engine.

class Status(Struct):

class Status(BaseModel):
    time_ns: int
    oid: str  # uuid4 ems-order dialog id

    resp: Literal[
        'pending',  # acked by broker but not yet open
        'open',
        'dark_open',  # dark/algo triggered order is open in ems clearing loop
        'triggered',  # above triggered order sent to brokerd, or an alert closed
        'closed',  # fully cleared all size/units
        'fill',  # partial execution
        'canceled',
        'error',
    ]

    name: str = 'status'
    oid: str  # uuid4
    time_ns: int

    # {
    # 'dark_submitted',
    # 'dark_cancelled',
    # 'dark_triggered',

    # 'broker_submitted',
    # 'broker_cancelled',
    # 'broker_executed',
    # 'broker_filled',
    # 'broker_errored',

    # 'alert_submitted',
    # 'alert_triggered',

    # }
    resp: str  # "response", see above

    # symbol: str

    # trigger info
    trigger_price: Optional[float] = None
    # price: float

    # broker: Optional[str] = None

    # this maps normally to the ``BrokerdOrder.reqid`` below, an id
    # normally allocated internally by the backend broker routing system
    broker_reqid: Optional[Union[int, str]] = None
    reqid: Optional[int | str] = None

    # for relaying backend msg data "through" the ems layer
    # the (last) source order/request msg if provided
    # (eg. the Order/Cancel which causes this msg) and
    # acts as a back-reference to the corresponding
    # request message which was the source of this msg.
    req: Order | None = None

    # XXX: better design/name here?
    # flag that can be set to indicate a message for an order
    # event that wasn't originated by piker's emsd (eg. some external
    # trading system which does its own order control but that you
    # might want to "track" using piker UIs/systems).
    src: Optional[str] = None

    # set when a cancel request msg was sent for this order flow dialog
    # but the brokerd dialog isn't yet in a cancelled state.
    cancel_called: bool = False

    # for relaying boxed brokerd-dialog-side msg data "through" the
    # ems layer to clients.
    brokerd_msg: dict = {}


# ---------------
# emsd -> brokerd
# ---------------
# requests *sent* from ems to respective backend broker daemon

class BrokerdCancel(BaseModel):
class BrokerdCancel(Struct):

    action: str = 'cancel'
    oid: str  # piker emsd order id
    time_ns: int

@@ -127,34 +181,39 @@ class BrokerdCancel(BaseModel):
    # for setting a unique order id then this value will be relayed back
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
    # field
    reqid: Optional[Union[int, str]] = None
    reqid: Optional[int | str] = None
    action: str = 'cancel'


class BrokerdOrder(BaseModel):
class BrokerdOrder(Struct):

    action: str  # {buy, sell}
    oid: str
    account: str
    time_ns: int

    symbol: str  # fqsn
    price: float
    size: float

    # TODO: if we instead rely on a +ve/-ve size to determine
    # the action we more or less don't need this field right?
    action: str = ''  # {buy, sell}

    # "broker request id": broker specific/internal order id if this is
    # None, creates a new order otherwise if the id is valid the backend
    # api must modify the existing matching order. If the broker allows
    # for setting a unique order id then this value will be relayed back
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
    # field
    reqid: Optional[Union[int, str]] = None

    symbol: str  # symbol.<providername> ?
    price: float
    size: float
    reqid: Optional[int | str] = None


# ---------------
# emsd <- brokerd
# ---------------
# requests *received* by the ems from the broker backend


class BrokerdOrderAck(BaseModel):
class BrokerdOrderAck(Struct):
    '''
    Immediate response to a brokerd order request providing the broker
    specific unique order id so that the EMS can associate this

@@ -162,102 +221,93 @@ class BrokerdOrderAck(BaseModel):
    ``.oid`` (which is a uuid4).

    '''
    name: str = 'ack'

    # defined and provided by backend
    reqid: Union[int, str]
    reqid: int | str

    # emsd id originally sent in matching request msg
    oid: str
    account: str = ''
    name: str = 'ack'


class BrokerdStatus(BaseModel):
class BrokerdStatus(Struct):

    name: str = 'status'
    reqid: Union[int, str]
    reqid: int | str
    time_ns: int
    status: Literal[
        'open',
        'canceled',
        'fill',
        'pending',
        'error',
    ]

    # XXX: should be best effort set for every update
    account: str = ''

    # {
    # 'submitted',
    # 'cancelled',
    # 'filled',
    # }
    status: str

    account: str
    name: str = 'status'
    filled: float = 0.0
    reason: str = ''
    remaining: float = 0.0

    # XXX: better design/name here?
    # flag that can be set to indicate a message for an order
    # event that wasn't originated by piker's emsd (eg. some external
    # trading system which does its own order control but that you
    # might want to "track" using piker UIs/systems).
    external: bool = False
    # external: bool = False

    # XXX: not required schema as of yet
    broker_details: dict = {
    broker_details: dict = field(default_factory=lambda: {
        'name': '',
    }
    })
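Note the move to ``field(default_factory=...)`` above: with a factory every ``BrokerdStatus`` instance gets its own fresh ``broker_details`` dict instead of all instances sharing one mutable default. A standalone msgspec sketch of the behavior:

import msgspec
from msgspec import field

class Details(msgspec.Struct):
    # a fresh dict per instance via the factory
    broker_details: dict = field(default_factory=lambda: {'name': ''})

a = Details()
b = Details()
a.broker_details['name'] = 'ib'
assert b.broker_details['name'] == ''  # b is unaffected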
class BrokerdFill(BaseModel):
class BrokerdFill(Struct):
    '''
    A single message indicating a "fill-details" event from the broker
    if available.

    '''
    name: str = 'fill'
    reqid: Union[int, str]
    time_ns: int

    # order execution related
    action: str
    size: float
    price: float

    broker_details: dict = {}  # meta-data (eg. commissions etc.)

    # brokerd timestamp required for order mode arrow placement on x-axis

    # TODO: maybe int if we force ns?
    # we need to normalize this somehow since backends will use their
    # own format and likely across many disparate epoch clocks...
    broker_time: float
    reqid: int | str
    time_ns: int

    # order execution related
    size: float
    price: float

    name: str = 'fill'
    action: Optional[str] = None
    broker_details: dict = {}  # meta-data (eg. commissions etc.)


class BrokerdError(BaseModel):
class BrokerdError(Struct):
    '''
    Optional error type that can be relayed to emsd for error handling.

    This is still a TODO thing since we're not sure how to employ it yet.

    '''
    name: str = 'error'
    oid: str
    symbol: str
    reason: str

    # if no brokerd order request was actually submitted (eg. we errored
    # at the ``pikerd`` layer) then there will be no ``reqid`` allocated.
    reqid: Optional[Union[int, str]] = None
    reqid: Optional[int | str] = None

    symbol: str
    reason: str
    name: str = 'error'
    broker_details: dict = {}


class BrokerdPosition(BaseModel):
class BrokerdPosition(Struct):
    '''Position update event from brokerd.

    '''
    name: str = 'position'

    broker: str
    account: str
    symbol: str
    currency: str
    size: float
    avg_price: float
    currency: str = ''
    name: str = 'position'
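Since these msg types now derive from the project's ``msgspec``-based ``Struct``, they can be round-tripped over the wire as msgpack. A small sketch using raw ``msgspec`` directly; the ``Cancel`` shape mirrors the definition above:

import msgspec

class Cancel(msgspec.Struct):
    oid: str  # uuid4
    symbol: str
    action: str = 'cancel'

msg = Cancel(oid='oid-123', symbol='xbtusdt.kraken')
wire = msgspec.msgpack.encode(msg)
assert msgspec.msgpack.decode(wire, type=Cancel) == msg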
@@ -18,54 +18,75 @@
Fake trading for forward testing.

"""
from collections import defaultdict
from contextlib import asynccontextmanager
from datetime import datetime
from operator import itemgetter
import itertools
import time
from typing import Tuple, Optional, Callable
from typing import (
    Any,
    Optional,
    Callable,
)
import uuid

from bidict import bidict
import pendulum
import trio
import tractor
from dataclasses import dataclass

from .. import data
from ..data.types import Struct
from ..data._source import Symbol
from ..pp import (
    Position,
    Transaction,
    open_trade_ledger,
    open_pps,
)
from ..data._normalize import iterticks
from ..data._source import unpack_fqsn
from ..log import get_logger
from ._messages import (
    BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,
    BrokerdFill, BrokerdPosition, BrokerdError
    BrokerdCancel,
    BrokerdOrder,
    BrokerdOrderAck,
    BrokerdStatus,
    BrokerdFill,
    BrokerdPosition,
    BrokerdError,
)

from ..config import load

log = get_logger(__name__)


@dataclass
class PaperBoi:
    """
    Emulates a broker order client providing the same API and
    delivering an order-event response stream but with methods for
class PaperBoi(Struct):
    '''
    Emulates a broker order client providing approximately the same API
    and delivering an order-event response stream but with methods for
    triggering desired events based on forward testing engine
    requirements.
    requirements (eg open, closed, fill msgs).

    """
    '''
    broker: str

    ems_trades_stream: tractor.MsgStream

    # map of paper "live" orders which will be used
    # to simulate fills based on paper engine settings
    _buys: bidict
    _sells: bidict
    _buys: defaultdict[str, bidict]
    _sells: defaultdict[str, bidict]
    _reqids: bidict
    _positions: dict[str, BrokerdPosition]
    _positions: dict[str, Position]
    _trade_ledger: dict[str, Any]
    _syms: dict[str, Symbol] = {}

    # init edge case L1 spread
    last_ask: Tuple[float, float] = (float('inf'), 0)  # price, size
    last_bid: Tuple[float, float] = (0, 0)
    last_ask: tuple[float, float] = (float('inf'), 0)  # price, size
    last_bid: tuple[float, float] = (0, 0)

    async def submit_limit(
        self,

@@ -75,27 +96,24 @@ class PaperBoi:
        action: str,
        size: float,
        reqid: Optional[str],

    ) -> int:
        """Place an order and return integer request id provided by client.

        """
        is_modify: bool = False
        if reqid is None:
            reqid = str(uuid.uuid4())

        else:
            # order already exists, this is a modify
            (oid, symbol, action, old_price) = self._reqids[reqid]
            assert old_price != price
            is_modify = True

        # register order internally
        self._reqids[reqid] = (oid, symbol, action, price)
        '''
        Place an order and return integer request id provided by client.

        '''
        if action == 'alert':
            # bypass all fill simulation
            return reqid

        entry = self._reqids.get(reqid)
        if entry:
            # order already exists, this is a modify
            (oid, symbol, action, old_price) = entry
        else:
            # register order internally
            self._reqids[reqid] = (oid, symbol, action, price)

        # TODO: net latency model
        # we checkpoint here quickly particularly
        # for dark orders since we want the dark_executed

@@ -107,15 +125,18 @@ class PaperBoi:
            size = -size

        msg = BrokerdStatus(
            status='submitted',
            status='open',
            # account=f'paper_{self.broker}',
            account='paper',
            reqid=reqid,
            broker=self.broker,
            time_ns=time.time_ns(),
            filled=0.0,
            reason='paper_trigger',
            remaining=size,

            broker_details={'name': 'paperboi'},
        )
        await self.ems_trades_stream.send(msg.dict())
        await self.ems_trades_stream.send(msg)

        # if we're already at a clearing price simulate an immediate fill
        if (

@@ -123,28 +144,28 @@ class PaperBoi:
        ) or (
            action == 'sell' and (clear_price := self.last_bid[0]) >= price
        ):
            await self.fake_fill(symbol, clear_price, size, action, reqid, oid)
            await self.fake_fill(
                symbol,
                clear_price,
                size,
                action,
                reqid,
                oid,
            )

        # register this submission as a paper live order
        else:
            # register this submission as a paper live order

            # submit order to book simulation fill loop
            # set the simulated order in the respective table for lookup
            # and triggering by the simulated clearing task normally
            # running ``simulate_fills()``.
            if action == 'buy':
                orders = self._buys

            elif action == 'sell':
                orders = self._sells

            # set the simulated order in the respective table for lookup
            # and triggering by the simulated clearing task normally
            # running ``simulate_fills()``.

            if is_modify:
                # remove any existing order for the old price
                orders[symbol].pop((oid, old_price))

            # buys/sells: (symbol -> (price -> order))
            orders.setdefault(symbol, {})[(oid, price)] = (size, reqid, action)
            # {symbol -> bidict[oid, (<price data>)]}
            orders[symbol][oid] = (price, size, reqid, action)

        return reqid
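The new per-symbol tables are ``bidict``s mapping oid -> order-info tuples, which is what lets the clearing loop later pop an entry by *value* via ``.inverse``. That lookup pattern in isolation, with made-up values:

from bidict import bidict

orders: bidict = bidict()
# oid -> (price, size, reqid, action)
orders['oid-1'] = (16100.0, 0.5, 'req-1', 'buy')

order_info = orders['oid-1']
oid = orders.inverse.pop(order_info)  # reverse lookup + removal
assert oid == 'oid-1' and 'oid-1' not in orders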
@@ -157,26 +178,26 @@ class PaperBoi:
        oid, symbol, action, price = self._reqids[reqid]

        if action == 'buy':
            self._buys[symbol].pop((oid, price))
            self._buys[symbol].pop(oid, None)
        elif action == 'sell':
            self._sells[symbol].pop((oid, price))
            self._sells[symbol].pop(oid, None)

        # TODO: net latency model
        await trio.sleep(0.05)

        msg = BrokerdStatus(
            status='cancelled',
            oid=oid,
            status='canceled',
            account='paper',
            reqid=reqid,
            broker=self.broker,
            time_ns=time.time_ns(),
            broker_details={'name': 'paperboi'},
        )
        await self.ems_trades_stream.send(msg.dict())
        await self.ems_trades_stream.send(msg)

    async def fake_fill(
        self,

        symbol: str,
        fqsn: str,
        price: float,
        size: float,
        action: str,  # one of {'buy', 'sell'}

@@ -190,21 +211,21 @@ class PaperBoi:
        remaining: float = 0,

    ) -> None:
        """Pretend to fill a broker order @ price and size.
        '''
        Pretend to fill a broker order @ price and size.

        """
        '''
        # TODO: net latency model
        await trio.sleep(0.05)
        fill_time_ns = time.time_ns()
        fill_time_s = time.time()

        msg = BrokerdFill(

        fill_msg = BrokerdFill(
            reqid=reqid,
            time_ns=time.time_ns(),

            time_ns=fill_time_ns,
            action=action,
            size=size,
            price=price,

            broker_time=datetime.now().timestamp(),
            broker_details={
                'paper_info': {

@@ -214,79 +235,64 @@ class PaperBoi:
                'name': self.broker + '_paper',
            },
        )
        await self.ems_trades_stream.send(msg.dict())
        log.info(f'Fake filling order:\n{fill_msg}')
        await self.ems_trades_stream.send(fill_msg)

        if order_complete:

            msg = BrokerdStatus(

                reqid=reqid,
                time_ns=time.time_ns(),

                status='filled',
                # account=f'paper_{self.broker}',
                account='paper',
                status='closed',
                filled=size,
                remaining=0 if order_complete else remaining,

                action=action,
                size=size,
                price=price,

                broker_details={
                    'paper_info': {
                        'oid': oid,
                    },
                    'name': self.broker,
                },
            )
            await self.ems_trades_stream.send(msg.dict())
            await self.ems_trades_stream.send(msg)

        # lookup any existing position
        token = f'{symbol}.{self.broker}'
        pp_msg = self._positions.setdefault(
            token,
            BrokerdPosition(
        key = fqsn.rstrip(f'.{self.broker}')
        t = Transaction(
            fqsn=fqsn,
            sym=self._syms[fqsn],
            tid=oid,
            size=size,
            price=price,
            cost=0,  # TODO: cost model
            dt=pendulum.from_timestamp(fill_time_s),
            bsuid=key,
        )

        with (
            open_trade_ledger(self.broker, 'paper') as ledger,
            open_pps(self.broker, 'paper', write_on_exit=True) as table
        ):
            tx = t.to_dict()
            tx.pop('sym')
            ledger.update({oid: tx})
            # Write to pps toml right now
            table.update_from_trans({oid: t})

            pp = table.pps[key]
            pp_msg = BrokerdPosition(
                broker=self.broker,
                account='paper',
                symbol=symbol,
                symbol=fqsn,
                # TODO: we need to look up the asset currency from
                # broker info. i guess for crypto this can be
                # inferred from the pair?
                currency='',
                size=0.0,
                avg_price=0,
                currency=key,
                size=pp.size,
                avg_price=pp.ppu,
            )
        )

        # "avg position price" calcs
        # TODO: eventually it'd be nice to have a small set of routines
        # to do this stuff from a sequence of cleared orders to enable
        # so called "contextual positions".
        new_size = size + pp_msg.size

        # old size minus the new size gives us size differential with
        # +ve -> increase in pp size
        # -ve -> decrease in pp size
        size_diff = abs(new_size) - abs(pp_msg.size)

        if new_size == 0:
            pp_msg.avg_price = 0

        elif size_diff > 0:
            # only update the "average position price" when the position
            # size increases not when it decreases (i.e. the position is
            # being made smaller)
            pp_msg.avg_price = (
                abs(size) * price + pp_msg.avg_price * abs(pp_msg.size)
            ) / abs(new_size)

        pp_msg.size = new_size

        await self.ems_trades_stream.send(pp_msg.dict())
        await self.ems_trades_stream.send(pp_msg)


async def simulate_fills(
    quote_stream: 'tractor.ReceiveStream',  # noqa
    quote_stream: tractor.MsgStream,  # noqa
    client: PaperBoi,

) -> None:

    # TODO: more machinery to better simulate real-world market things:

@@ -306,61 +312,116 @@ async def simulate_fills(

    # this stream may eventually contain multiple symbols
    async for quotes in quote_stream:

        for sym, quote in quotes.items():

            for tick in iterticks(
                quote,
                # dark order price filter(s)
                types=('ask', 'bid', 'trade', 'last')
            ):
                # print(tick)
                tick_price = tick.get('price')
                ttype = tick['type']
                tick_price = tick['price']

                if ttype in ('ask',):
                buys: bidict[str, tuple] = client._buys[sym]
                iter_buys = reversed(sorted(
                    buys.values(),
                    key=itemgetter(0),
                ))

                    client.last_ask = (
                        tick_price,
                        tick.get('size', client.last_ask[1]),
                    )
                def buy_on_ask(our_price):
                    return tick_price <= our_price

                    orders = client._buys.get(sym, {})
                sells: bidict[str, tuple] = client._sells[sym]
                iter_sells = sorted(
                    sells.values(),
                    key=itemgetter(0)
                )

                    book_sequence = reversed(
                        sorted(orders.keys(), key=itemgetter(1)))
                def sell_on_bid(our_price):
                    return tick_price >= our_price

                    def pred(our_price):
                        return tick_price < our_price
                match tick:

                elif ttype in ('bid',):
                    # on an ask queue tick, only clear buy entries
                    case {
                        'price': tick_price,
                        'type': 'ask',
                    }:
                        client.last_ask = (
                            tick_price,
                            tick.get('size', client.last_ask[1]),
                        )

                    client.last_bid = (
                        tick_price,
                        tick.get('size', client.last_bid[1]),
                    )
                        iter_entries = zip(
                            iter_buys,
                            itertools.repeat(buy_on_ask)
                        )

                    orders = client._sells.get(sym, {})
                    book_sequence = sorted(orders.keys(), key=itemgetter(1))
                    # on a bid queue tick, only clear sell entries
                    case {
                        'price': tick_price,
                        'type': 'bid',
                    }:
                        client.last_bid = (
                            tick_price,
                            tick.get('size', client.last_bid[1]),
                        )

                    def pred(our_price):
                        return tick_price > our_price
                        iter_entries = zip(
                            iter_sells,
                            itertools.repeat(sell_on_bid)
                        )

                elif ttype in ('trade', 'last'):
                    # TODO: simulate actual book queues and our orders
                    # place in it, might require full L2 data?
                    continue
                    # TODO: fix this block, though it definitely
                    # costs a lot more CPU-wise
                    # - doesn't seem like clears are happening still on
                    #   "resting" limit orders?
                    case {
                        'price': tick_price,
                        'type': ('trade' | 'last'),
                    }:
                        # in the clearing price / last price case we
                        # want to iterate both sides of our book for
                        # clears since we don't know which direction the
                        # price is going to move (especially with HFT)
                        # and thus we simply interleave both sides (buys
                        # and sells) until one side clears and then
                        # break until the next tick?
                        def interleave():
                            for pair in zip(
                                iter_buys,
                                iter_sells,
                            ):
                                for order_info, pred in zip(
                                    pair,
                                    itertools.cycle([buy_on_ask, sell_on_bid]),
                                ):
                                    yield order_info, pred

                # iterate book prices descending
                for oid, our_price in book_sequence:
                    if pred(our_price):
                        iter_entries = interleave()

                        # retrieve order info
                        (size, reqid, action) = orders.pop((oid, our_price))
                    # NOTE: all other (non-clearable) tick event types
                    # - we don't want to spin the simulated clear loop
                    #   below unnecessarily and further don't want to pop
                    #   simulated live orders prematurely.
                    case _:
                        continue

                # iterate all potentially clearable book prices
                # in FIFO order per side.
                for order_info, pred in iter_entries:
                    (our_price, size, reqid, action) = order_info

                    # print(order_info)
                    clearable = pred(our_price)
                    if clearable:
                        # pop and retrieve order info
                        oid = {
                            'buy': buys,
                            'sell': sells
                        }[action].inverse.pop(order_info)

                        # clearing price would have filled entirely
                        await client.fake_fill(
                            symbol=sym,
                            fqsn=sym,
                            # TODO: slippage to determine fill price
                            price=tick_price,
                            size=size,

@@ -368,9 +429,6 @@ async def simulate_fills(
                            reqid=reqid,
                            oid=oid,
                        )
                    else:
                        # prices are iterated in sorted order so we're done
                        break
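To make the ``interleave()`` ordering concrete: buys and sells are zipped pairwise and the clearing predicate alternates via ``itertools.cycle``, so the first element of each pair (a buy) is tested with the ask-side rule and the second (a sell) with the bid-side rule. A standalone sketch with made-up prices:

import itertools

iter_buys = iter([(100.0,), (99.0,)])    # best buy first
iter_sells = iter([(101.0,), (102.0,)])  # best sell first
tick_price = 99.5

def buy_on_ask(our_price):
    return tick_price <= our_price

def sell_on_bid(our_price):
    return tick_price >= our_price

def interleave():
    for pair in zip(iter_buys, iter_sells):
        for order_info, pred in zip(
            pair,
            itertools.cycle([buy_on_ask, sell_on_bid]),
        ):
            yield order_info, pred

for (price,), pred in interleave():
    print(price, 'clears' if pred(price) else 'rests')
# -> 100.0 clears, 101.0 rests, 99.0 rests, 102.0 rests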

async def handle_order_requests(

@@ -380,66 +438,81 @@ async def handle_order_requests(

) -> None:

    # order_request: dict
    request_msg: dict
    async for request_msg in ems_order_stream:
        match request_msg:
            case {'action': ('buy' | 'sell')}:
                order = BrokerdOrder(**request_msg)
                account = order.account

        action = request_msg['action']
                # error on bad inputs
                reason = None
                if account != 'paper':
                    reason = f'No account found:`{account}` (paper only)?'

        if action in {'buy', 'sell'}:
                elif order.size == 0:
                    reason = 'Invalid size: 0'

            account = request_msg['account']
            if account != 'paper':
                log.error(
                    'This is a paper account, only a `paper` selection is valid'
                if reason:
                    log.error(reason)
                    await ems_order_stream.send(BrokerdError(
                        oid=order.oid,
                        symbol=order.symbol,
                        reason=reason,
                    ))
                    continue

                reqid = order.reqid or str(uuid.uuid4())

                # deliver ack that order has been submitted to broker routing
                await ems_order_stream.send(
                    BrokerdOrderAck(
                        oid=order.oid,
                        reqid=reqid,
                    )
                )
                await ems_order_stream.send(BrokerdError(
                    oid=request_msg['oid'],
                    symbol=request_msg['symbol'],
                    reason=f'Paper only. No account found: `{account}` ?',
                ).dict())
                continue

            # validate
            order = BrokerdOrder(**request_msg)

            # call our client api to submit the order
            reqid = await client.submit_limit(

                oid=order.oid,
                symbol=order.symbol,
                price=order.price,
                action=order.action,
                size=order.size,

                # XXX: by default 0 tells ``ib_insync`` methods that
                # there is no existing order so ask the client to create
                # a new one (which it seems to do by allocating an int
                # counter - collision prone..)
                reqid=order.reqid,
            )

            # deliver ack that order has been submitted to broker routing
            await ems_order_stream.send(
                BrokerdOrderAck(

                    # ems order request id
                # call our client api to submit the order
                reqid = await client.submit_limit(
                    oid=order.oid,

                    # broker specific request id
                    symbol=f'{order.symbol}.{client.broker}',
                    price=order.price,
                    action=order.action,
                    size=order.size,
                    # XXX: by default 0 tells ``ib_insync`` methods that
                    # there is no existing order so ask the client to create
                    # a new one (which it seems to do by allocating an int
                    # counter - collision prone..)
                    reqid=reqid,
                )
                log.info(f'Submitted paper LIMIT {reqid}:\n{order}')

                ).dict()
            )
            case {'action': 'cancel'}:
                msg = BrokerdCancel(**request_msg)
                await client.submit_cancel(
                    reqid=msg.reqid
                )

        elif action == 'cancel':
            msg = BrokerdCancel(**request_msg)
            case _:
                log.error(f'Unknown order command: {request_msg}')

            await client.submit_cancel(
                reqid=msg.reqid
            )

        else:
            log.error(f'Unknown order command: {request_msg}')

_reqids: bidict[str, tuple] = {}
_buys: defaultdict[
    str,  # symbol
    bidict[
        str,  # oid
        tuple[float, float, str, str],  # order info
    ]
] = defaultdict(bidict)
_sells: defaultdict[
    str,  # symbol
    bidict[
        str,  # oid
        tuple[float, float, str, str],  # order info
    ]
] = defaultdict(bidict)
_positions: dict[str, Position] = {}
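The handler above dispatches on the ``action`` key using structural pattern matching (Python 3.10+). The same technique in a self-contained form:

def dispatch(request_msg: dict) -> str:
    match request_msg:
        case {'action': ('buy' | 'sell')}:
            return 'submit'
        case {'action': 'cancel'}:
            return 'cancel'
        case _:
            return 'unknown'

assert dispatch({'action': 'buy', 'size': 1.0}) == 'submit'
assert dispatch({'action': 'cancel', 'reqid': 42}) == 'cancel'
assert dispatch({'action': 'alert'}) == 'unknown'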

@tractor.context

@@ -451,42 +524,68 @@ async def trades_dialogue(
    loglevel: str = None,

) -> None:

    tractor.log.get_console_log(loglevel)

    async with (

        data.open_feed(
            [fqsn],
            loglevel=loglevel,
        ) as feed,

    ):
        # TODO: load paper positions per broker from .toml config file
        # and pass as symbol to position data mapping: ``dict[str, dict]``
        # await ctx.started(all_positions)
        await ctx.started(({}, {'paper',}))

        with open_pps(broker, 'paper') as table:
            # save pps in local state
            _positions.update(table.pps)

        pp_msgs: list[BrokerdPosition] = []
        pos: Position
        token: str  # f'{symbol}.{self.broker}'
        for token, pos in _positions.items():
            pp_msgs.append(BrokerdPosition(
                broker=broker,
                account='paper',
                symbol=pos.symbol.front_fqsn(),
                size=pos.size,
                avg_price=pos.ppu,
            ))

        await ctx.started((
            pp_msgs,
            ['paper'],
        ))

        async with (
            ctx.open_stream() as ems_stream,
            trio.open_nursery() as n,
        ):

            client = PaperBoi(
                broker,
                ems_stream,
                _buys={},
                _sells={},
                _buys=_buys,
                _sells=_sells,

                _reqids={},
                _reqids=_reqids,

                # TODO: load paper positions from ``positions.toml``
                _positions={},
                _positions=_positions,

                # TODO: load positions from ledger file
                _trade_ledger={},
                _syms={
                    fqsn: flume.symbol
                    for fqsn, flume in feed.flumes.items()
                }
            )

            n.start_soon(handle_order_requests, client, ems_stream)
            n.start_soon(
                handle_order_requests,
                client,
                ems_stream,
            )

            # paper engine simulator clearing task
            await simulate_fills(feed.stream, client)
            await simulate_fills(feed.streams[broker], client)
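A hedged sketch of driving this endpoint through ``open_paperboi`` (defined just below); the import path is an assumption and a running piker actor tree plus a live quote feed for the fqsn are presumed:

# assumptions: module path, fqsn value, and runtime availability
from piker.clearing._paper_engine import open_paperboi

async def run_paper(fqsn: str = 'xbtusdt.kraken') -> None:
    async with open_paperboi(
        fqsn=fqsn,
        loglevel='info',
    ) as (ctx, first):
        # ``first`` should be the (pp_msgs, ['paper']) payload
        # delivered via ``ctx.started()`` in ``trades_dialogue()``
        pp_msgs, accounts = first
        print(accounts)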

@asynccontextmanager

@@ -511,17 +610,17 @@ async def open_paperboi(
    # (we likely don't need more than one proc for basic
    # simulated order clearing)
    if portal is None:
        log.info('Starting new paper-engine actor')
        portal = await tn.start_actor(
            service_name,
            enable_modules=[__name__]
        )

    async with portal.open_context(
        trades_dialogue,
        broker=broker,
        fqsn=fqsn,
        loglevel=loglevel,
        trades_dialogue,
        broker=broker,
        fqsn=fqsn,
        loglevel=loglevel,

    ) as (ctx, first):

        yield ctx, first
@@ -19,38 +19,58 @@ CLI commons.

'''
import os
from pprint import pformat

import click
import trio
import tractor

from ..log import get_console_log, get_logger, colorize_json
from ..log import (
    get_console_log,
    get_logger,
    colorize_json,
)
from ..brokers import get_brokermod
from .._daemon import _tractor_kwargs
from ..service import (
    _default_registry_host,
    _default_registry_port,
)
from .. import config


log = get_logger('cli')
DEFAULT_BROKER = 'questrade'


@click.command()
@click.option('--loglevel', '-l', default='warning', help='Logging level')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
@click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
@click.option('--host', '-h', default=None, help='Host addr to bind')
@click.option('--port', '-p', default=None, help='Port number to bind')
@click.option(
    '--tsdb',
    is_flag=True,
    help='Enable local ``marketstore`` instance'
)
def pikerd(loglevel, host, tl, pdb, tsdb):
@click.option(
    '--es',
    is_flag=True,
    help='Enable local ``elasticsearch`` instance'
)
def pikerd(
    loglevel: str,
    host: str,
    port: int,
    tl: bool,
    pdb: bool,
    tsdb: bool,
    es: bool,
):
    '''
    Spawn the piker broker-daemon.

    '''
    from .._daemon import open_pikerd

    from ..service import open_pikerd
    log = get_console_log(loglevel)

    if pdb:

@@ -62,32 +82,25 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
            "\n"
        ))

    async def main():
    reg_addr: None | tuple[str, int] = None
    if host or port:
        reg_addr = (
            host or _default_registry_host,
            int(port) or _default_registry_port,
        )

    async def main():
        async with (
            open_pikerd(
                tsdb=tsdb,
                es=es,
                loglevel=loglevel,
                debug_mode=pdb,
                registry_addr=reg_addr,

            ),  # normally delivers a ``Services`` handle
            trio.open_nursery() as n,
        ):
            if tsdb:
                from piker.data._ahab import start_ahab
                from piker.data.marketstore import start_marketstore

                log.info('Spawning `marketstore` supervisor')
                ctn_ready, config, (cid, pid) = await n.start(
                    start_ahab,
                    'marketstored',
                    start_marketstore,

                )
                log.info(
                    f'`marketstore` up!\n'
                    f'`marketstored` pid: {pid}\n'
                    f'docker container id: {cid}\n'
                    f'config: {pformat(config)}'
                )

            await trio.sleep_forever()
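The host/port to registry-address fallback above is worth isolating; the ``_default_registry_host/port`` values here are stand-ins for whatever ``..service`` exports, and the explicit ``if port`` guard (vs the ``int(port) or ...`` form) avoids ``int(None)`` when only a host is given:

_default_registry_host = '127.0.0.1'  # stand-in defaults
_default_registry_port = 6116

def to_reg_addr(
    host: str | None,
    port: int | str | None,
) -> tuple[str, int] | None:
    if not (host or port):
        return None  # let the runtime pick its builtin default
    return (
        host or _default_registry_host,
        int(port) if port else _default_registry_port,
    )

assert to_reg_addr(None, None) is None
assert to_reg_addr('10.0.0.2', None) == ('10.0.0.2', 6116)
assert to_reg_addr(None, '7000') == ('127.0.0.1', 7000)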

@@ -97,25 +110,46 @@ def pikerd(loglevel, host, tl, pdb, tsdb):
@click.group(context_settings=config._context_defaults)
@click.option(
    '--brokers', '-b',
    default=[DEFAULT_BROKER],
    default=None,
    multiple=True,
    help='Broker backend to use'
)
@click.option('--loglevel', '-l', default='warning', help='Logging level')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option('--configdir', '-c', help='Configuration directory')
@click.option('--host', '-h', default=None, help='Host addr to bind')
@click.option('--port', '-p', default=None, help='Port number to bind')
@click.pass_context
def cli(ctx, brokers, loglevel, tl, configdir):
def cli(
    ctx: click.Context,
    brokers: list[str],
    loglevel: str,
    tl: bool,
    configdir: str,
    host: str,
    port: int,

) -> None:
    if configdir is not None:
        assert os.path.isdir(configdir), f"`{configdir}` is not a valid path"
        config._override_config_dir(configdir)

    ctx.ensure_object(dict)

    if len(brokers) == 1:
        brokermods = [get_brokermod(brokers[0])]
    else:
        brokermods = [get_brokermod(broker) for broker in brokers]
    if not brokers:
        # (try to) load all (supposedly) supported data/broker backends
        from piker.brokers import __brokers__
        brokers = __brokers__

    brokermods = [get_brokermod(broker) for broker in brokers]
    assert brokermods

    reg_addr: None | tuple[str, int] = None
    if host or port:
        reg_addr = (
            host or _default_registry_host,
            int(port) or _default_registry_port,
        )

    ctx.obj.update({
        'brokers': brokers,

@@ -125,6 +159,7 @@ def cli(ctx, brokers, loglevel, tl, configdir):
        'log': get_console_log(loglevel),
        'confdir': config._config_dir,
        'wl_path': config._watchlists_data_path,
        'registry_addr': reg_addr,
    })

    # allow enabling same loglevel in ``tractor`` machinery

@@ -134,33 +169,45 @@ def cli(ctx, brokers, loglevel, tl, configdir):

@cli.command()
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.argument('names', nargs=-1, required=False)
@click.argument('ports', nargs=-1, required=False)
@click.pass_obj
def services(config, tl, names):
def services(config, tl, ports):

    from ..service import (
        open_piker_runtime,
        _default_registry_port,
        _default_registry_host,
    )

    host = _default_registry_host
    if not ports:
        ports = [_default_registry_port]

    async def list_services():

        async with tractor.get_arbiter(
            *_tractor_kwargs['arbiter_addr']
        ) as portal:
        nonlocal host
        async with (
            open_piker_runtime(
                name='service_query',
                loglevel=config['loglevel'] if tl else None,
            ),
            tractor.get_arbiter(
                host=host,
                port=ports[0]
            ) as portal
        ):
            registry = await portal.run_from_ns('self', 'get_registry')
            json_d = {}
            for key, socket in registry.items():
                # name, uuid = uid
                host, port = socket
                json_d[key] = f'{host}:{port}'
            click.echo(f"{colorize_json(json_d)}")

    tractor.run(
        list_services,
        name='service_query',
        loglevel=config['loglevel'] if tl else None,
        arbiter_addr=_tractor_kwargs['arbiter_addr'],
    )
    trio.run(list_services)


def _load_clis() -> None:
    from ..data import marketstore  # noqa
    from ..service import marketstore  # noqa
    from ..service import elastic
    from ..data import cli  # noqa
    from ..brokers import cli  # noqa
    from ..ui import cli  # noqa
@@ -15,15 +15,17 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Broker configuration mgmt.
Platform configuration (files) mgmt.

"""
import platform
import sys
import os
from os import path
from os.path import dirname
import shutil
from typing import Optional
from pathlib import Path

from bidict import bidict
import toml

@@ -33,9 +35,16 @@ from .log import get_logger
log = get_logger('broker-config')


# taken from ``click`` since apparently they have some
# XXX NOTE: taken from ``click`` since apparently they have some
# super weirdness with sigint and sudo..no clue
def get_app_dir(app_name, roaming=True, force_posix=False):
# we're probably going to slowly just modify it to our own version over
# time..
def get_app_dir(
    app_name: str,
    roaming: bool = True,
    force_posix: bool = False,

) -> str:
    r"""Returns the config folder for the application.  The default behavior
    is to return whatever is most appropriate for the operating system.


@@ -74,7 +83,30 @@ def get_app_dir(app_name, roaming=True, force_posix=False):
    def _posixify(name):
        return "-".join(name.split()).lower()

    # if WIN:
    # NOTE: for testing with `pytest` we leverage the `tmp_dir`
    # fixture to generate (and clean up) a test-request-specific
    # directory for isolated configuration files such that,
    # - multiple tests can run (possibly in parallel) without data races
    #   on the config state,
    # - we don't need to ever worry about leaking configs into the
    #   system thus avoiding needing to manage config cleanup fixtures or
    #   other bothers (since obviously `tmp_dir` cleans up after itself).
    #
    # In order to "pass down" the test dir path to all (sub-)actors in
    # the actor tree we preload the root actor's runtime vars state (an
    # internal mechanism for inheriting state down an actor tree in
    # `tractor`) with the testing dir and check for it whenever we
    # detect `pytest` is being used (which it isn't under normal
    # operation).
    if "pytest" in sys.modules:
        import tractor
        actor = tractor.current_actor(err_on_no_runtime=False)
        if actor:  # runtime is up
            rvs = tractor._state._runtime_vars
            testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
            assert testdirpath.exists(), 'piker test harness might be borked!?'
            app_name = str(testdirpath)

    if platform.system() == 'Windows':
        key = "APPDATA" if roaming else "LOCALAPPDATA"
        folder = os.environ.get(key)

@@ -111,8 +143,10 @@ if _parent_user:

_conf_names: set[str] = {
    'brokers',
    'pps',
    'trades',
    'watchlists',
    'paper_trades'
}

_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')

@@ -147,19 +181,21 @@ def get_conf_path(
    conf_name: str = 'brokers',

) -> str:
    """Return the default config path normally under
    ``~/.config/piker`` on linux.
    '''
    Return the top-level default config path normally under
    ``~/.config/piker`` on linux for a given ``conf_name``, the config
    name.

    Contains files such as:
    - brokers.toml
    - pp.toml
    - watchlists.toml
    - trades.toml

    # maybe coming soon ;)
    - signals.toml
    - strats.toml

    """
    '''
    assert conf_name in _conf_names
    fn = _conf_fn_w_ext(conf_name)
    return os.path.join(

@@ -173,7 +209,7 @@ def repodir():
    Return the abspath to the repo directory.

    '''
    dirpath = os.path.abspath(
    dirpath = path.abspath(
        # we're 3 levels down in **this** module file
        dirname(dirname(os.path.realpath(__file__)))
    )

@@ -182,7 +218,9 @@ def repodir():

def load(
    conf_name: str = 'brokers',
    path: str = None
    path: str = None,

    **tomlkws,

) -> (dict, str):
    '''

@@ -190,6 +228,10 @@ def load(

    '''
    path = path or get_conf_path(conf_name)

    if not os.path.isdir(_config_dir):
        Path(_config_dir).mkdir(parents=True, exist_ok=True)

    if not os.path.isfile(path):
        fn = _conf_fn_w_ext(conf_name)


@@ -202,8 +244,15 @@ def load(
        # if one exists.
        if os.path.isfile(template):
            shutil.copyfile(template, path)
        else:
            # create an empty file
            with open(path, 'x'):
                pass
    else:
        with open(path, 'r'):
            pass  # touch it

    config = toml.load(path)
    config = toml.load(path, **tomlkws)
    log.debug(f"Read config file {path}")
    return config, path
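Example use of the updated ``load()``: extra kwargs pass straight through to ``toml.load()`` (the ``_dict`` kwarg shown below is part of the ``toml`` package's API):

from piker import config  # assumed import path

# reads (and auto-creates on first use) ~/.config/piker/brokers.toml
conf, path = config.load('brokers')

# forward toml kwargs, eg. to get a custom mapping type back
from collections import OrderedDict
conf, _ = config.load('brokers', _dict=OrderedDict)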
@@ -212,6 +261,8 @@ def write(
    config: dict,  # toml config as dict
    name: str = 'brokers',
    path: str = None,
    fail_empty: bool = True,
    **toml_kwargs,

) -> None:
    '''

@@ -226,7 +277,7 @@ def write(
        log.debug(f"Creating config dir {_config_dir}")
        os.makedirs(dirname)

    if not config:
    if not config and fail_empty:
        raise ValueError(
            "Watch out you're trying to write a blank config!")

@@ -235,11 +286,14 @@ def write(
        f"{path}"
    )
    with open(path, 'w') as cf:
        return toml.dump(config, cf)
        return toml.dump(
            config,
            cf,
            **toml_kwargs,
        )


def load_accounts(

    providers: Optional[list[str]] = None

) -> bidict[str, Optional[str]]:
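And the matching ``write()`` usage; ``fail_empty=False`` now allows flushing an emptied table (eg. clearing out a ``pps`` file) instead of raising:

from piker import config  # assumed import path

conf, path = config.load('pps')
conf.pop('kraken.paper', None)  # hypothetical table removal

config.write(
    conf,
    name='pps',
    fail_empty=False,  # permit writing back an empty config
)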

@@ -22,6 +22,12 @@ and storing data from your brokers as well as
sharing live streams over a network.

"""
import tractor
import trio

from ..log import (
    get_console_log,
)
from ._normalize import iterticks
from ._sharedmem import (
    maybe_open_shm_array,

@@ -32,7 +38,6 @@ from ._sharedmem import (
)
from .feed import (
    open_feed,
    _setup_persistent_brokerd,
)


@@ -44,5 +49,40 @@ __all__ = [
    'attach_shm_array',
    'open_shm_array',
    'get_shm_token',
    '_setup_persistent_brokerd',
]


@tractor.context
async def _setup_persistent_brokerd(
    ctx: tractor.Context,
    brokername: str,

) -> None:
    '''
    Allocate an actor-wide service nursery in ``brokerd``
    such that feeds can be run in the background persistently by
    the broker backend as needed.

    '''
    get_console_log(tractor.current_actor().loglevel)

    from .feed import (
        _bus,
        get_feed_bus,
    )
    global _bus
    assert not _bus

    async with trio.open_nursery() as service_nursery:
        # assign a nursery to the feeds bus for spawning
        # background tasks from clients
        get_feed_bus(brokername, service_nursery)

        # unblock caller
        await ctx.started()

        # we pin this task to keep the feeds manager active until the
        # parent actor decides to tear it down
        await trio.sleep_forever()
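A sketch of how a parent actor might invoke this context endpoint via ``tractor`` (portal acquisition elided; ``open_context`` is tractor's standard cross-actor context API):

# ``portal`` is presumed to be a ``tractor.Portal`` to a spawned
# brokerd actor which has this module enabled.
async def start_feed_bus(portal, brokername: str = 'kraken') -> None:
    async with portal.open_context(
        _setup_persistent_brokerd,
        brokername=brokername,
    ) as (ctx, first):
        # blocks while the brokerd-side service nursery stays up;
        # exiting this block tears the remote task down.
        await ctx.result()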

@@ -0,0 +1,827 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Pre-(path)-graphics formatted x/y nd/1d rendering subsystem.

"""
from __future__ import annotations
from typing import (
    Optional,
    TYPE_CHECKING,
)

import msgspec
from msgspec import field
import numpy as np
from numpy.lib import recfunctions as rfn

from ._sharedmem import (
    ShmArray,
)
from ._pathops import (
    path_arrays_from_ohlc,
)

if TYPE_CHECKING:
    from ._dataviz import (
        Viz,
    )
    from .._profile import Profiler


class IncrementalFormatter(msgspec.Struct):
    '''
    Incrementally updating, pre-path-graphics tracking, formatter.

    Allows tracking source data state in an updateable pre-graphics
    ``np.ndarray`` format (in local process memory) as well as
    incrementally rendering from that format **to** 1d x/y for path
    generation using ``pg.functions.arrayToQPath()``.

    '''
    shm: ShmArray
    viz: Viz

    # the value to be multiplied by any index into the x/y_1d arrays
    # given the input index is based on the original source data array.
    flat_index_ratio: float = 1

    @property
    def index_field(self) -> 'str':
        '''
        Value (``str``) used to look up the "index series" from the
        underlying source ``numpy`` struct-array; delegate directly to
        the managing ``Viz``.

        '''
        return self.viz.index_field

    # Incrementally updated xy ndarray formatted data, a pre-1d
    # format which is updated and cached independently of the final
    # pre-graphics-path 1d format.
    x_nd: Optional[np.ndarray] = None
    y_nd: Optional[np.ndarray] = None

    @property
    def xy_nd(self) -> tuple[np.ndarray, np.ndarray]:
        return (
            self.x_nd[self.xy_slice],
            self.y_nd[self.xy_slice],
        )

    @property
    def xy_slice(self) -> slice:
        return slice(
            self.xy_nd_start,
            self.xy_nd_stop,
        )

    # indexes which slice into the above arrays (which are allocated
    # based on source data shm input size) and allow retrieving
    # incrementally updated data.
    xy_nd_start: int | None = None
    xy_nd_stop: int | None = None

    # TODO: eventually incrementally update 1d-pre-graphics path data?
    x_1d: np.ndarray | None = None
    y_1d: np.ndarray | None = None

    # incremental view-change state(s) tracking
    _last_vr: tuple[float, float] | None = None
    _last_ivdr: tuple[float, float] | None = None

    @property
    def index_step_size(self) -> float:
        '''
        Readonly value computed on first ``.diff()`` call.

        '''
        return self.viz.index_step()

    def diff(
        self,
        new_read: tuple[np.ndarray],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
    ]:
        # TODO:
        # - can the renderer just call ``Viz.read()`` directly? unpack
        #   latest source data read
        # - eventually maybe we can implement some kind of
        #   transform on the ``QPainterPath`` that will more or less
        #   detect the diff in "elements" terms? update diff state since
        #   we've now rendered paths.
        (
            xfirst,
            xlast,
            array,
            ivl,
            ivr,
            in_view,
        ) = new_read

        index = array['index']

        # if the first index in the read array is 0 then
        # it means the source buffer has been completely backfilled to
        # available space.
        src_start = index[0]
        src_stop = index[-1] + 1

        # these are the "formatted output data" indices
        # for the pre-graphics arrays.
        nd_start = self.xy_nd_start
        nd_stop = self.xy_nd_stop

        if (
            nd_start is None
        ):
            assert nd_stop is None

            # setup to do a prepend of all existing src history
            nd_start = self.xy_nd_start = src_stop
            # set us in a zero-to-append state
            nd_stop = self.xy_nd_stop = src_stop

        # compute the length diffs between the first/last index entry in
        # the input data and the last indexes we have on record from the
        # last time we updated the curve index.
        prepend_length = int(nd_start - src_start)
        append_length = int(src_stop - nd_stop)

        # do diffing for prepend, append and last entry
        return (
            slice(src_start, nd_start),
            prepend_length,
            append_length,
            slice(nd_stop, src_stop),
        )
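A worked example of the ``diff()`` arithmetic: if the formatter previously tracked source indices [100, 200) (``xy_nd_start=100``, ``xy_nd_stop=200``) and a new read spans [90, 210):

src_start, src_stop = 90, 210  # index[0], index[-1] + 1
nd_start, nd_stop = 100, 200   # state from the prior update

prepend_length = nd_start - src_start  # 10 older rows to prepend
append_length = src_stop - nd_stop     # 10 newer rows to append

pre_slice = slice(src_start, nd_start)  # slice(90, 100)
post_slice = slice(nd_stop, src_stop)   # slice(200, 210)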
||||
def _track_inview_range(
|
||||
self,
|
||||
view_range: tuple[int, int],
|
||||
|
||||
) -> bool:
|
||||
# if a view range is passed, plan to draw the
|
||||
# source ouput that's "in view" of the chart.
|
||||
vl, vr = view_range
|
||||
zoom_or_append = False
|
||||
last_vr = self._last_vr
|
||||
|
||||
# incremental in-view data update.
|
||||
if last_vr:
|
||||
lvl, lvr = last_vr # relative slice indices
|
||||
|
||||
# TODO: detecting more specifically the interaction changes
|
||||
# last_ivr = self._last_ivdr or (vl, vr)
|
||||
# al, ar = last_ivr # abs slice indices
|
||||
# left_change = abs(x_iv[0] - al) >= 1
|
||||
# right_change = abs(x_iv[-1] - ar) >= 1
|
||||
|
||||
# likely a zoom/pan view change or data append update
|
||||
if (
|
||||
(vr - lvr) > 2
|
||||
or vl < lvl
|
||||
|
||||
# append / prepend update
|
||||
# we had an append update where the view range
|
||||
# didn't change but the data-viewed (shifted)
|
||||
# underneath, so we need to redraw.
|
||||
# or left_change and right_change and last_vr == view_range
|
||||
|
||||
# not (left_change and right_change) and ivr
|
||||
# (
|
||||
# or abs(x_iv[ivr] - livr) > 1
|
||||
):
|
||||
zoom_or_append = True
|
||||
|
||||
self._last_vr = view_range
|
||||
|
||||
return zoom_or_append
|
||||
|
||||
def format_to_1d(
|
||||
self,
|
||||
new_read: tuple,
|
||||
array_key: str,
|
||||
profiler: Profiler,
|
||||
|
||||
slice_to_inview: bool = True,
|
||||
|
||||
) -> tuple[
|
||||
np.ndarray,
|
||||
np.ndarray,
|
||||
]:
|
||||
shm = self.shm
|
||||
|
||||
(
|
||||
_,
|
||||
_,
|
||||
array,
|
||||
ivl,
|
||||
ivr,
|
||||
in_view,
|
||||
|
||||
        ) = new_read
        (
            pre_slice,
            prepend_len,
            append_len,
            post_slice,
        ) = self.diff(new_read)

        # we first need to allocate xy data arrays
        # from the source data.
        if self.y_nd is None:
            self.xy_nd_start = shm._first.value
            self.xy_nd_stop = shm._last.value
            self.x_nd, self.y_nd = self.allocate_xy_nd(
                shm,
                array_key,
            )
            profiler('allocated xy history')

        # once allocated we do incremental pre/append
        # updates from the diff with the source buffer.
        else:
            if prepend_len:
                self.incr_update_xy_nd(
                    shm,
                    array_key,

                    # this is the pre-sliced, "normally expected"
                    # new data that an updater would normally be
                    # expected to process, however in some cases (like
                    # step curves) the updater routine may want to do
                    # the source history-data reading itself, so we pass
                    # both here.
                    shm._array[pre_slice],
                    pre_slice,
                    prepend_len,

                    self.xy_nd_start,
                    self.xy_nd_stop,
                    is_append=False,
                )

                self.xy_nd_start -= prepend_len
                profiler(f'prepended xy history: {prepend_len}')

            if append_len:
                self.incr_update_xy_nd(
                    shm,
                    array_key,

                    shm._array[post_slice],
                    post_slice,
                    append_len,

                    self.xy_nd_start,
                    self.xy_nd_stop,
                    is_append=True,
                )
                self.xy_nd_stop += append_len
                profiler(f'appended xy history: {append_len}')
                # sanity
                # slice_ln = post_slice.stop - post_slice.start
                # assert append_len == slice_ln

        view_changed: bool = False
        view_range: tuple[int, int] = (ivl, ivr)
        if slice_to_inview:
            view_changed = self._track_inview_range(view_range)
            array = in_view
            profiler(f'{self.viz.name} view range slice {view_range}')

        # TODO: we need to check if the last-datum-in-view is true
        # and if so only slice to the 2nd last datum.
        # hist = array[:slice_to_head]

        # XXX: WOA WTF TRACTOR DEBUGGING BUGGG
        # assert 0

        # xy-path data transform: convert source data to a format
        # able to be passed to a `QPainterPath` rendering routine.
        if not len(array):
            # XXX: this might be why the profiler only has exits?
            return

        # TODO: hist here should be the pre-sliced
        # x/y_data in the case where allocate_xy is
        # defined?
        x_1d, y_1d, connect = self.format_xy_nd_to_1d(
            array,
            array_key,
            view_range,
        )
        # cache/save last 1d outputs for use by other
        # readers (eg. `Viz.draw_last_datum()` in the
        # only-draw-last-uppx case).
        self.x_1d = x_1d
        self.y_1d = y_1d

        # app_tres = None
        # if append_len:
        #     appended = array[-append_len-1:slice_to_head]
        #     app_tres = self.format_xy_nd_to_1d(
        #         appended,
        #         array_key,
        #         (
        #             view_range[1] - append_len + slice_to_head,
        #             view_range[1]
        #         ),
        #     )
        #     # assert (len(appended) - 1) == append_len
        #     # assert len(appended) == append_len
        #     print(
        #         f'{self.viz.name} APPEND LEN: {append_len}\n'
        #         f'{self.viz.name} APPENDED: {appended}\n'
        #         f'{self.viz.name} app_tres: {app_tres}\n'
        #     )

        # update the last "in view data range"
        if len(x_1d):
            self._last_ivdr = x_1d[0], x_1d[-1]

        profiler('.format_to_1d()')

        return (
            x_1d,
            y_1d,
            connect,
            prepend_len,
            append_len,
            view_changed,
            # app_tres,
        )

    ###############################
    # Sub-type override interface #
    ###############################

    x_offset: np.ndarray = np.array([0])

    # optional pre-graphics xy formatted data which
    # is incrementally updated in sync with the source data.
    # XXX: was ``.allocate_xy()``
    def allocate_xy_nd(
        self,
        src_shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert the structured-array ``src_shm`` format to
        an equivalently shaped (and field-less) ``np.ndarray``.

        Eg. a 4 field x N struct-array => (N, 4)

        '''
        y_nd = src_shm._array[data_field].copy()
        x_nd = (
            src_shm._array[self.index_field].copy()
            +
            self.x_offset
        )
        return x_nd, y_nd

    # XXX: was ``.update_xy()``
    def incr_update_xy_nd(
        self,

        src_shm: ShmArray,
        data_field: str,

        new_from_src: np.ndarray,  # portion of source that was updated

        read_slc: slice,
        ln: int,  # len of updated

        nd_start: int,
        nd_stop: int,

        is_append: bool,

    ) -> None:
        # write pushed data to flattened copy
        y_nd_new = new_from_src[data_field]
        self.y_nd[read_slc] = y_nd_new

        x_nd_new = self.x_nd[read_slc]
        x_nd_new[:] = (
            new_from_src[self.index_field]
            +
            self.x_offset
        )

        # x_nd = self.x_nd[self.xy_slice]
        # y_nd = self.y_nd[self.xy_slice]
        # name = self.viz.name
        # if 'trade_rate' == name:
        #     s = 4
        #     print(
        #         f'{name.upper()}:\n'
        #         'NEW_FROM_SRC:\n'
        #         f'new_from_src: {new_from_src}\n\n'

        #         f'PRE self.x_nd:'
        #         f'\n{list(x_nd[-s:])}\n'

        #         f'PRE self.y_nd:\n'
        #         f'{list(y_nd[-s:])}\n\n'

        #         f'TO WRITE:\n'

        #         f'x_nd_new:\n'
        #         f'{x_nd_new[0]}\n'

        #         f'y_nd_new:\n'
        #         f'{y_nd_new}\n'
        #     )

    # XXX: was ``.format_xy()``
    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

    ) -> tuple[
        np.ndarray,  # 1d x
        np.ndarray,  # 1d y
        np.ndarray | str,  # connection array/style
    ]:
        '''
        Default xy-nd array to 1d pre-graphics-path render routine.

        Return single field column data verbatim.

        '''
        # NOTE: we don't include the very last datum which is filled in
        # normally by another graphics object.
        x_1d = array[self.index_field][:-1]
        y_1d = array[array_key][:-1]

        # name = self.viz.name
        # if 'trade_rate' == name:
        #     s = 4
        #     x_nd = list(self.x_nd[self.xy_slice][-s:-1])
        #     y_nd = list(self.y_nd[self.xy_slice][-s:-1])
        #     print(
        #         f'{name}:\n'
        #         f'XY data:\n'
        #         f'x: {x_nd}\n'
        #         f'y: {y_nd}\n\n'
        #         f'x_1d: {list(x_1d[-s:])}\n'
        #         f'y_1d: {list(y_1d[-s:])}\n\n'
        #     )
        return (
            x_1d,
            y_1d,

            # 1d connection array or style-key to
            # ``pg.functions.arrayToQPath()``
            'all',
        )

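# Editor's note: a hedged sketch (not part of the diff) of how a custom
# sub-type would use the 3 override hooks above; only the base class
# behavior shown in this file is assumed, nothing else.
class NegatedCurveFmtr(IncrementalFormatter):
    '''
    Render the y-series negated while reusing the default nd alloc,
    incremental update and x handling.

    '''
    def format_xy_nd_to_1d(
        self,
        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],
    ) -> tuple[np.ndarray, np.ndarray, str]:
        x_1d, y_1d, connect = super().format_xy_nd_to_1d(
            array,
            array_key,
            vr,
        )
        return x_1d, -y_1d, connect
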
class OHLCBarsFmtr(IncrementalFormatter):

    x_offset: np.ndarray = np.array([
        -0.5,
        0,
        0,
        0.5,
    ])

    fields: list[str] = field(
        default_factory=lambda: ['open', 'high', 'low', 'close']
    )
    flat_index_ratio: float = 4

    def allocate_xy_nd(
        self,

        ohlc_shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert an input struct-array holding OHLC samples into a pair of
        flattened x, y arrays with the same size (datums wise) as the source
        data.

        '''
        y_nd = ohlc_shm.ustruct(self.fields)

        # generate a flat-interpolated x-domain
        x_nd = (
            np.broadcast_to(
                ohlc_shm._array[self.index_field][:, None],
                (
                    ohlc_shm._array.size,
                    # 4,  # only ohlc
                    y_nd.shape[1],
                ),
            )
            +
            self.x_offset
        )
        assert y_nd.any()

        # write pushed data to flattened copy
        return (
            x_nd,
            y_nd,
        )

    def incr_update_xy_nd(
        self,

        src_shm: ShmArray,
        data_field: str,

        new_from_src: np.ndarray,  # portion of source that was updated

        read_slc: slice,
        ln: int,  # len of updated

        nd_start: int,
        nd_stop: int,

        is_append: bool,

    ) -> None:
        # write newly pushed data to flattened copy;
        # a struct-arr is always passed in.
        new_y_nd = rfn.structured_to_unstructured(
            new_from_src[self.fields]
        )
        self.y_nd[read_slc] = new_y_nd

        # generate same-valued-per-row x support based on y shape
        x_nd_new = self.x_nd[read_slc]
        x_nd_new[:] = np.broadcast_to(
            new_from_src[self.index_field][:, None],
            new_y_nd.shape,
        ) + self.x_offset

    # TODO: can we drop this frame and just use the above?
    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

        start: int = 0,  # XXX: do we need this?
        # 0.5 is no overlap between arms, 1.0 is full overlap
        w: float = 0.16,

    ) -> tuple[
        np.ndarray,
        np.ndarray,
        np.ndarray,
    ]:
        '''
        More or less a direct proxy to the ``numba``-fied
        ``path_arrays_from_ohlc()`` (above) but with closed in kwargs
        for line spacing.

        '''
        x, y, c = path_arrays_from_ohlc(
            array[:-1],
            start,
            bar_w=self.index_step_size,
            bar_gap=w * self.index_step_size,

            # XXX: don't ask, due to a ``numba`` bug..
            use_time_index=(self.index_field == 'time'),
        )
        return x, y, c

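# Editor's note: a small numpy sketch (not part of the diff) of the
# x-offset broadcast trick used by ``OHLCBarsFmtr.allocate_xy_nd()``
# above: each scalar index is expanded to 4 columns then shifted by the
# per-column offsets so all 4 OHLC values get their own x coordinate.
def _x_offset_broadcast_example() -> None:
    import numpy as np
    index = np.array([10., 11., 12.])
    x_offset = np.array([-0.5, 0, 0, 0.5])
    x_nd = np.broadcast_to(index[:, None], (3, 4)) + x_offset
    # row for index 10. -> [ 9.5, 10., 10., 10.5]
    assert (x_nd[0] == np.array([9.5, 10., 10., 10.5])).all()
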
class OHLCBarsAsCurveFmtr(OHLCBarsFmtr):

    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
        str,
    ]:
        # TODO: in the case of an existing ``.update_xy()``
        # should we be passing in array as an xy arrays tuple?

        # 2 more datum-indexes to capture zero at end
        x_flat = self.x_nd[self.xy_nd_start:self.xy_nd_stop-1]
        y_flat = self.y_nd[self.xy_nd_start:self.xy_nd_stop-1]

        # slice to view
        ivl, ivr = vr
        x_iv_flat = x_flat[ivl:ivr]
        y_iv_flat = y_flat[ivl:ivr]

        # reshape to 1d for graphics rendering
        y_iv = y_iv_flat.reshape(-1)
        x_iv = x_iv_flat.reshape(-1)

        return x_iv, y_iv, 'all'

class StepCurveFmtr(IncrementalFormatter):

    x_offset: np.ndarray = np.array([
        0,
        1,
    ])

    def allocate_xy_nd(
        self,

        shm: ShmArray,
        data_field: str,

    ) -> tuple[
        np.ndarray,  # x
        np.ndarray,  # y
    ]:
        '''
        Convert an input 1d shm array to a "step array" format
        for use by path graphics generation.

        '''
        i = shm._array[self.index_field].copy()
        out = shm._array[data_field].copy()

        x_out = (
            np.broadcast_to(
                i[:, None],
                (i.size, 2),
            )
            +
            self.x_offset
        )

        # fill out Nx2 array to hold each step's left + right vertices.
        y_out = np.empty(
            x_out.shape,
            dtype=out.dtype,
        )
        # fill in (current) values from source shm buffer
        y_out[:] = out[:, np.newaxis]

        # TODO: pretty sure we can drop this?
        # start y at origin level
        # y_out[0, 0] = 0
        # y_out[self.xy_nd_start] = 0
        return x_out, y_out

    def incr_update_xy_nd(
        self,

        src_shm: ShmArray,
        array_key: str,

        new_from_src: np.ndarray,  # portion of source that was updated
        read_slc: slice,
        ln: int,  # len of updated

        nd_start: int,
        nd_stop: int,

        is_append: bool,

    ) -> tuple[
        np.ndarray,
        slice,
    ]:
        # NOTE: for a step curve we slice from one datum prior
        # to the current "update slice" to get the previous
        # "level".
        #
        # why this is needed:
        # - the current new append slice will often have a zero
        #   value in the latest datum-step (at least for zero-on-new
        #   cases like vlm) as per configuration of the FSP engine.
        # - we need to look back a datum to get the last level which
        #   will be used to terminate/complete the last step x-width
        #   which will be set to pair with the last x-index.
        #
        # XXX: this means WE CAN'T USE the append slice since we need to
        # "look backward" one step to get the needed back-to-zero level
        # and the update data in ``new_from_src`` will only contain the
        # latest new data.
        back_1 = slice(
            read_slc.start - 1,
            read_slc.stop,
        )

        to_write = src_shm._array[back_1]
        y_nd_new = self.y_nd[back_1]
        y_nd_new[:] = to_write[array_key][:, None]

        x_nd_new = self.x_nd[read_slc]
        x_nd_new[:] = (
            new_from_src[self.index_field][:, None]
            +
            self.x_offset
        )

        # XXX: uncomment for debugging
        # x_nd = self.x_nd[self.xy_slice]
        # y_nd = self.y_nd[self.xy_slice]
        # name = self.viz.name
        # if 'dolla_vlm' in name:
        #     s = 4
        #     print(
        #         f'{name}:\n'
        #         'NEW_FROM_SRC:\n'
        #         f'new_from_src: {new_from_src}\n\n'

        #         f'PRE self.x_nd:'
        #         f'\n{x_nd[-s:]}\n'
        #         f'PRE self.y_nd:\n'
        #         f'{y_nd[-s:]}\n\n'

        #         f'TO WRITE:\n'
        #         f'x_nd_new:\n'
        #         f'{x_nd_new}\n'
        #         f'y_nd_new:\n'
        #         f'{y_nd_new}\n'
        #     )

    def format_xy_nd_to_1d(
        self,

        array: np.ndarray,
        array_key: str,
        vr: tuple[int, int],

    ) -> tuple[
        np.ndarray,
        np.ndarray,
        str,
    ]:
        last_t, last = array[-1][[self.index_field, array_key]]

        start = self.xy_nd_start
        stop = self.xy_nd_stop

        x_step = self.x_nd[start:stop]
        y_step = self.y_nd[start:stop]

        # slice out in-view data
        ivl, ivr = vr

        # NOTE: add an extra step to get the vertical-line-down-to-zero
        # adjacent to the last-datum graphic (filled rect).
        x_step_iv = x_step[ivl:ivr+1]
        y_step_iv = y_step[ivl:ivr+1]

        # flatten to 1d
        x_1d = x_step_iv.reshape(x_step_iv.size)
        y_1d = y_step_iv.reshape(y_step_iv.size)

        # debugging
        # if y_1d.any():
        #     s = 6
        #     print(
        #         f'x_step_iv:\n{x_step_iv[-s:]}\n'
        #         f'y_step_iv:\n{y_step_iv[-s:]}\n\n'
        #         f'x_1d:\n{x_1d[-s:]}\n'
        #         f'y_1d:\n{y_1d[-s:]}\n'
        #     )

        return x_1d, y_1d, 'all'
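# Editor's note: a small numpy sketch (not part of the diff) of the Nx2
# "step array" layout built by ``StepCurveFmtr`` above: every level gets
# a left and right vertex which, once flattened to 1d, renders as a
# flat-topped step per datum.
def _step_array_example() -> None:
    import numpy as np
    i = np.array([0., 1., 2.])
    levels = np.array([5., 7., 6.])
    x_out = np.broadcast_to(i[:, None], (3, 2)) + np.array([0, 1])
    y_out = np.empty(x_out.shape, dtype=levels.dtype)
    y_out[:] = levels[:, np.newaxis]
    # flattening pairs each level with its [i, i+1] x-span
    assert list(x_out.reshape(-1)) == [0., 1., 1., 2., 2., 3.]
    assert list(y_out.reshape(-1)) == [5., 5., 7., 7., 6., 6.]
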
@@ -15,17 +15,30 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Graphics related downsampling routines for compressing to pixel
limits on the display device.
Graphics downsampling using the infamous M4 algorithm.

This is one of ``piker``'s secret weapons allowing us to boss all other
charting platforms B)

(AND DON'T YOU DARE TAKE THIS CODE WITHOUT CREDIT OR WE'LL SUE UR F#&@* ASS).

NOTES: this method is a so called "visualization driven data
aggregation" approach. It gives error-free line chart
downsampling, see
further scientific paper resources:
- http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
- http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf

Details on implementation of this algo are based in,
https://github.com/pikers/piker/issues/109

'''
import math
from typing import Optional

import numpy as np
from numpy.lib import recfunctions as rfn
from numba import (
    jit,
    njit,
    # float64, optional, int64,
)

@@ -35,109 +48,6 @@ from ..log import get_logger
log = get_logger(__name__)


def hl2mxmn(ohlc: np.ndarray) -> np.ndarray:
    '''
    Convert an OHLC struct-array containing 'high'/'low' columns
    to a "joined" max/min 1-d array.

    '''
    index = ohlc['index']
    hls = ohlc[[
        'low',
        'high',
    ]]

    mxmn = np.empty(2*hls.size, dtype=np.float64)
    x = np.empty(2*hls.size, dtype=np.float64)
    trace_hl(hls, mxmn, x, index[0])
    x = x + index[0]

    return mxmn, x


@jit(
    # TODO: the type annots..
    # float64[:](float64[:],),
    nopython=True,
)
def trace_hl(
    hl: 'np.ndarray',
    out: np.ndarray,
    x: np.ndarray,
    start: int,

    # the "offset" values in the x-domain which
    # place the 2 output points around each ``int``
    # master index.
    margin: float = 0.43,

) -> None:
    '''
    "Trace" the outline of the high-low values of an ohlc sequence
    as a line such that the maximum deviation (aka dispersion)
    between bars is preserved.

    This routine is expected to modify input arrays in-place.

    '''
    last_l = hl['low'][0]
    last_h = hl['high'][0]

    for i in range(hl.size):
        row = hl[i]
        l, h = row['low'], row['high']

        up_diff = h - last_l
        down_diff = last_h - l

        if up_diff > down_diff:
            out[2*i + 1] = h
            out[2*i] = last_l
        else:
            out[2*i + 1] = l
            out[2*i] = last_h

        last_l = l
        last_h = h

        x[2*i] = int(i) - margin
        x[2*i + 1] = int(i) + margin

    return out


def ohlc_flatten(
    ohlc: np.ndarray,
    use_mxmn: bool = True,

) -> tuple[np.ndarray, np.ndarray]:
    '''
    Convert an OHLCV struct-array into a flat ready-for-line-plotting
    1-d array that is 4 times the size with x-domain values distributed
    evenly (by 0.5 steps) over each index.

    '''
    index = ohlc['index']

    if use_mxmn:
        # traces a line optimally over highs to lows
        # using numba. NOTE: pretty sure this is faster
        # and looks about the same as the below output.
        flat, x = hl2mxmn(ohlc)

    else:
        flat = rfn.structured_to_unstructured(
            ohlc[['open', 'high', 'low', 'close']]
        ).flatten()

        x = np.linspace(
            start=index[0] - 0.5,
            stop=index[-1] + 0.5,
            num=len(flat),
        )
    return x, flat


def ds_m4(
    x: np.ndarray,
    y: np.ndarray,

@@ -160,16 +70,6 @@ def ds_m4(
    This is more or less an OHLC style sampling of a line-style series.

    '''
    # NOTE: this method is a so called "visualization driven data
    # aggregation" approach. It gives error-free line chart
    # downsampling, see
    # further scientific paper resources:
    # - http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
    # - http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf

    # Details on implementation of this algo are based in,
    # https://github.com/pikers/piker/issues/109

    # XXX: from infinite on downsampling viewable graphics:
    # "one thing i remembered about the binning - if you are
    # picking a range within your timeseries the start and end bin

@@ -191,6 +91,14 @@ def ds_m4(
    x_end = x[-1]  # x end value/highest in domain
    xrange = (x_end - x_start)

    if xrange < 0:
        log.error(f'-VE M4 X-RANGE: {x_start} -> {x_end}')
        # XXX: broken x-range calc-case, likely the x-end points
        # are wrong and have some default value set (such as
        # x_end -> <some epoch float> while x_start -> 0.5).
        # breakpoint()
        return None

    # XXX: always round up on the input pixels
    # lnx = len(x)
    # uppx *= max(4 / (1 + math.log(uppx, 2)), 1)

@@ -223,14 +131,20 @@ def ds_m4(
    assert frames >= (xrange / uppx)

    # call into ``numba``
    nb, i_win, y_out = _m4(
    (
        nb,
        x_out,
        y_out,
        ymn,
        ymx,
    ) = _m4(
        x,
        y,

        frames,

        # TODO: see func below..
        # i_win,
        # x_out,
        # y_out,

        # first index in x data to start at

@@ -243,14 +157,14 @@ def ds_m4(
    # filter out any overshoot in the input allocation arrays by
    # removing zero-ed tail entries which should start at a certain
    # index.
    i_win = i_win[i_win != 0]
    y_out = y_out[:i_win.size]
    x_out = x_out[x_out != 0]
    y_out = y_out[:x_out.size]

    return nb, i_win, y_out
    # print(f'M4 output ymn, ymx: {ymn},{ymx}')
    return nb, x_out, y_out, ymn, ymx


@jit(
    nopython=True,
@njit(
    nogil=True,
)
def _m4(

@@ -260,8 +174,8 @@ def _m4(

    frames: int,

    # TODO: using this approach by having the ``.zeros()`` alloc lines
    # below, in pure python was causing seg faults and alloc crashes..
    # TODO: using this approach, having the ``.zeros()`` alloc lines
    # below in pure python, there were seg faults and alloc crashes..
    # we might need to see how it behaves with shm arrays and consider
    # allocating them once at startup?

@@ -274,14 +188,22 @@ def _m4(
    x_start: int,
    step: float,

) -> int:
    # nbins = len(i_win)
    # count = len(xs)
) -> tuple[
    int,
    np.ndarray,
    np.ndarray,
    float,
    float,
]:
    '''
    Implementation of the m4 algorithm in ``numba``:
    http://www.vldb.org/pvldb/vol7/p797-jugel.pdf

    '''
    # these are pre-allocated and mutated by ``numba``
    # code in-place.
    y_out = np.zeros((frames, 4), ys.dtype)
    i_win = np.zeros(frames, xs.dtype)
    x_out = np.zeros(frames, xs.dtype)

    bincount = 0
    x_left = x_start

@@ -295,24 +217,34 @@ def _m4(

    # set all bins in the left-most entry to the starting left-most x value
    # (aka a row broadcast).
    i_win[bincount] = x_left
    x_out[bincount] = x_left
    # set all y-values to the first value passed in.
    y_out[bincount] = ys[0]

    # full input y-data mx and mn
    mx: float = -np.inf
    mn: float = np.inf

    # compute OHLC style max / min values per window sized x-frame.
    for i in range(len(xs)):

        x = xs[i]
        y = ys[i]

        if x < x_left + step:   # the current window "step" is [bin, bin+1)
            y_out[bincount, 1] = min(y, y_out[bincount, 1])
            y_out[bincount, 2] = max(y, y_out[bincount, 2])
            ymn = y_out[bincount, 1] = min(y, y_out[bincount, 1])
            ymx = y_out[bincount, 2] = max(y, y_out[bincount, 2])
            y_out[bincount, 3] = y
            mx = max(mx, ymx)
            mn = min(mn, ymn)

        else:
            # find the next bin
            while x >= x_left + step:
                x_left += step

            bincount += 1
            i_win[bincount] = x_left
            x_out[bincount] = x_left
            y_out[bincount] = y

    return bincount, i_win, y_out
    return bincount, x_out, y_out, mn, mx

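# Editor's note: a minimal, numpy-only sketch (not from the diff) of the
# M4 idea implemented by ``_m4()`` above: per x-window keep only the
# first, min, max and last y-values. Function and variable names here
# are illustrative only.
def m4_bin_sketch(
    xs: 'np.ndarray',
    ys: 'np.ndarray',
    step: float,
) -> list[tuple[float, float, float, float]]:
    bins: list[tuple[float, float, float, float]] = []
    x_left = xs[0]
    while x_left <= xs[-1]:
        mask = (xs >= x_left) & (xs < x_left + step)
        if mask.any():
            w = ys[mask]
            # (first, min, max, last) per window, like ``y_out`` rows
            bins.append((w[0], w.min(), w.max(), w[-1]))
        x_left += step
    return bins
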
@@ -56,7 +56,7 @@ def iterticks(
    sig = (
        time,
        tick['price'],
        tick['size']
        tick.get('size')
    )

    if ttype == 'dark_trade':

@@ -0,0 +1,452 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Super fast ``QPainterPath`` generation related operator routines.

"""
from math import (
    ceil,
    floor,
)

import numpy as np
from numpy.lib import recfunctions as rfn
from numba import (
    # types,
    njit,
    float64,
    int64,
    # optional,
)

# TODO: for ``numba`` typing..
# from ._source import numba_ohlc_dtype
from ._m4 import ds_m4
from .._profile import (
    Profiler,
    pg_profile_enabled,
    ms_slower_then,
)


def xy_downsample(
    x,
    y,
    uppx,

    x_spacer: float = 0.5,

) -> tuple[
    np.ndarray,
    np.ndarray,
    float,
    float,
]:
    '''
    Downsample 1D (flat ``numpy.ndarray``) arrays using M4 given an input
    ``uppx`` (units-per-pixel) and add space between discrete datums.

    '''
    # downsample whenever more than 1 pixel per datum can be shown.
    # always refresh data bounds until we get diffing
    # working properly, see above..
    m4_out = ds_m4(
        x,
        y,
        uppx,
    )

    if m4_out is not None:
        bins, x, y, ymn, ymx = m4_out
        # flatten output to 1d arrays suitable for path-graphics generation.
        x = np.broadcast_to(x[:, None], y.shape)
        x = (x + np.array(
            [-x_spacer, 0, 0, x_spacer]
        )).flatten()
        y = y.flatten()

        return x, y, ymn, ymx

    # XXX: we accept a None output for the case where the input range
    # to ``ds_m4()`` is bad (-ve) and we want to catch and debug
    # that (seemingly super rare) circumstance..
    return None

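# Editor's note: a hedged usage sketch (not part of the diff) showing the
# expected call shape of ``xy_downsample()`` above. The sample data is
# made up; only the tuple unpacking mirrors the code as written.
def _xy_downsample_example() -> None:
    x = np.arange(10_000, dtype=np.float64)
    y = np.sin(x / 100)
    out = xy_downsample(x, y, uppx=16)
    if out is not None:
        # flat 1d arrays ready for path generation plus y-range extrema
        x_1d, y_1d, ymn, ymx = out
        assert x_1d.size == y_1d.size
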
@njit(
    # NOTE: need to construct this manually for readonly
    # arrays, see https://github.com/numba/numba/issues/4511
    # (
    #     types.Array(
    #         numba_ohlc_dtype,
    #         1,
    #         'C',
    #         readonly=True,
    #     ),
    #     int64,
    #     types.unicode_type,
    #     optional(float64),
    # ),
    nogil=True
)
def path_arrays_from_ohlc(
    data: np.ndarray,
    start: int64,
    bar_w: float64,
    bar_gap: float64 = 0.16,
    use_time_index: bool = True,

    # XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
    # index_field: str,

) -> tuple[
    np.ndarray,
    np.ndarray,
    np.ndarray,
]:
    '''
    Generate an array of lines objects from input ohlc data.

    '''
    size = int(data.shape[0] * 6)

    # XXX: see this for why the dtype might have to be defined outside
    # the routine.
    # https://github.com/numba/numba/issues/4098#issuecomment-493914533
    x = np.zeros(
        shape=size,
        dtype=float64,
    )
    y, c = x.copy(), x.copy()

    half_w: float = bar_w/2

    # TODO: report bug for assert @
    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
    for i, q in enumerate(data[start:], start):

        open = q['open']
        high = q['high']
        low = q['low']
        close = q['close']

        if use_time_index:
            index = float64(q['time'])
        else:
            index = float64(q['index'])

        # XXX: ``numba`` issue: https://github.com/numba/numba/issues/8622
        # index = float64(q[index_field])
        # AND this (probably)
        # open, high, low, close, index = q[
        #     ['open', 'high', 'low', 'close', 'index']]

        istart = i * 6
        istop = istart + 6

        # x,y detail the 6 points which connect all vertexes of an ohlc bar
        mid: float = index + half_w
        x[istart:istop] = (
            index + bar_gap,
            mid,
            mid,
            mid,
            mid,
            index + bar_w - bar_gap,
        )
        y[istart:istop] = (
            open,
            open,
            low,
            high,
            close,
            close,
        )

        # specifies that the first edge is never connected to the
        # prior bar's last edge thus providing a small "gap"/"space"
        # between bars determined by ``bar_gap``.
        c[istart:istop] = (1, 1, 1, 1, 1, 0)

    return x, y, c

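# Editor's note: a hedged sketch (not part of the diff) of how the
# (x, y, c) triple above is typically consumed; ``arrayToQPath()`` is the
# pyqtgraph helper this module's docstring alludes to and its ``connect``
# kwarg accepts exactly this style of 0/1 mask.
def _ohlc_path_example(data: np.ndarray) -> 'QtGui.QPainterPath':
    import pyqtgraph as pg
    x, y, c = path_arrays_from_ohlc(data, 0, bar_w=1.0)
    # a 0 in the mask breaks the line segment following that point,
    # giving the inter-bar gaps encoded above.
    return pg.functions.arrayToQPath(x, y, connect=c)
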
def hl2mxmn(
    ohlc: np.ndarray,
    index_field: str = 'index',

) -> np.ndarray:
    '''
    Convert an OHLC struct-array containing 'high'/'low' columns
    to a "joined" max/min 1-d array.

    '''
    index = ohlc[index_field]
    hls = ohlc[[
        'low',
        'high',
    ]]

    mxmn = np.empty(2*hls.size, dtype=np.float64)
    x = np.empty(2*hls.size, dtype=np.float64)
    trace_hl(hls, mxmn, x, index[0])
    x = x + index[0]

    return mxmn, x


@njit(
    # TODO: the type annots..
    # float64[:](float64[:],),
)
def trace_hl(
    hl: 'np.ndarray',
    out: np.ndarray,
    x: np.ndarray,
    start: int,

    # the "offset" values in the x-domain which
    # place the 2 output points around each ``int``
    # master index.
    margin: float = 0.43,

) -> None:
    '''
    "Trace" the outline of the high-low values of an ohlc sequence
    as a line such that the maximum deviation (aka dispersion)
    between bars is preserved.

    This routine is expected to modify input arrays in-place.

    '''
    last_l = hl['low'][0]
    last_h = hl['high'][0]

    for i in range(hl.size):
        row = hl[i]
        l, h = row['low'], row['high']

        up_diff = h - last_l
        down_diff = last_h - l

        if up_diff > down_diff:
            out[2*i + 1] = h
            out[2*i] = last_l
        else:
            out[2*i + 1] = l
            out[2*i] = last_h

        last_l = l
        last_h = h

        x[2*i] = int(i) - margin
        x[2*i + 1] = int(i) + margin

    return out

def ohlc_flatten(
    ohlc: np.ndarray,
    use_mxmn: bool = True,
    index_field: str = 'index',

) -> tuple[np.ndarray, np.ndarray]:
    '''
    Convert an OHLCV struct-array into a flat ready-for-line-plotting
    1-d array that is 4 times the size with x-domain values distributed
    evenly (by 0.5 steps) over each index.

    '''
    index = ohlc[index_field]

    if use_mxmn:
        # traces a line optimally over highs to lows
        # using numba. NOTE: pretty sure this is faster
        # and looks about the same as the below output.
        flat, x = hl2mxmn(ohlc)

    else:
        flat = rfn.structured_to_unstructured(
            ohlc[['open', 'high', 'low', 'close']]
        ).flatten()

        x = np.linspace(
            start=index[0] - 0.5,
            stop=index[-1] + 0.5,
            num=len(flat),
        )
    return x, flat

def slice_from_time(
    arr: np.ndarray,
    start_t: float,
    stop_t: float,
    step: int | None = None,

) -> slice:
    '''
    Calculate array indices mapped from a time range and return them in
    a slice.

    Given an input array with an epoch `'time'` series entry, calculate
    the indices which span the time range and return them in a slice.
    Presume each `'time'` step increment is uniform and, when the time
    stamp series contains gaps (so the uniform presumption is untrue),
    use ``np.searchsorted()`` binary search to look up the appropriate
    index.

    '''
    profiler = Profiler(
        msg='slice_from_time()',
        disabled=not pg_profile_enabled(),
        ms_threshold=ms_slower_then,
    )

    times = arr['time']
    t_first = floor(times[0])
    t_last = ceil(times[-1])

    # the greatest index we can return which slices to the
    # end of the input array.
    read_i_max = arr.shape[0]

    # TODO: require this is always passed in?
    if step is None:
        step = round(t_last - times[-2])
        if step == 0:
            step = 1

    # compute (presumed) uniform-time-step index offsets
    i_start_t = floor(start_t)
    read_i_start = floor(((i_start_t - t_first) // step)) - 1

    i_stop_t = ceil(stop_t)

    # XXX: edge case -> always set stop index to last in array whenever
    # the input stop time is detected to be greater than the equiv time
    # stamp at that last entry.
    if i_stop_t >= t_last:
        read_i_stop = read_i_max
    else:
        read_i_stop = ceil((i_stop_t - t_first) // step) + 1

    # always clip outputs to array support
    # for read start:
    # - never allow a start < the 0 index
    # - never allow an end index > the read array len
    read_i_start = min(
        max(0, read_i_start),
        read_i_max - 1,
    )
    read_i_stop = max(
        0,
        min(read_i_stop, read_i_max),
    )

    # check for a larger-than-latest calculated index for the given
    # start time, in which case we do a binary search for the correct
    # index.
    # NOTE: this is usually the result of a time series with time gaps
    # where it is expected that each index step maps to a uniform step
    # in the time stamp series.
    t_iv_start = times[read_i_start]
    if (
        t_iv_start > i_start_t
    ):
        # do a binary search for the best index mapping to ``start_t``
        # given we measured an overshoot using the uniform-time-step
        # calculation from above.

        # TODO: once we start caching these per source-array,
        # we can just overwrite ``read_i_start`` directly.
        new_read_i_start = np.searchsorted(
            times,
            i_start_t,
            side='left',
        )

        # TODO: minimize binary search work as much as possible:
        # - cache these remap values which compensate for gaps in the
        #   uniform time step basis where we calc a later start
        #   index for the given input ``start_t``.
        # - can we shorten the input search sequence by heuristic?
        #   up_to_arith_start = index[:read_i_start]

        if (
            new_read_i_start <= read_i_start
        ):
            # t_diff = t_iv_start - start_t
            # print(
            #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
            #     f'start_t:{start_t} -> 0index start_t:{t_iv_start}\n'
            #     f'diff: {t_diff}\n'
            #     f'REMAPPED START i: {read_i_start} -> {new_read_i_start}\n'
            # )
            read_i_start = new_read_i_start - 1

    t_iv_stop = times[read_i_stop - 1]
    if (
        t_iv_stop > i_stop_t
    ):
        # t_diff = stop_t - t_iv_stop
        # print(
        #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
        #     f'calced iv stop:{t_iv_stop} -> stop_t:{stop_t}\n'
        #     f'diff: {t_diff}\n'
        #     # f'SHOULD REMAP STOP: {read_i_start} -> {new_read_i_start}\n'
        # )
        new_read_i_stop = np.searchsorted(
            times[read_i_start:],
            # times,
            i_stop_t,
            side='left',
        )

        if (
            new_read_i_stop <= read_i_stop
        ):
            read_i_stop = read_i_start + new_read_i_stop + 1

    # sanity checks for range size
    # samples = (i_stop_t - i_start_t) // step
    # index_diff = read_i_stop - read_i_start + 1
    # if index_diff > (samples + 3):
    #     breakpoint()

    # read-relative indexes: gives a slice where `shm.array[read_slc]`
    # will be the data spanning the input time range `start_t` ->
    # `stop_t`
    read_slc = slice(
        int(read_i_start),
        int(read_i_stop),
    )

    profiler(
        'slicing complete'
        # f'{start_t} -> {abs_slc.start} | {read_slc.start}\n'
        # f'{stop_t} -> {abs_slc.stop} | {read_slc.stop}\n'
    )

    # NOTE: if a caller needs absolute buffer indices they can
    # slice the buffer's abs index like so:
    # index = arr['index']
    # abs_indx = index[read_slc]
    # abs_slc = slice(
    #     int(abs_indx[0]),
    #     int(abs_indx[-1]),
    # )

    return read_slc
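# Editor's note: a hedged usage sketch (not part of the diff) of
# ``slice_from_time()`` above against a synthetic 1s-step time series;
# the struct-array dtype is made up for illustration.
def _slice_from_time_example() -> None:
    arr = np.zeros(
        100,
        dtype=[('time', 'f8'), ('index', 'i8')],
    )
    arr['time'] = 1_000_000 + np.arange(100)  # uniform 1s steps
    arr['index'] = np.arange(100)
    read_slc = slice_from_time(
        arr,
        start_t=1_000_010,
        stop_t=1_000_020,
    )
    # the returned slice spans (roughly) the requested 10s window
    assert arr[read_slc]['time'][0] <= 1_000_010
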
(diff for one file suppressed: too large to display)
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@@ -27,13 +27,14 @@ from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
if _USE_POSIX:
    from _posixshmem import shm_unlink

import tractor
# import msgspec
import numpy as np
from pydantic import BaseModel
from numpy.lib import recfunctions as rfn
import tractor

from ..log import get_logger
from ._source import base_iohlc_dtype
from .types import Struct


log = get_logger(__name__)

@@ -49,7 +50,11 @@ _rt_buffer_start = int((_days_worth - 1) * _secs_in_day)


def cuckoff_mantracker():
    '''
    Disable all ``multiprocessing`` "resource tracking" machinery since
    it's an absolute multi-threaded mess of non-SC madness.

    '''
    from multiprocessing import resource_tracker as mantracker

    # Tell the "resource tracker" thing to fuck off.

@@ -107,36 +112,39 @@ class SharedInt:
        log.warning(f'Shm for {name} already unlinked?')


class _Token(BaseModel):
class _Token(Struct, frozen=True):
    '''
    Internal representation of a shared memory "token"
    which can be used to key a system wide post shm entry.

    '''
    class Config:
        frozen = True

    shm_name: str  # this serves as a "key" value
    shm_first_index_name: str
    shm_last_index_name: str
    dtype_descr: tuple
    size: int  # in struct-array index / row terms

    @property
    def dtype(self) -> np.dtype:
        return np.dtype(list(map(tuple, self.dtype_descr))).descr

    def as_msg(self):
        return self.dict()
        return self.to_dict()

    @classmethod
    def from_msg(cls, msg: dict) -> _Token:
        if isinstance(msg, _Token):
            return msg

        # TODO: native struct decoding
        # return _token_dec.decode(msg)

        msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
        return _Token(**msg)


# _token_dec = msgspec.msgpack.Decoder(_Token)

# TODO: this api?
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
# _known_tokens = tractor.ContextStack('_known_tokens', )

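# Editor's note: a hedged sketch (not part of the diff) of the ``_Token``
# IPC round-trip implied by ``as_msg()``/``from_msg()`` above; the field
# values are made up.
def _token_roundtrip_example() -> None:
    tok = _Token(
        shm_name='btcusdt.binance.hist',
        shm_first_index_name='btcusdt.binance.hist_first',
        shm_last_index_name='btcusdt.binance.hist_last',
        dtype_descr=(('time', '<f8'), ('close', '<f8')),
        size=1000,
    )
    msg = tok.as_msg()  # plain dict, safe to ship over IPC
    assert _Token.from_msg(msg) == tok
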
@@ -155,6 +163,7 @@ def get_shm_token(key: str) -> _Token:

def _make_token(
    key: str,
    size: int,
    dtype: Optional[np.dtype] = None,
) -> _Token:
    '''

@@ -167,7 +176,8 @@ def _make_token(
        shm_name=key,
        shm_first_index_name=key + "_first",
        shm_last_index_name=key + "_last",
        dtype_descr=np.dtype(dtype).descr
        dtype_descr=tuple(np.dtype(dtype).descr),
        size=size,
    )


@@ -219,6 +229,7 @@ class ShmArray:
            shm_first_index_name=self._first._shm.name,
            shm_last_index_name=self._last._shm.name,
            dtype_descr=tuple(self._array.dtype.descr),
            size=self._len,
        )

    @property

@@ -433,7 +444,7 @@ class ShmArray:
def open_shm_array(

    key: Optional[str] = None,
    size: int = _default_size,
    size: int = _default_size,  # see above
    dtype: Optional[np.dtype] = None,
    readonly: bool = False,


@@ -464,7 +475,8 @@ def open_shm_array(

    token = _make_token(
        key=key,
        dtype=dtype
        size=size,
        dtype=dtype,
    )

    # create single entry arrays for storing a first and last index

@@ -516,15 +528,15 @@ def open_shm_array(
    # "unlink" created shm on process teardown by
    # pushing teardown calls onto actor context stack

    tractor._actor._lifetime_stack.callback(shmarr.close)
    tractor._actor._lifetime_stack.callback(shmarr.destroy)
    stack = tractor.current_actor().lifetime_stack
    stack.callback(shmarr.close)
    stack.callback(shmarr.destroy)

    return shmarr


def attach_shm_array(
    token: tuple[str, str, tuple[str, str]],
    size: int = _default_size,
    readonly: bool = True,

) -> ShmArray:

@@ -563,7 +575,7 @@ def attach_shm_array(
        raise _err

    shmarr = np.ndarray(
        (size,),
        (token.size,),
        dtype=token.dtype,
        buffer=shm.buf
    )

@@ -602,8 +614,8 @@ def attach_shm_array(
    if key not in _known_tokens:
        _known_tokens[key] = token

    # "close" attached shm on process teardown
    tractor._actor._lifetime_stack.callback(sha.close)
    # "close" attached shm on actor teardown
    tractor.current_actor().lifetime_stack.callback(sha.close)

    return sha


@@ -631,6 +643,7 @@ def maybe_open_shm_array(
    use ``attach_shm_array``.

    '''
    size = kwargs.pop('size', _default_size)
    try:
        # see if we already know this key
        token = _known_tokens[key]

@@ -638,7 +651,11 @@ def maybe_open_shm_array(
    except KeyError:
        log.warning(f"Could not find {key} in shms cache")
        if dtype:
            token = _make_token(key, dtype)
            token = _make_token(
                key,
                size=size,
                dtype=dtype,
            )
            try:
                return attach_shm_array(token=token, **kwargs), False
            except FileNotFoundError:

@@ -18,12 +18,16 @@
numpy data source conversion helpers.
"""
from __future__ import annotations
from decimal import (
    Decimal,
    ROUND_HALF_EVEN,
)
from typing import Any
import decimal

from bidict import bidict
import numpy as np
from pydantic import BaseModel

from .types import Struct
# from numba import from_dtype


@@ -76,10 +80,14 @@ def mk_fqsn(
def float_digits(
    value: float,
) -> int:
    '''
    Return the number of precision digits read from a float value.

    '''
    if value == 0:
        return 0

    return int(-decimal.Decimal(str(value)).as_tuple().exponent)
    return int(-Decimal(str(value)).as_tuple().exponent)


def ohlc_zeros(length: int) -> np.ndarray:

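# Editor's note: a quick worked example (not part of the diff) of
# ``float_digits()`` above: the exponent of the ``Decimal`` tuple form
# counts the digits right of the decimal point.
def _float_digits_example() -> None:
    assert float_digits(0.01) == 2    # Decimal('0.01') -> exponent -2
    assert float_digits(0.00025) == 5
    assert float_digits(0) == 0       # special-cased early return
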
@@ -126,7 +134,57 @@ def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
    )


class Symbol(BaseModel):
class MktPair(Struct, frozen=True):

    src: str  # source asset name being used to buy
    src_type: str  # source asset's financial type/classification name
    # ^ specifies a "class" of financial instrument
    # egs. stock, future, option, bond etc.

    dst: str  # destination asset name being bought
    dst_type: str  # destination asset's financial type/classification name

    price_tick: float  # minimum price increment value
    price_tick_digits: int  # required decimal digits for above

    size_tick: float  # minimum size (aka vlm) increment value
    size_tick_digits: int  # required decimal digits for above

    venue: str | None = None  # market venue provider name
    expiry: str | None = None  # for derivs, expiry datetime parseable str

    # for derivs, info describing contract, egs.
    # strike price, call or put, swap type, exercise model, etc.
    contract_info: str | None = None

    @classmethod
    def from_msg(
        self,
        msg: dict[str, Any],

    ) -> MktPair:
        '''
        Constructor for a received msg-dict normally received over IPC.

        '''
        ...

    # fqa, fqma, .. etc. see issue:
    # https://github.com/pikers/piker/issues/467
    @property
    def fqsn(self) -> str:
        '''
        Return the fully qualified market (endpoint) name for the
        pair of transacting assets.

        '''
        ...


# TODO: rework the below `Symbol` (which was originally inspired and
# derived from stuff in quantdom) into a simpler, ipc msg ready, market
# endpoint meta-data container type as per the drafted interface above.
class Symbol(Struct):
    '''
    I guess this is some kinda container thing for dealing with
    all the different meta-data formats from brokers?

@@ -140,10 +198,6 @@ class Symbol(BaseModel):
    suffix: str = ''
    broker_info: dict[str, dict[str, Any]] = {}

    # specifies a "class" of financial instrument
    # ex. stock, future, option, bond etc.

    # @validate_arguments
    @classmethod
    def from_broker_info(
        cls,

@@ -152,19 +206,17 @@ class Symbol(BaseModel):
        info: dict[str, Any],
        suffix: str = '',

    # XXX: like wtf..
    # ) -> 'Symbol':
    ) -> None:
    ) -> Symbol:

        tick_size = info.get('price_tick_size', 0.01)
        lot_tick_size = info.get('lot_tick_size', 0.0)
        lot_size = info.get('lot_tick_size', 0.0)

        return Symbol(
            key=symbol,
            tick_size=tick_size,
            lot_tick_size=lot_tick_size,
            lot_tick_size=lot_size,
            tick_size_digits=float_digits(tick_size),
            lot_size_digits=float_digits(lot_tick_size),
            lot_size_digits=float_digits(lot_size),
            suffix=suffix,
            broker_info={broker: info},
        )

@@ -175,9 +227,7 @@ class Symbol(BaseModel):
        fqsn: str,
        info: dict[str, Any],

    # XXX: like wtf..
    # ) -> 'Symbol':
    ) -> None:
    ) -> Symbol:
        broker, key, suffix = unpack_fqsn(fqsn)
        return cls.from_broker_info(
            broker,

@@ -221,6 +271,10 @@ class Symbol(BaseModel):
        else:
            return (key, broker)

    @property
    def fqsn(self) -> str:
        return '.'.join(self.tokens()).lower()

    def front_fqsn(self) -> str:
        '''
        fqsn = "fully qualified symbol name"

@@ -240,18 +294,24 @@ class Symbol(BaseModel):

        '''
        tokens = self.tokens()
        fqsn = '.'.join(tokens)
        fqsn = '.'.join(map(str.lower, tokens))
        return fqsn

    def iterfqsns(self) -> list[str]:
        keys = []
        for broker in self.broker_info.keys():
            fqsn = mk_fqsn(self.key, broker)
            if self.suffix:
                fqsn += f'.{self.suffix}'
            keys.append(fqsn)

        return keys

    def quantize_size(
        self,
        size: float,

    ) -> Decimal:
        '''
        Truncate input ``size: float`` using ``Decimal``
        and ``.lot_size_digits``.

        '''
        digits = self.lot_size_digits
        return Decimal(size).quantize(
            Decimal(f'1.{"0".ljust(digits, "0")}'),
            rounding=ROUND_HALF_EVEN
        )


def _nan_to_closest_num(array: np.ndarray):

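# Editor's note: a worked example (not part of the diff) of the
# ``Decimal`` rounding pattern used by ``quantize_size()`` above; note
# the quantize target string for 3 digits is '1.000'.
def _quantize_example() -> None:
    from decimal import Decimal, ROUND_HALF_EVEN
    size = Decimal('1.23456789')
    quantized = size.quantize(
        Decimal('1.000'),  # what f'1.{"0".ljust(3, "0")}' evaluates to
        rounding=ROUND_HALF_EVEN,
    )
    assert str(quantized) == '1.235'
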
@@ -18,13 +18,24 @@
ToOlS fOr CoPInG wITh "tHE wEB" protocols.

"""
from contextlib import asynccontextmanager, AsyncExitStack
from contextlib import (
    asynccontextmanager,
    AsyncExitStack,
)
from itertools import count
from types import ModuleType
from typing import Any, Callable, AsyncGenerator
from typing import (
    Any,
    Optional,
    Callable,
    AsyncGenerator,
    Iterable,
)
import json

import trio
import trio_websocket
from wsproto.utilities import LocalProtocolError
from trio_websocket._impl import (
    ConnectionClosed,
    DisconnectionTimeout,

@@ -35,43 +46,53 @@ from trio_websocket._impl import (

from ..log import get_logger

from .types import Struct

log = get_logger(__name__)


class NoBsWs:
    """Make ``trio_websocket`` sockets stay up no matter the bs.
    '''
    Make ``trio_websocket`` sockets stay up no matter the bs.

    """
    You can provide a ``fixture`` async-context-manager which will be
    entered/exited around each reconnect operation.
    '''
    recon_errors = (
        ConnectionClosed,
        DisconnectionTimeout,
        ConnectionRejected,
        HandshakeError,
        ConnectionTimeout,
        LocalProtocolError,
    )

    def __init__(
        self,
        url: str,
        token: str,
        stack: AsyncExitStack,
        fixture: Callable,
        serializer: ModuleType = json,
        fixture: Optional[Callable] = None,
        serializer: ModuleType = json
    ):
        self.url = url
        self.token = token
        self.fixture = fixture
        self._stack = stack
        self._ws: 'WebSocketConnection' = None  # noqa

        # TODO: is there some method we can call
        # on the underlying `._ws` to get this?
        self._connected: bool = False

    async def _connect(
        self,
        tries: int = 1000,
    ) -> None:

        self._connected = False
        while True:
            try:
                await self._stack.aclose()
            except (DisconnectionTimeout, RuntimeError):
            except self.recon_errors:
                await trio.sleep(0.5)
            else:
                break

@@ -82,19 +103,18 @@ class NoBsWs:
            self._ws = await self._stack.enter_async_context(
                trio_websocket.open_websocket_url(self.url)
            )
            # rerun user code fixture
            if self.token == '':

            if self.fixture is not None:
                # rerun user code fixture
                ret = await self._stack.enter_async_context(
                    self.fixture(self)
                )
            else:
                ret = await self._stack.enter_async_context(
                    self.fixture(self, self.token)
                )

            assert ret is None
                assert ret is None

            log.info(f'Connection success: {self.url}')

            self._connected = True
            return self._ws

        except self.recon_errors as err:

@@ -104,11 +124,15 @@ class NoBsWs:
                f'{type(err)}...retry attempt {i}'
            )
            await trio.sleep(0.5)
            self._connected = False
            continue
        else:
            log.exception('ws connection fail...')
            raise last_err

    def connected(self) -> bool:
        return self._connected

    async def send_msg(
        self,
        data: Any,

@@ -128,21 +152,26 @@ class NoBsWs:
        except self.recon_errors:
            await self._connect()

    def __aiter__(self):
        return self

    async def __anext__(self):
        return await self.recv_msg()


@asynccontextmanager
async def open_autorecon_ws(
    url: str,

    # TODO: proper type annot smh
    fixture: Callable,
    # used for authenticated websockets
    token: str = '',
    # TODO: proper type annot smh
    fixture: Optional[Callable] = None,

) -> AsyncGenerator[tuple[...], NoBsWs]:
    """Apparently we can QoS for all sorts of reasons..so catch em.

    """
    async with AsyncExitStack() as stack:
        ws = NoBsWs(url, token, stack, fixture=fixture)
        ws = NoBsWs(url, stack, fixture=fixture)
        await ws._connect()

        try:

@@ -150,3 +179,114 @@ async def open_autorecon_ws(

        finally:
            await stack.aclose()


'''
JSONRPC response-request style machinery for transparent multiplexing
of msgs over a NoBsWs.

'''


class JSONRPCResult(Struct):
    id: int
    jsonrpc: str = '2.0'
    result: Optional[dict] = None
    error: Optional[dict] = None


@asynccontextmanager
async def open_jsonrpc_session(
    url: str,
    start_id: int = 0,
    response_type: type = JSONRPCResult,
    request_type: Optional[type] = None,
    request_hook: Optional[Callable] = None,
    error_hook: Optional[Callable] = None,
) -> Callable[[str, dict], dict]:

    async with (
        trio.open_nursery() as n,
        open_autorecon_ws(url) as ws
    ):
        rpc_id: Iterable = count(start_id)
        rpc_results: dict[int, dict] = {}

        async def json_rpc(method: str, params: dict) -> dict:
            '''
            Perform a json rpc call and wait for the result, raising an
            exception when an error field is present in the response.

            '''
            msg = {
                'jsonrpc': '2.0',
                'id': next(rpc_id),
                'method': method,
                'params': params
            }
            _id = msg['id']

            rpc_results[_id] = {
                'result': None,
                'event': trio.Event()
            }

            await ws.send_msg(msg)

            await rpc_results[_id]['event'].wait()

            ret = rpc_results[_id]['result']

            del rpc_results[_id]

            if ret.error is not None:
                raise Exception(json.dumps(ret.error, indent=4))

            return ret

        async def recv_task():
            '''
            Receives every ws message and stores it in its corresponding
            result field, then sets the event to wake up the original
            sender tasks. Also receives responses to requests originated
            from the server side.

            '''
            async for msg in ws:
                match msg:
                    case {
                        'result': _,
                        'id': mid,
                    } if res_entry := rpc_results.get(mid):

                        res_entry['result'] = response_type(**msg)
                        res_entry['event'].set()

                    case {
                        'result': _,
                        'id': mid,
                    } if not rpc_results.get(mid):
                        log.warning(
                            f'Unexpected ws msg: {json.dumps(msg, indent=4)}'
                        )

                    case {
                        'method': _,
                        'params': _,
                    }:
                        log.debug(f'Received\n{msg}')
                        if request_hook:
                            await request_hook(request_type(**msg))

                    case {
                        'error': error
                    }:
                        log.warning(f'Received\n{error}')
                        if error_hook:
                            await error_hook(response_type(**msg))

                    case _:
                        log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')

        n.start_soon(recv_task)
        yield json_rpc
        n.cancel_scope.cancel()

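# Editor's note: a hedged usage sketch (not part of the diff) of
# ``open_jsonrpc_session()`` above; the endpoint URL and method/params
# are made up for illustration.
async def _jsonrpc_example() -> None:
    async with open_jsonrpc_session(
        'wss://example.com/api/ws',  # hypothetical endpoint
    ) as json_rpc:
        # each call allocates the next rpc id, sends the request and
        # blocks on a ``trio.Event`` until ``recv_task`` matches the
        # response by id.
        res = await json_rpc(
            'public/get_time',  # hypothetical method name
            {},
        )
        print(res.result)
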
@@ -18,31 +18,22 @@
marketstore cli.

"""
from functools import partial
from pprint import pformat

from anyio_marketstore import open_marketstore_client
import trio
import tractor
import click
import numpy as np

from .marketstore import (
    get_client,
from ..service.marketstore import (
    # get_client,
    # stream_quotes,
    ingest_quote_stream,
    # _url,
    _tick_tbk_ids,
    mk_tbk,
    # _tick_tbk_ids,
    # mk_tbk,
)
from ..cli import cli
from .. import watchlists as wl
from ..log import get_logger
from ._sharedmem import (
    maybe_open_shm_array,
)
from ._source import (
    base_iohlc_dtype,
from ..log import (
    get_logger,
)


@@ -89,16 +80,16 @@ def ms_stream(
    # async def main():
    #     nonlocal names
    #     async with get_client(url) as client:
    #
    #         if not names:
    #             names = await client.list_symbols()
    #
    #         # default is to wipe db entirely.
    #         answer = input(
    #             "This will entirely wipe your local marketstore db @ "
    #             f"{url} of the following symbols:\n {pformat(names)}"
    #             "\n\nDelete [N/y]?\n")
    #
    #         if answer == 'y':
    #             for sym in names:
    #                 # tbk = _tick_tbk.format(sym)

@@ -107,21 +98,17 @@ def ms_stream(
    #                 await client.destroy(mk_tbk(tbk))
    #             else:
    #                 print("Nothing deleted.")
    #
    #     tractor.run(main)


@cli.command()
@click.option(
    '--tl',
    is_flag=True,
    help='Enable tractor logging')
@click.option(
    '--host',
    '--tsdb_host',
    default='localhost'
)
@click.option(
    '--port',
    '--tsdb_port',
    default=5993
)
@click.argument('symbols', nargs=-1)

@ -137,18 +124,93 @@ def storesh(
|
|||
Start an IPython shell ready to query the local marketstore db.
|
||||
|
||||
'''
|
||||
from piker.data.marketstore import tsdb_history_update
|
||||
from piker._daemon import open_piker_runtime
|
||||
from piker.data.marketstore import open_tsdb_client
|
||||
from piker.service import open_piker_runtime
|
||||
|
||||
async def main():
|
||||
nonlocal symbols
|
||||
|
||||
async with open_piker_runtime(
|
||||
'storesh',
|
||||
enable_modules=['piker.data._ahab'],
|
||||
enable_modules=['piker.service._ahab'],
|
||||
):
|
||||
symbol = symbols[0]
|
||||
await tsdb_history_update(symbol)
|
||||
|
||||
async with open_tsdb_client(symbol):
|
||||
# TODO: ask if user wants to write history for detected
|
||||
# available shm buffers?
|
||||
from tractor.trionics import ipython_embed
|
||||
await ipython_embed()
|
||||
|
||||
trio.run(main)
|
||||
|
||||
|
||||
@cli.command()
@click.option(
'--host',
default='localhost'
)
@click.option(
'--port',
default=5993
)
@click.option(
'--delete',
'-d',
is_flag=True,
help='Delete history (1 Min) for symbol(s)',
)
@click.argument('symbols', nargs=-1)
@click.pass_obj
def storage(
config,
host,
port,
symbols: list[str],
delete: bool,

):
'''
Start an IPython shell ready to query the local marketstore db.

'''
from piker.service.marketstore import open_tsdb_client
from piker.service import open_piker_runtime

async def main():
nonlocal symbols

async with open_piker_runtime(
'tsdb_storage',
enable_modules=['piker.service._ahab'],
):
symbol = symbols[0]
async with open_tsdb_client(symbol) as storage:
if delete:
for fqsn in symbols:
syms = await storage.client.list_symbols()

resp60s = await storage.delete_ts(fqsn, 60)

msgish = resp60s.ListFields()[0][1]
if 'error' in str(msgish):

# TODO: MEGA LOL, apparently the symbols don't
# flush out until you refresh something or other
# (maybe the WALFILE)... #lelandorlulzone, classic
# alpaca(Rtm) design here ..
# well, if we ever can make this work we
# probably want to dogsplain the real reason
# for the delete errurz..llululu
if fqsn not in syms:
log.error(f'Pair {fqsn} dne in DB')

log.error(f'Deletion error: {fqsn}\n{msgish}')

resp1s = await storage.delete_ts(fqsn, 1)
msgish = resp1s.ListFields()[0][1]
if 'error' in str(msgish):
log.error(f'Deletion error: {fqsn}\n{msgish}')

trio.run(main)
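Assuming the standard `piker` CLI entrypoint wires up this `storage` command, a deletion run would look something like `piker storage --delete <fqsn>` (the symbol arguments being whatever fqsns exist in your local db); both the 1m and 1s timeframes for each symbol get wiped.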

@@ -182,7 +244,7 @@ def ingest(config, name, test_file, tl):

async def entry_point():
async with tractor.open_nursery() as n:
for provider, symbols in grouped_syms.items():
for provider, symbols in grouped_syms.items():
await n.run_in_actor(
ingest_quote_stream,
name='ingest_marketstore',
piker/data/feed.py (1945 lines changed): file diff suppressed because it is too large
@@ -0,0 +1,210 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
abstractions for organizing, managing and generally operating-on
real-time data processing data-structures.

"Streams, flumes, cascades and flows.."

"""
from __future__ import annotations
from typing import (
TYPE_CHECKING,
)

import tractor
import pendulum
import numpy as np

from .types import Struct
from ._source import (
Symbol,
)
from ._sharedmem import (
attach_shm_array,
ShmArray,
_Token,
)
# from .._profile import (
# Profiler,
# pg_profile_enabled,
# )

if TYPE_CHECKING:
# from pyqtgraph import PlotItem
from .feed import Feed


# TODO: ideas for further abstractions as per
# https://github.com/pikers/piker/issues/216 and
# https://github.com/pikers/piker/issues/270:
# - a ``Cascade`` would be the minimal "connection" of 2 ``Flumes``
# as per circuit parlance:
# https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection
# - could cover the combination of our `FspAdmin` and the
# backend `.fsp._engine` related machinery to "connect" one flume
# to another?
# - a (financial signal) ``Flow`` would be a "collection" of such
# minimal cascades. Some engineering based jargon concepts:
# - https://en.wikipedia.org/wiki/Signal_chain
# - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering)
# - https://en.wikipedia.org/wiki/Audio_signal_flow
# - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation
# - https://en.wikipedia.org/wiki/Dataflow_programming
# - https://en.wikipedia.org/wiki/Signal_programming
# - https://en.wikipedia.org/wiki/Incremental_computing


class Flume(Struct):
'''
Composite reference type which points to all the addressing handles
and other meta-data necessary for the read, measure and management
of a set of real-time updated data flows.

Can be thought of as a "flow descriptor" or "flow frame" which
describes the high level properties of a set of data flows that can
be used seamlessly across process-memory boundaries.

Each instance's sub-components normally includes:
- a msg oriented quote stream provided via an IPC transport
- history and real-time shm buffers which are both real-time
updated and backfilled.
- associated startup indexing information related to both buffer
real-time-append and historical prepend addresses.
- low level APIs to read and measure the updated data and manage
queuing properties.

'''
symbol: Symbol
first_quote: dict
_rt_shm_token: _Token

# optional since some data flows won't have a "downsampled" history
# buffer/stream (eg. FSPs).
_hist_shm_token: _Token | None = None

# private shm refs loaded dynamically from tokens
_hist_shm: ShmArray | None = None
_rt_shm: ShmArray | None = None

stream: tractor.MsgStream | None = None
izero_hist: int = 0
izero_rt: int = 0
throttle_rate: int | None = None

# TODO: do we need this really if we can pull the `Portal` from
# ``tractor``'s internals?
feed: Feed | None = None

@property
def rt_shm(self) -> ShmArray:

if self._rt_shm is None:
self._rt_shm = attach_shm_array(
token=self._rt_shm_token,
readonly=True,
)

return self._rt_shm

@property
def hist_shm(self) -> ShmArray:

if self._hist_shm_token is None:
raise RuntimeError(
'No shm token has been set for the history buffer?'
)

if (
self._hist_shm is None
):
self._hist_shm = attach_shm_array(
token=self._hist_shm_token,
readonly=True,
)

return self._hist_shm

async def receive(self) -> dict:
return await self.stream.receive()

def get_ds_info(
self,
) -> tuple[float, float, float]:
'''
Compute the "downsampling" ratio info between the historical shm
buffer and the real-time (HFT) one.

Return a tuple of the fast sample period, historical sample
period and ratio between them.

'''
times = self.hist_shm.array['time']
end = pendulum.from_timestamp(times[-1])
start = pendulum.from_timestamp(times[times != times[-1]][-1])
hist_step_size_s = (end - start).seconds

times = self.rt_shm.array['time']
end = pendulum.from_timestamp(times[-1])
start = pendulum.from_timestamp(times[times != times[-1]][-1])
rt_step_size_s = (end - start).seconds

ratio = hist_step_size_s / rt_step_size_s
return (
rt_step_size_s,
hist_step_size_s,
ratio,
)

# TODO: get native msgspec decoding for these workinn
def to_msg(self) -> dict:
msg = self.to_dict()
msg['symbol'] = msg['symbol'].to_dict()

# can't serialize the stream or feed objects, it's expected
# you'll have a ref to it since this msg should be rxed on
# a stream on whatever far end IPC..
msg.pop('stream')
msg.pop('feed')
return msg

@classmethod
def from_msg(cls, msg: dict) -> dict:
symbol = Symbol(**msg.pop('symbol'))
return cls(
symbol=symbol,
**msg,
)

def get_index(
self,
time_s: float,
array: np.ndarray,

) -> int | float:
'''
Return array shm-buffer index for epoch time.

'''
times = array['time']
first = np.searchsorted(
times,
time_s,
side='left',
)
imx = times.shape[0] - 1
return min(first, imx)
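Since ``to_msg()`` pops the non-serializable ``stream``/``feed`` refs, a received msg can be rehydrated on the far end with ``from_msg()`` and the shm buffers lazily re-attached on first property access. A minimal sketch (``feed_flume`` and the transport wiring here are assumed stand-ins, not real piker plumbing):

    msg: dict = feed_flume.to_msg()  # plain dict, safe to ship over IPC
    # ... send `msg` over a `tractor.MsgStream` ...
    rxed = Flume.from_msg(msg)  # rebuilds the `Symbol` + shm tokens
    rt_ohlcv = rxed.rt_shm  # first access re-attaches the rt shm buffer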

@@ -0,0 +1,88 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
Built-in (extension) types.

"""
import sys
from typing import Optional
from pprint import pformat

import msgspec


class Struct(
msgspec.Struct,

# https://jcristharif.com/msgspec/structs.html#tagged-unions
# tag='pikerstruct',
# tag=True,
):
'''
A "human friendlier" (aka repl buddy) struct subtype.

'''
def to_dict(self) -> dict:
return {
f: getattr(self, f)
for f in self.__struct_fields__
}

# Lul, doesn't seem to work that well..
# def __repr__(self):
# # only turn on pprint when we detect a python REPL
# # at runtime B)
# if (
# hasattr(sys, 'ps1')
# # TODO: check if we're in pdb
# ):
# return self.pformat()

# return super().__repr__()

def pformat(self) -> str:
return f'Struct({pformat(self.to_dict())})'

def copy(
self,
update: Optional[dict] = None,

) -> msgspec.Struct:
'''
Validate-typecast all self defined fields, return a copy of us
with all such fields.

This is kinda like the default behaviour in `pydantic.BaseModel`.

'''
if update:
for k, v in update.items():
setattr(self, k, v)

# roundtrip serialize to validate
return msgspec.msgpack.Decoder(
type=type(self)
).decode(
msgspec.msgpack.Encoder().encode(self)
)

def typecast(
self,
# fields: Optional[list[str]] = None,
) -> None:
for fname, ftype in self.__annotations__.items():
setattr(self, fname, ftype(getattr(self, fname)))
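The encode-then-decode roundtrip in ``copy()`` is what enforces the field types: ``msgspec`` re-validates on the decode leg. A tiny sketch using an assumed subtype (``Point`` is illustrative, not piker code):

    class Point(Struct):
        x: float
        y: float

    p = Point(x=1.0, y=2.0)
    p2 = p.copy(update={'x': 3.14})  # revalidated copy with `x` swapped
    # p.copy(update={'x': 'nope'}) would raise a `msgspec`
    # validation error when the msgpack bytes are decoded back
    # against the declared `float` field type.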

@@ -78,7 +78,8 @@ class Fsp:
# + the consuming fsp *to* the consumers output
# shm flow.
_flow_registry: dict[
tuple[_Token, str], _Token,
tuple[_Token, str],
tuple[_Token, Optional[ShmArray]],
] = {}

def __init__(

@@ -120,7 +121,6 @@ class Fsp:
):
return self.func(*args, **kwargs)

# TODO: lru_cache this? pretty sure it'll work?
def get_shm(
self,
src_shm: ShmArray,

@@ -131,12 +131,27 @@ class Fsp:
for this "instance" of a signal processor for
the given ``key``.

The destination shm "token" and array are cached if possible to
minimize multiple stdlib/system calls.

'''
dst_token = self._flow_registry[
dst_token, maybe_array = self._flow_registry[
(src_shm._token, self.name)
]
shm = attach_shm_array(dst_token)
return shm
if maybe_array is None:
self._flow_registry[
(src_shm._token, self.name)
] = (
dst_token,
# "cache" the ``ShmArray`` such that
# we call the underlying "attach" code as few
# times as possible as per:
# - https://github.com/pikers/piker/issues/359
# - https://github.com/pikers/piker/issues/332
maybe_array := attach_shm_array(dst_token)
)

return maybe_array
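The walrus assignment above both stashes the freshly attached ``ShmArray`` in the registry tuple and leaves it bound for the final return. Stripped of the piker specifics, the pattern is (all names here are illustrative):

    _registry: dict[str, tuple[str, object | None]] = {}

    def cached_attach(key: str) -> object:
        tok, arr = _registry[key]
        if arr is None:
            _registry[key] = (
                tok,
                # attach once: cache and bind in a single expression
                (arr := expensive_attach(tok)),
            )
        return arr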

def fsp(

@@ -184,7 +199,10 @@ def maybe_mk_fsp_shm(
# TODO: load output types from `Fsp`
# - should `index` be a required internal field?
fsp_dtype = np.dtype(
[('index', int)] +
[('index', int)]
+
[('time', float)]
+
[(field_name, float) for field_name in target.outputs]
)
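Concretely, for an FSP declaring a single output this composes a numpy struct-array dtype with ``index``, ``time`` and one float column per output. A standalone sketch (the ``'wma'`` output name is just an example):

    import numpy as np

    outputs = ('wma',)  # stand-in for `target.outputs`
    fsp_dtype = np.dtype(
        [('index', int)]
        + [('time', float)]
        + [(name, float) for name in outputs]
    )
    arr = np.zeros(4, dtype=fsp_dtype)
    arr[-1] = (3, 1_600_000_000.0, 42.0)  # (index, time, wma)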


@@ -21,12 +21,13 @@ core task logic for processing chains
from dataclasses import dataclass
from functools import partial
from typing import (
AsyncIterator, Callable, Optional,
AsyncIterator,
Callable,
Optional,
Union,
)

import numpy as np
import pyqtgraph as pg
import trio
from trio_typing import TaskStatus
import tractor

@@ -35,14 +36,22 @@ from tractor.msg import NamespacePath
from ..log import get_logger, get_console_log
from .. import data
from ..data import attach_shm_array
from ..data.feed import Feed
from ..data.feed import (
Flume,
Feed,
)
from ..data._sharedmem import ShmArray
from ..data._sampling import (
_default_delay_s,
open_sample_stream,
)
from ..data._source import Symbol
from ._api import (
Fsp,
_load_builtins,
_Token,
)
from .._profile import Profiler

log = get_logger(__name__)

@@ -77,7 +86,7 @@ async def filter_quotes_by_sym(
async def fsp_compute(

symbol: Symbol,
feed: Feed,
flume: Flume,
quote_stream: trio.abc.ReceiveChannel,

src: ShmArray,

@@ -90,7 +99,7 @@ async def fsp_compute(

) -> None:

profiler = pg.debug.Profiler(
profiler = Profiler(
delayed=False,
disabled=True
)

@@ -105,16 +114,17 @@ async def fsp_compute(
filter_quotes_by_sym(fqsn, quote_stream),

# XXX: currently the ``ohlcv`` arg
feed.shm,
flume.rt_shm,
)

# Conduct a single iteration of fsp with historical bars input
# and get historical output
# HISTORY COMPUTE PHASE
# conduct a single iteration of fsp with historical bars input
# and get historical output.
history_output: Union[
dict[str, np.ndarray],  # multi-output case
np.ndarray,  # single output case
]
history_output = await out_stream.__anext__()
history_output = await anext(out_stream)

func_name = func.__name__
profiler(f'{func_name} generated history')

@@ -126,9 +136,13 @@ async def fsp_compute(
# each respective field.
fields = getattr(dst.array.dtype, 'fields', None).copy()
fields.pop('index')
history: Optional[np.ndarray] = None  # TODO: nptyping here!
history_by_field: Optional[np.ndarray] = None
src_time = src.array['time']

if fields and len(fields) > 1 and fields:
if (
fields and
len(fields) > 1
):
if not isinstance(history_output, dict):
raise ValueError(
f'`{func_name}` is a multi-output FSP and should yield a '

@@ -139,7 +153,7 @@ async def fsp_compute(
if key in history_output:
output = history_output[key]

if history is None:
if history_by_field is None:

if output is None:
length = len(src.array)

@@ -149,7 +163,7 @@ async def fsp_compute(
# using the first output, determine
# the length of the struct-array that
# will be pushed to shm.
history = np.zeros(
history_by_field = np.zeros(
length,
dtype=dst.array.dtype
)

@@ -157,7 +171,7 @@ async def fsp_compute(
if output is None:
continue

history[key] = output
history_by_field[key] = output

# single-key output stream
else:

@@ -166,11 +180,15 @@ async def fsp_compute(
f'`{func_name}` is a single output FSP and should yield an '
'`np.ndarray` for history'
)
history = np.zeros(
history_by_field = np.zeros(
len(history_output),
dtype=dst.array.dtype
)
history[func_name] = history_output
history_by_field[func_name] = history_output

history_by_field['time'] = src_time[-len(history_by_field):]

history_output['time'] = src.array['time']

# TODO: XXX:
# THERE'S A BIG BUG HERE WITH THE `index` field since we're

@@ -187,7 +205,10 @@ async def fsp_compute(

# TODO: can we use this `start` flag instead of the manual
# setting above?
index = dst.push(history, start=first)
index = dst.push(
history_by_field,
start=first,
)

profiler(f'{func_name} pushed history')
profiler.finish()

@@ -213,8 +234,14 @@ async def fsp_compute(

log.debug(f"{func_name}: {processed}")
key, output = processed
index = src.index
dst.array[-1][key] = output
# dst.array[-1][key] = output
dst.array[[key, 'time']][-1] = (
output,
# TODO: what about pushing ``time.time_ns()``
# in which case we'll need to round at the graphics
# processing / sampling layer?
src.array[-1]['time']
)

# NOTE: for now we aren't streaming this to the consumer
# stream latest array index entry which basically just acts

@@ -225,6 +252,7 @@ async def fsp_compute(
# N-consumers who subscribe for the real-time output,
# which we'll likely want to implement using local-mem
# chans for the fan out?
# index = src.index
# if attach_stream:
# await client_stream.send(index)

@@ -261,7 +289,7 @@ async def cascade(
destination shm array buffer.

'''
profiler = pg.debug.Profiler(
profiler = Profiler(
delayed=False,
disabled=False
)

@@ -284,9 +312,10 @@ async def cascade(
# TODO: ugh i hate this wind/unwind to list over the wire
# but not sure how else to do it.
for (token, fsp_name, dst_token) in shm_registry:
Fsp._flow_registry[
(_Token.from_msg(token), fsp_name)
] = _Token.from_msg(dst_token)
Fsp._flow_registry[(
_Token.from_msg(token),
fsp_name,
)] = _Token.from_msg(dst_token), None

fsp: Fsp = reg.get(
NamespacePath(ns_path)

@@ -298,6 +327,7 @@ async def cascade(
raise ValueError(f'Unknown fsp target: {ns_path}')

# open a data feed stream with requested broker
feed: Feed
async with data.feed.maybe_open_feed(
[fqsn],


@@ -307,14 +337,13 @@ async def cascade(
# needs to get throttled the ticks we generate.
# tick_throttle=60,

) as (feed, quote_stream):
symbol = feed.symbols[fqsn]
) as feed:

flume = feed.flumes[fqsn]
symbol = flume.symbol
assert src.token == flume.rt_shm.token
profiler(f'{func}: feed up')

assert src.token == feed.shm.token
# last_len = new_len = len(src.array)

func_name = func.__name__
async with (
trio.open_nursery() as n,

@@ -324,8 +353,8 @@ async def cascade(

fsp_compute,
symbol=symbol,
feed=feed,
quote_stream=quote_stream,
flume=flume,
quote_stream=flume.stream,

# shm
src=src,

@@ -361,7 +390,7 @@ async def cascade(
) -> tuple[TaskTracker, int]:
# TODO: adopt an incremental update engine/approach
# where possible here eventually!
log.debug(f're-syncing fsp {func_name} to source')
log.info(f're-syncing fsp {func_name} to source')
tracker.cs.cancel()
await tracker.complete.wait()
tracker, index = await n.start(fsp_target)

@@ -374,14 +403,16 @@ async def cascade(
'key': dst_shm_token,
'first': dst._first.value,
'last': dst._last.value,
}})
}
})
return tracker, index

def is_synced(
src: ShmArray,
dst: ShmArray
) -> tuple[bool, int, int]:
'''Predicate to determine if a destination FSP
'''
Predicate to determine if a destination FSP
output array is aligned to its source array.

'''

@@ -390,16 +421,15 @@ async def cascade(
return not (
# the source is likely backfilling and we must
# sync history calculations
len_diff > 2 or
len_diff > 2

# we aren't step synced to the source and may be
# leading/lagging by a step
step_diff > 1 or
step_diff < 0
or step_diff > 1
or step_diff < 0
), step_diff, len_diff

async def poll_and_sync_to_step(

tracker: TaskTracker,
src: ShmArray,
dst: ShmArray,

@@ -418,18 +448,23 @@ async def cascade(
# detect sample period step for subscription to increment
# signal
times = src.array['time']
delay_s = times[-1] - times[times != times[-1]][-1]
if len(times) > 1:
last_ts = times[-1]
delay_s = float(last_ts - times[times != last_ts][-1])
else:
# our default "HFT" sample rate.
delay_s = _default_delay_s
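The sample-period detection leans on the fact that the tail row of the shm buffer can repeat its timestamp until the next step lands; stepping back to the last differing timestamp yields the true period. A standalone check of the idiom:

    import numpy as np

    times = np.array([1.0, 2.0, 3.0, 3.0])  # tail ts may repeat
    last_ts = times[-1]
    # last timestamp that differs from the final one is one step back
    delay_s = float(last_ts - times[times != last_ts][-1])
    assert delay_s == 1.0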

# Increment the underlying shared memory buffer on every
# "increment" msg received from the underlying data feed.
async with feed.index_stream(
int(delay_s)
) as istream:
# sub and increment the underlying shared memory buffer
# on every step msg received from the global `samplerd`
# service.
async with open_sample_stream(float(delay_s)) as istream:

profiler(f'{func_name}: sample stream up')
profiler.finish()

async for _ in istream:
async for i in istream:
# print(f'FSP incrementing {i}')

# respawn the compute task if the source
# array has been updated such that we compute

@@ -458,3 +493,23 @@ async def cascade(
last = array[-1:].copy()

dst.push(last)

# sync with source buffer's time step
src_l2 = src.array[-2:]
src_li, src_lt = src_l2[-1][['index', 'time']]
src_2li, src_2lt = src_l2[-2][['index', 'time']]
dst._array['time'][src_li] = src_lt
dst._array['time'][src_2li] = src_2lt

# last2 = dst.array[-2:]
# if (
# last2[-1]['index'] != src_li
# or last2[-2]['index'] != src_2li
# ):
# dstl2 = list(last2)
# srcl2 = list(src_l2)
# print(
# # f'{dst.token}\n'
# f'src: {srcl2}\n'
# f'dst: {dstl2}\n'
# )


@@ -234,7 +234,7 @@ async def flow_rates(
# FSPs, user input, and possibly any general event stream in
# real-time. Hint: ideally implemented with caching until mutated
# ;)
period: 'Param[int]' = 6,  # noqa
period: 'Param[int]' = 1,  # noqa

# TODO: support other means by providing a map
# to weights `partial()`-ed with `wma()`?

@@ -268,8 +268,7 @@ async def flow_rates(
'dark_dvlm_rate': None,
}

# TODO: 3.10 do ``anext()``
quote = await source.__anext__()
quote = await anext(source)

# ltr = 0
# lvr = 0
piker/log.py (46 lines changed)
@@ -21,7 +21,11 @@ import logging
import json

import tractor
from pygments import highlight, lexers, formatters
from pygments import (
highlight,
lexers,
formatters,
)

# Makes it so we only see the full module name when using ``__name__``
# without the extra "piker." prefix.

@@ -32,26 +36,48 @@ def get_logger(
name: str = None,

) -> logging.Logger:
'''Return the package log or a sub-log for `name` if provided.
'''
Return the package log or a sub-log for `name` if provided.

'''
return tractor.log.get_logger(name=name, _root_name=_proj_name)


def get_console_log(level: str = None, name: str = None) -> logging.Logger:
'''Get the package logger and enable a handler which writes to stderr.
def get_console_log(
level: str | None = None,
name: str | None = None,

) -> logging.Logger:
'''
Get the package logger and enable a handler which writes to stderr.

Yeah yeah, i know we can use ``DictConfig``. You do it...

'''
return tractor.log.get_console_log(
level, name=name, _root_name=_proj_name)  # our root logger
level,
name=name,
_root_name=_proj_name,
)  # our root logger


def colorize_json(data, style='algol_nu'):
"""Colorize json output using ``pygments``.
"""
formatted_json = json.dumps(data, sort_keys=True, indent=4)
def colorize_json(
data: dict,
style='algol_nu',
):
'''
Colorize json output using ``pygments``.

'''
formatted_json = json.dumps(
data,
sort_keys=True,
indent=4,
)
return highlight(
formatted_json, lexers.JsonLexer(),
formatted_json,
lexers.JsonLexer(),

# likeable styles: algol_nu, tango, monokai
formatters.TerminalTrueColorFormatter(style=style)
)
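For reference, the helper is a plain call once the module is imported; e.g. dumping a config mapping to a true-color terminal (the dict contents here are just an example):

    from piker.log import colorize_json

    print(colorize_json({'host': 'localhost', 'port': 5993}))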
File diff suppressed because it is too large

@@ -0,0 +1,60 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
Actor-runtime service orchestration machinery.

"""
from __future__ import annotations

from ._mngr import Services
from ._registry import (  # noqa
_tractor_kwargs,
_default_reg_addr,
_default_registry_host,
_default_registry_port,
open_registry,
find_service,
check_for_service,
)
from ._daemon import (  # noqa
maybe_spawn_daemon,
spawn_brokerd,
maybe_spawn_brokerd,
spawn_emsd,
maybe_open_emsd,
)
from ._actor_runtime import (
open_piker_runtime,
maybe_open_pikerd,
open_pikerd,
get_tractor_runtime_kwargs,
)


__all__ = [
'check_for_service',
'Services',
'maybe_spawn_daemon',
'spawn_brokerd',
'maybe_spawn_brokerd',
'spawn_emsd',
'maybe_open_emsd',
'open_piker_runtime',
'maybe_open_pikerd',
'open_pikerd',
'get_tractor_runtime_kwargs',
]
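With this package init in place, downstream code imports the service layer from the new namespace rather than the old `piker._daemon` path (as the `storesh` diff above already does):

    from piker.service import (
        open_piker_runtime,
        maybe_open_pikerd,
        Services,
    )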


@@ -0,0 +1,347 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
``tractor`` wrapping + default config to bootstrap the `pikerd`.

"""
from __future__ import annotations
from pprint import pformat
from functools import partial
import os
from typing import (
Optional,
Any,
ClassVar,
)
from contextlib import (
asynccontextmanager as acm,
)

import tractor
import trio

from ..log import (
get_logger,
get_console_log,
)
from ._mngr import (
Services,
)
from ._registry import (  # noqa
_tractor_kwargs,
_default_reg_addr,
open_registry,
)

log = get_logger(__name__)


def get_tractor_runtime_kwargs() -> dict[str, Any]:
'''
Deliver ``tractor`` related runtime variables in a `dict`.

'''
return _tractor_kwargs


@acm
async def open_piker_runtime(
name: str,
enable_modules: list[str] = [],
loglevel: Optional[str] = None,

# XXX NOTE XXX: you should pretty much never want debug mode
# for data daemons when running in production.
debug_mode: bool = False,

registry_addr: None | tuple[str, int] = None,

# TODO: once we have `rsyscall` support we will read a config
# and spawn the service tree distributed per that.
start_method: str = 'trio',

tractor_runtime_overrides: dict | None = None,
**tractor_kwargs,

) -> tuple[
tractor.Actor,
tuple[str, int],
]:
'''
Start a piker actor whose runtime will automatically sync with
existing piker actors on the local link based on configuration.

Can be called from a subactor or any program that needs to start
a root actor.

'''
try:
# check for existing runtime
actor = tractor.current_actor().uid

except tractor._exceptions.NoRuntime:
tractor._state._runtime_vars[
'piker_vars'] = tractor_runtime_overrides

registry_addr = registry_addr or _default_reg_addr

async with (
tractor.open_root_actor(

# passed through to ``open_root_actor``
arbiter_addr=registry_addr,
name=name,
loglevel=loglevel,
debug_mode=debug_mode,
start_method=start_method,

# TODO: eventually we should be able to avoid
# having the root have more than permissions to
# spawn other specialized daemons I think?
enable_modules=enable_modules,

**tractor_kwargs,
) as _,

open_registry(registry_addr, ensure_exists=False) as addr,
):
yield (
tractor.current_actor(),
addr,
)
else:
async with open_registry(registry_addr) as addr:
yield (
actor,
addr,
)


_root_dname = 'pikerd'
_root_modules = [
__name__,
'piker.service._daemon',
'piker.clearing._ems',
'piker.clearing._client',
'piker.data._sampling',
]


@acm
async def open_pikerd(

loglevel: str | None = None,

# XXX: you should pretty much never want debug mode
# for data daemons when running in production.
debug_mode: bool = False,
registry_addr: None | tuple[str, int] = None,

# db init flags
tsdb: bool = False,
es: bool = False,
drop_root_perms_for_ahab: bool = True,

**kwargs,

) -> Services:
'''
Start a root piker daemon with an indefinite lifetime.

A root actor nursery is created which can be used to create and keep
alive underlying services (see below).

'''
async with (
open_piker_runtime(

name=_root_dname,
# TODO: eventually we should be able to avoid
# having the root have more than permissions to
# spawn other specialized daemons I think?
enable_modules=_root_modules,
loglevel=loglevel,
debug_mode=debug_mode,
registry_addr=registry_addr,

**kwargs,

) as (root_actor, reg_addr),
tractor.open_nursery() as actor_nursery,
trio.open_nursery() as service_nursery,
):
if root_actor.accept_addr != reg_addr:
raise RuntimeError(f'Daemon failed to bind on {reg_addr}!?')

# assign globally for future daemon/task creation
Services.actor_n = actor_nursery
Services.service_n = service_nursery
Services.debug_mode = debug_mode

if tsdb:
from ._ahab import start_ahab
from .marketstore import start_marketstore

log.info('Spawning `marketstore` supervisor')
ctn_ready, config, (cid, pid) = await service_nursery.start(
partial(
start_ahab,
'marketstored',
start_marketstore,
loglevel=loglevel,
drop_root_perms=drop_root_perms_for_ahab,
)

)
log.info(
f'`marketstored` up!\n'
f'pid: {pid}\n'
f'container id: {cid[:12]}\n'
f'config: {pformat(config)}'
)

if es:
from ._ahab import start_ahab
from .elastic import start_elasticsearch

log.info('Spawning `elasticsearch` supervisor')
ctn_ready, config, (cid, pid) = await service_nursery.start(
partial(
start_ahab,
'elasticsearch',
start_elasticsearch,
loglevel=loglevel,
drop_root_perms=drop_root_perms_for_ahab,
)
)

log.info(
f'`elasticsearch` up!\n'
f'pid: {pid}\n'
f'container id: {cid[:12]}\n'
f'config: {pformat(config)}'
)

try:
yield Services

finally:
# TODO: is this more clever/efficient?
# if 'samplerd' in Services.service_tasks:
# await Services.cancel_service('samplerd')
service_nursery.cancel_scope.cancel()


# TODO: do we even need this?
# @acm
# async def maybe_open_runtime(
# loglevel: Optional[str] = None,
# **kwargs,

# ) -> None:
# '''
# Start the ``tractor`` runtime (a root actor) if none exists.

# '''
# name = kwargs.pop('name')

# if not tractor.current_actor(err_on_no_runtime=False):
# async with open_piker_runtime(
# name,
# loglevel=loglevel,
# **kwargs,
# ) as (_, addr):
# yield addr,
# else:
# async with open_registry() as addr:
# yield addr


@acm
async def maybe_open_pikerd(
loglevel: Optional[str] = None,
registry_addr: None | tuple = None,
tsdb: bool = False,
es: bool = False,
drop_root_perms_for_ahab: bool = True,

**kwargs,

) -> tractor._portal.Portal | ClassVar[Services]:
'''
If no ``pikerd`` daemon-root-actor can be found start it and
yield up (we should probably figure out returning a portal to self
though).

'''
if loglevel:
get_console_log(loglevel)

# subtle, we must have the runtime up here or portal lookup will fail
query_name = kwargs.pop(
'name',
f'piker_query_{os.getpid()}',
)

# TODO: if we need to make the query part faster we could not init
# an actor runtime and instead just hit the socket?
# from tractor._ipc import _connect_chan, Channel
# async with _connect_chan(host, port) as chan:
# async with open_portal(chan) as arb_portal:
# yield arb_portal

async with (
open_piker_runtime(
name=query_name,
registry_addr=registry_addr,
loglevel=loglevel,
**kwargs,
) as _,

tractor.find_actor(
_root_dname,
arbiter_sockaddr=registry_addr,
) as portal
):
# connect to any existing daemon presuming
# its registry socket was selected.
if (
portal is not None
):
yield portal
return

# presume pikerd role since no daemon could be found at
# configured address
async with open_pikerd(
loglevel=loglevel,
registry_addr=registry_addr,

# ahabd (docker super) specific controls
tsdb=tsdb,
es=es,
drop_root_perms_for_ahab=drop_root_perms_for_ahab,

# passthrough to ``tractor`` init
**kwargs,

) as service_manager:
# in the case where we're starting up the
# tractor-piker runtime stack in **this** process
# we return no portal to self.
assert service_manager
yield service_manager
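A minimal client-side sketch of the bootstrap entrypoint above; the flags mirror the signature shown and the body is a placeholder:

    import trio

    async def main():
        async with maybe_open_pikerd(
            loglevel='info',
            tsdb=True,  # also spin up the marketstore supervisor
        ) as portal_or_services:
            # either a portal to an existing `pikerd`, or, when this
            # process became the root, the `Services` manager itself.
            ...

    trio.run(main)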

@@ -15,9 +15,12 @@
# along with this program. If not, see <https://www.gnu.org/licenses/>.

'''
Supervisor for docker with included specific-image service helpers.
Supervisor for ``docker`` with included async and SC wrapping
to ensure a cancellable container lifetime system.

'''
from collections import ChainMap
from functools import partial
import os
import time
from typing import (

@@ -37,10 +40,18 @@ from docker.models.containers import Container as DockerContainer
from docker.errors import (
DockerException,
APIError,
# ContainerError,
)
import requests
from requests.exceptions import (
ConnectionError,
ReadTimeout,
)
from requests.exceptions import ConnectionError, ReadTimeout

from ..log import get_logger, get_console_log
from ..log import (
get_logger,
get_console_log,
)
from .. import config

log = get_logger(__name__)

@@ -50,8 +61,8 @@ class DockerNotStarted(Exception):
'Prolly you dint start da daemon bruh'


class ContainerError(RuntimeError):
'Error reported via app-container logging level'
class ApplicationLogError(Exception):
'App in container reported an error in logs'


@acm

@@ -96,9 +107,9 @@ async def open_docker(
# not perms?
raise

finally:
if client:
client.close()
# finally:
# if client:
# client.close()


class Container:

@@ -119,8 +130,19 @@ class Container:

async def process_logs_until(
self,
patt: str,
bp_on_msg: bool = False,
log_msg_key: str,

# this is a predicate func for matching log msgs emitted by the
# underlying containerized app
patt_matcher: Callable[[str], bool],

# XXX WARNING XXX: do not touch this sleep value unless
# you know what you are doing! the value is critical to
# making sure the caller code inside the startup context
# does not timeout BEFORE we receive a match on the
# ``patt_matcher()`` predicate above.
checkpoint_period: float = 0.001,

) -> bool:
'''
Attempt to capture container log messages and relay through our

@@ -131,6 +153,15 @@ class Container:

while True:
logs = self.cntr.logs()
try:
logs = self.cntr.logs()
except (
docker.errors.NotFound,
docker.errors.APIError
):
log.exception('Failed to parse logs?')
return False

entries = logs.decode().split('\n')
for entry in entries:


@@ -138,34 +169,44 @@ class Container:
if not entry:
continue

entry = entry.strip()
try:
record = json.loads(entry.strip())
record = json.loads(entry)
msg = record[log_msg_key]
level = record['level']

except json.JSONDecodeError:
if 'Error' in entry:
raise RuntimeError(entry)
raise
msg = entry
level = 'error'

msg = record['msg']
level = record['level']
if msg and entry not in seen_so_far:
# TODO: do we need a more general mechanism
# for these kinds of "log record entries"?
# if 'Error' in entry:
# raise RuntimeError(entry)

if (
msg
and entry not in seen_so_far
):
seen_so_far.add(entry)
if bp_on_msg:
await tractor.breakpoint()
getattr(log, level.lower(), log.error)(f'{msg}')

getattr(log, level, log.error)(f'{msg}')
if level == 'fatal':
raise ApplicationLogError(msg)

# print(f'level: {level}')
if level in ('error', 'fatal'):
raise ContainerError(msg)

if patt in msg:
if await patt_matcher(msg):
return True

# do a checkpoint so we don't block if cancelled B)
await trio.sleep(0.01)
await trio.sleep(checkpoint_period)

return False

@property
def cuid(self) -> str:
fqcn: str = self.cntr.attrs['Config']['Image']
return f'{fqcn}[{self.cntr.short_id}]'

def try_signal(
self,
signal: str = 'SIGINT',

@@ -185,29 +226,65 @@ class Container:
if 'is not running' in err.explanation:
return False

def hard_kill(self, start: float) -> None:
delay = time.time() - start
# get out the big guns, bc apparently marketstore
# doesn't actually know how to terminate gracefully
# :eyeroll:...
log.error(
f'SIGKILL-ing: {self.cntr.id} after {delay}s\n'
)
self.try_signal('SIGKILL')
self.cntr.wait(
timeout=3,
condition='not-running',
)

async def cancel(
self,
stop_msg: str,
) -> None:
log_msg_key: str,
stop_predicate: Callable[[str], bool],

hard_kill: bool = False,

) -> None:
'''
Attempt to cancel this container gracefully, fail over to
a hard kill on timeout.

'''
cid = self.cntr.id

# first try a graceful cancel
log.cancel(
f'SIGINT cancelling container: {cid}\n'
f'waiting on stop msg: "{stop_msg}"'
f'SIGINT cancelling container: {self.cuid}\n'
'waiting on stop predicate...'
)
self.try_signal('SIGINT')

start = time.time()
for _ in range(30):
for _ in range(6):

with trio.move_on_after(0.5) as cs:
cs.shield = True
await self.process_logs_until(stop_msg)
log.cancel('polling for CNTR logs...')

# if we aren't cancelled on above checkpoint then we
# assume we read the expected stop msg and terminated.
break
try:
await self.process_logs_until(
log_msg_key,
stop_predicate,
)
except ApplicationLogError:
hard_kill = True
else:
# if we aren't cancelled on above checkpoint then we
# assume we read the expected stop msg and
# terminated.
break

if cs.cancelled_caught:
# on timeout just try a hard kill after
# a quick container sync-wait.
hard_kill = True

try:
log.info(f'Polling for container shutdown:\n{cid}')
@@ -218,6 +295,7 @@ class Container:
condition='not-running',
)

# graceful exit if we didn't time out
break

except (

@@ -229,36 +307,39 @@ class Container:
except (
docker.errors.APIError,
ConnectionError,
requests.exceptions.ConnectionError,
trio.Cancelled,
):
log.exception('Docker connection failure')
break
else:
delay = time.time() - start
log.error(
f'Failed to kill container {cid} after {delay}s\n'
'sending SIGKILL..'
)
# get out the big guns, bc apparently marketstore
# doesn't actually know how to terminate gracefully
# :eyeroll:...
self.try_signal('SIGKILL')
self.cntr.wait(
timeout=3,
condition='not-running',
)
self.hard_kill(start)
raise

log.cancel(f'Container stopped: {cid}')
except trio.Cancelled:
log.exception('trio cancelled...')
self.hard_kill(start)
else:
hard_kill = True

if hard_kill:
self.hard_kill(start)
else:
log.cancel(f'Container stopped: {cid}')


@tractor.context
async def open_ahabd(
ctx: tractor.Context,
endpoint: str,  # ns-pointer str-msg-type
loglevel: str | None = 'cancel',

**kwargs,

) -> None:
get_console_log('info', name=__name__)

log = get_console_log(
loglevel,
name=__name__,
)

async with open_docker() as client:


@@ -269,26 +350,84 @@ async def open_ahabd(
(
dcntr,
cntr_config,
start_msg,
stop_msg,
start_pred,
stop_pred,
) = ep_func(client)
cntr = Container(dcntr)

with trio.move_on_after(1):
found = await cntr.process_logs_until(start_msg)
conf: ChainMap[str, Any] = ChainMap(

if not found and cntr not in client.containers.list():
raise RuntimeError(
'Failed to start `marketstore` check logs deats'
)

await ctx.started((
cntr.cntr.id,
os.getpid(),
# container specific
cntr_config,
))

# defaults
{
# startup time limit which is the max the supervisor
# will wait for the container to be registered in
# ``client.containers.list()``
'startup_timeout': 1.0,

# how fast to poll for the startup predicate by sleeping
# this amount incrementally thus yielding to the
# ``trio`` scheduler during sync polling execution.
'startup_query_period': 0.001,

# str-key value expected to contain log message body-contents
# when read using:
# ``json.loads(entry for entry in DockerContainer.logs())``
'log_msg_key': 'msg',


# startup sync func, like `Nursery.started()`
'started_afunc': None,
},
)

try:
with trio.move_on_after(conf['startup_timeout']) as cs:
async with trio.open_nursery() as tn:
tn.start_soon(
partial(
cntr.process_logs_until,
log_msg_key=conf['log_msg_key'],
patt_matcher=start_pred,
checkpoint_period=conf['startup_query_period'],
)
)

# optional blocking routine
started = conf['started_afunc']
if started:
await started()

# poll for container startup or timeout
while not cs.cancel_called:
if dcntr in client.containers.list():
break

await trio.sleep(conf['startup_query_period'])

# sync with remote caller actor-task but allow log
# processing to continue running in bg.
await ctx.started((
cntr.cntr.id,
os.getpid(),
cntr_config,
))

# XXX: if we timeout on finding the "startup msg" we
# expect then we want to FOR SURE raise an error
# upwards!
if cs.cancelled_caught:
# if dcntr not in client.containers.list():
for entry in cntr.seen_so_far:
log.info(entry)

raise DockerNotStarted(
f'Failed to start container: {cntr.cuid}\n'
f'due to timeout={conf["startup_timeout"]}s\n\n'
"check ur container's logs!"
)

# TODO: we might eventually want a proxy-style msg-prot here
# to allow remote control of containers without needing

@@ -296,13 +435,25 @@ async def open_ahabd(
await trio.sleep_forever()

finally:
# TODO: ensure loglevel can be set and teardown logs are
# reported if possible on error or cancel..
# XXX WARNING: currently shielding here can result in hangs
# on ctl-c from user.. ideally we can avoid a cancel getting
# consumed and not propagating whilst still doing teardown
# logging..
with trio.CancelScope(shield=True):
await cntr.cancel(stop_msg)
await cntr.cancel(
log_msg_key=conf['log_msg_key'],
stop_predicate=stop_pred,
)


async def start_ahab(
service_name: str,
endpoint: Callable[docker.DockerClient, DockerContainer],
loglevel: str | None = 'cancel',
drop_root_perms: bool = True,

task_status: TaskStatus[
tuple[
trio.Event,

@@ -323,13 +474,12 @@ async def start_ahab(
'''
cn_ready = trio.Event()
try:
async with tractor.open_nursery(
loglevel='runtime',
) as tn:
async with tractor.open_nursery() as an:

portal = await tn.start_actor(
portal = await an.start_actor(
service_name,
enable_modules=[__name__]
enable_modules=[__name__],
loglevel=loglevel,
)

# TODO: we have issues with this on teardown

@@ -339,7 +489,10 @@ async def start_ahab(

# de-escalate root perms to the original user
# after the docker supervisor actor is spawned.
if config._parent_user:
if (
drop_root_perms
and config._parent_user
):
import pwd
os.setuid(
pwd.getpwnam(

@@ -350,6 +503,7 @@ async def start_ahab(
async with portal.open_context(
open_ahabd,
endpoint=str(NamespacePath.from_ref(endpoint)),
loglevel='cancel',
) as (ctx, first):

cid, pid, cntr_config = first
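Endpoints now hand back async predicate callables instead of raw pattern strings; a sketch of what a stop predicate might look like (the matched text is illustrative, each container image defines its own shutdown log line):

    async def stop_pred(msg: str) -> bool:
        # `process_logs_until()` awaits this once per decoded log line
        return 'exiting...' in msg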

@@ -0,0 +1,271 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""
Daemon-actor spawning "endpoint-hooks".

"""
from __future__ import annotations
from typing import (
Optional,
Callable,
Any,
)
from contextlib import (
asynccontextmanager as acm,
)

import tractor

from ..log import (
get_logger,
get_console_log,
)
from ..brokers import get_brokermod
from ._mngr import (
Services,
)
from ._actor_runtime import maybe_open_pikerd
from ._registry import find_service

log = get_logger(__name__)

# `brokerd` enabled modules
# NOTE: keeping this list as small as possible is part of our caps-sec
# model and should be treated with utmost care!
_data_mods = [
'piker.brokers.core',
'piker.brokers.data',
'piker.data',
'piker.data.feed',
'piker.data._sampling'
]


@acm
async def maybe_spawn_daemon(

service_name: str,
service_task_target: Callable,
spawn_args: dict[str, Any],
loglevel: Optional[str] = None,

singleton: bool = False,
**kwargs,

) -> tractor.Portal:
'''
If no ``service_name`` daemon-actor can be found,
spawn one in a local subactor and return a portal to it.

If this function is called from a non-pikerd actor, the
spawned service will persist as long as pikerd does or
it is requested to be cancelled.

This can be seen as a service starting api for remote-actor
clients.

'''
if loglevel:
get_console_log(loglevel)

# serialize access to this section to avoid
# 2 or more tasks racing to create a daemon
lock = Services.locks[service_name]
await lock.acquire()

async with find_service(service_name) as portal:
if portal is not None:
lock.release()
yield portal
return

log.warning(f"Couldn't find any existing {service_name}")

# TODO: really shouldn't the actor spawning be part of the service
# starting method `Services.start_service()` ?

# ask root ``pikerd`` daemon to spawn the daemon we need if
# pikerd is not live we now become the root of the
# process tree
async with maybe_open_pikerd(

loglevel=loglevel,
**kwargs,

) as pikerd_portal:

# we are the root and thus are `pikerd`
# so spawn the target service directly by calling
# the provided target routine.
# XXX: this assumes that the target is well formed and will
# do the right things to setup both a sub-actor **and** call
# the ``_Services`` api from above to start the top level
# service task for that actor.
started: bool
if pikerd_portal is None:
started = await service_task_target(**spawn_args)

else:
# tell the remote `pikerd` to start the target,
# the target can't return a non-serializable value
# since it is expected that service starting is
# non-blocking and the target task will persist running
# on `pikerd` after the client requesting its start
# disconnects.
started = await pikerd_portal.run(
service_task_target,
**spawn_args,
)

if started:
log.info(f'Service {service_name} started!')

async with tractor.wait_for_actor(service_name) as portal:
lock.release()
yield portal
await portal.cancel_actor()


async def spawn_brokerd(

brokername: str,
loglevel: Optional[str] = None,
**tractor_kwargs,

) -> bool:

log.info(f'Spawning {brokername} broker daemon')

brokermod = get_brokermod(brokername)
dname = f'brokerd.{brokername}'

extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
tractor_kwargs.update(extra_tractor_kwargs)

# ask `pikerd` to spawn a new sub-actor and manage it under its
# actor nursery
modpath = brokermod.__name__
broker_enable = [modpath]
for submodname in getattr(
brokermod,
'__enable_modules__',
[],
):
subpath = f'{modpath}.{submodname}'
broker_enable.append(subpath)

portal = await Services.actor_n.start_actor(
dname,
enable_modules=_data_mods + broker_enable,
loglevel=loglevel,
debug_mode=Services.debug_mode,
**tractor_kwargs
)

# non-blocking setup of brokerd service nursery
from ..data import _setup_persistent_brokerd

await Services.start_service_task(
dname,
portal,
_setup_persistent_brokerd,
brokername=brokername,
)
return True


@acm
async def maybe_spawn_brokerd(

brokername: str,
loglevel: Optional[str] = None,
**kwargs,

) -> tractor.Portal:
'''
Helper to spawn a brokerd service *from* a client
who wishes to use the sub-actor-daemon.

'''
async with maybe_spawn_daemon(

f'brokerd.{brokername}',
service_task_target=spawn_brokerd,
spawn_args={
'brokername': brokername,
'loglevel': loglevel,
},
loglevel=loglevel,
**kwargs,

) as portal:
yield portal
|
||||
|
||||
|
||||
async def spawn_emsd(

    loglevel: Optional[str] = None,
    **extra_tractor_kwargs

) -> bool:
    """
    Start the clearing engine under ``pikerd``.

    """
    log.info('Spawning emsd')

    portal = await Services.actor_n.start_actor(
        'emsd',
        enable_modules=[
            'piker.clearing._ems',
            'piker.clearing._client',
        ],
        loglevel=loglevel,
        debug_mode=Services.debug_mode,  # set by pikerd flag
        **extra_tractor_kwargs
    )

    # non-blocking setup of clearing service
    from ..clearing._ems import _setup_persistent_emsd

    await Services.start_service_task(
        'emsd',
        portal,
        _setup_persistent_emsd,
    )
    return True

@acm
async def maybe_open_emsd(

    brokername: str,
    loglevel: Optional[str] = None,
    **kwargs,

) -> tractor._portal.Portal:  # noqa

    async with maybe_spawn_daemon(

        'emsd',
        service_task_target=spawn_emsd,
        spawn_args={'loglevel': loglevel},
        loglevel=loglevel,
        **kwargs,

    ) as portal:
        yield portal
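
For orientation, a minimal client-side sketch of how the helpers above compose; the broker name and loglevel are illustrative (not taken from this diff) and the surrounding piker/tractor runtime is assumed to already be running:

    async def demo_brokerd_client() -> None:
        # attach to (or spawn) a `brokerd.kraken` daemon; `portal` is a
        # `tractor.Portal` to the (possibly pre-existing) sub-actor.
        async with maybe_spawn_brokerd(
            'kraken',          # hypothetical backend name
            loglevel='info',
        ) as portal:
            # the daemon persists after this client block exits so
            # other clients can re-attach to the same service.
            assert portal is not None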
@@ -0,0 +1,136 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Daemon-service management API.

"""
from collections import defaultdict
from typing import (
    Callable,
    Any,
)

import trio
from trio_typing import TaskStatus
import tractor

from ..log import (
    get_logger,
)

log = get_logger(__name__)


# TODO: factor this into a ``tractor.highlevel`` extension
# pack for the library.
class Services:

    actor_n: tractor._supervise.ActorNursery
    service_n: trio.Nursery
    debug_mode: bool  # tractor sub-actor debug mode flag
    service_tasks: dict[
        str,
        tuple[
            trio.CancelScope,
            tractor.Portal,
            trio.Event,
        ]
    ] = {}
    locks = defaultdict(trio.Lock)

    @classmethod
    async def start_service_task(
        self,
        name: str,
        portal: tractor.Portal,
        target: Callable,
        **kwargs,

    ) -> (trio.CancelScope, tractor.Context):
        '''
        Open a context in a service sub-actor, add to a stack
        that gets unwound at ``pikerd`` teardown.

        This allows for allocating long-running sub-services in our main
        daemon and explicitly controlling their lifetimes.

        '''
        async def open_context_in_task(
            task_status: TaskStatus[
                tuple[
                    trio.CancelScope,
                    trio.Event,
                    Any,
                ]
            ] = trio.TASK_STATUS_IGNORED,

        ) -> Any:

            with trio.CancelScope() as cs:
                async with portal.open_context(
                    target,
                    **kwargs,

                ) as (ctx, first):

                    # unblock once the remote context has started
                    complete = trio.Event()
                    task_status.started((cs, complete, first))
                    log.info(
                        f'`pikerd` service {name} started with value {first}'
                    )
                    try:
                        # wait on any context's return value
                        # and any final portal result from the
                        # sub-actor.
                        ctx_res = await ctx.result()

                        # NOTE: blocks indefinitely until cancelled
                        # either by error from the target context
                        # function or by being cancelled here by the
                        # surrounding cancel scope.
                        return (await portal.result(), ctx_res)

                    finally:
                        await portal.cancel_actor()
                        complete.set()
                        self.service_tasks.pop(name)

        cs, complete, first = await self.service_n.start(open_context_in_task)

        # store the cancel scope and portal for later cancellation or
        # restart if needed.
        self.service_tasks[name] = (cs, portal, complete)

        return cs, first

    @classmethod
    async def cancel_service(
        self,
        name: str,

    ) -> Any:
        '''
        Cancel the service task and actor for the given ``name``.

        '''
        log.info(f'Cancelling `pikerd` service {name}')
        cs, portal, complete = self.service_tasks[name]
        cs.cancel()
        await complete.wait()
        assert name not in self.service_tasks, \
            f'Service task for {name} not terminated?'
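
As a rough sketch of the intended call pattern for this api (the service endpoint and its name here are hypothetical, not part of this changeset):

    import trio
    import tractor

    @tractor.context
    async def _setup_persistent_foo(   # hypothetical service endpoint
        ctx: tractor.Context,
    ) -> None:
        # this value is delivered as `first` to `start_service_task()`
        await ctx.started('ready')
        await trio.sleep_forever()     # persist until cancelled

    async def launch_and_stop_foo(portal: tractor.Portal) -> None:
        cs, first = await Services.start_service_task(
            'foo',
            portal,
            _setup_persistent_foo,
        )
        assert first == 'ready'
        # cancels the scope, waits on the `complete` event and
        # removes the service task table entry
        await Services.cancel_service('foo')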
@@ -0,0 +1,144 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Inter-actor "discovery" (protocol) layer.

"""
from __future__ import annotations
from contextlib import (
    asynccontextmanager as acm,
)
from typing import (
    Any,
)

import tractor


from ..log import (
    get_logger,
)

log = get_logger(__name__)

_default_registry_host: str = '127.0.0.1'
_default_registry_port: int = 6116
_default_reg_addr: tuple[str, int] = (
    _default_registry_host,
    _default_registry_port,
)


# NOTE: this value is set as an actor-global once the first endpoint
# who is capable, spawns a `pikerd` service tree.
_registry: Registry | None = None


class Registry:
    addr: None | tuple[str, int] = None

    # TODO: table of uids to sockaddrs
    peers: dict[
        tuple[str, str],
        tuple[str, int],
    ] = {}


_tractor_kwargs: dict[str, Any] = {}


@acm
async def open_registry(
    addr: None | tuple[str, int] = None,
    ensure_exists: bool = True,

) -> tuple[str, int]:

    global _tractor_kwargs
    actor = tractor.current_actor()
    uid = actor.uid
    if (
        Registry.addr is not None
        and addr
    ):
        raise RuntimeError(
            f'`{uid}` registry addr already bound @ {_registry.sockaddr}'
        )

    was_set: bool = False

    if (
        not tractor.is_root_process()
        and Registry.addr is None
    ):
        Registry.addr = actor._arb_addr

    if (
        ensure_exists
        and Registry.addr is None
    ):
        raise RuntimeError(
            f"`{uid}` registry should already exist but doesn't?"
        )

    if (
        Registry.addr is None
    ):
        was_set = True
        Registry.addr = addr or _default_reg_addr

    _tractor_kwargs['arbiter_addr'] = Registry.addr

    try:
        yield Registry.addr
    finally:
        # XXX: always clear the global addr if we set it so that the
        # next (set of) calls will apply whatever new one is passed
        # in.
        if was_set:
            Registry.addr = None


@acm
async def find_service(
    service_name: str,
) -> tractor.Portal | None:

    async with open_registry() as reg_addr:
        log.info(f'Scanning for service `{service_name}`')
        # attach to existing daemon by name if possible
        async with tractor.find_actor(
            service_name,
            arbiter_sockaddr=reg_addr,
        ) as maybe_portal:
            yield maybe_portal


async def check_for_service(
    service_name: str,

) -> None | tuple[str, int]:
    '''
    Service daemon "liveness" predicate.

    '''
    async with open_registry(ensure_exists=False) as reg_addr:
        async with tractor.query_actor(
            service_name,
            arbiter_sockaddr=reg_addr,
        ) as sockaddr:
            return sockaddr
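
A short sketch of how the discovery helpers above chain together; the service name is illustrative:

    async def is_pikerd_up() -> bool:
        # `check_for_service()` returns the registrar-known sockaddr
        # (or `None`) without connecting ...
        sockaddr = await check_for_service('pikerd')
        if sockaddr is None:
            return False

        # ... while `find_service()` yields a connected portal.
        async with find_service('pikerd') as portal:
            return portal is not None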
@@ -0,0 +1,128 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

from __future__ import annotations
from typing import (
    Any,
    TYPE_CHECKING,
)


if TYPE_CHECKING:
    import docker
    from ._ahab import DockerContainer

from piker.log import (
    get_logger,
    get_console_log
)

import asks


log = get_logger(__name__)


# container level config
_config = {
    'port': 19200,
    'log_level': 'debug',

    # hardcoded to our image version
    'version': '7.17.4',
}


def start_elasticsearch(
    client: docker.DockerClient,

    **kwargs,

) -> tuple[DockerContainer, dict[str, Any]]:
    '''
    Start and supervise an elasticsearch instance with its config
    bind-mounted in from the piker config directory on the system.

    The equivalent cli cmd to this code is:

        sudo docker run \
            -itd \
            --rm \
            --network=host \
            --mount type=bind,source="$(pwd)"/elastic,\
                target=/usr/share/elasticsearch/data \
            --env "elastic_username=elastic" \
            --env "elastic_password=password" \
            --env "xpack.security.enabled=false" \
            elastic

    '''
    get_console_log('info', name=__name__)

    dcntr: DockerContainer = client.containers.run(
        'piker:elastic',
        name='piker-elastic',
        network='host',
        detach=True,
        remove=True
    )

    async def health_query(msg: str | None = None):
        if (
            msg
            and _config['version'] in msg
        ):
            return True

        try:
            health = (await asks.get(
                'http://localhost:19200/_cat/health',
                params={'format': 'json'}
            )).json()
            log.info(
                'ElasticSearch cntr health:\n'
                f'{health}'
            )

        except OSError:
            log.exception("couldn't reach elastic container")
            return False

        log.info(health)
        return health[0]['status'] == 'green'

    async def chk_for_closed_msg(msg: str):
        return msg == 'closed'

    return (
        dcntr,
        {
            # apparently we're REALLY tolerant of startup latency
            # for CI XD
            'startup_timeout': 240.0,

            # XXX: decrease http poll period bc docker
            # is shite at handling fast poll rates..
            'startup_query_period': 0.1,

            'log_msg_key': 'message',

            # 'started_afunc': health_query,
        },
        # expected startup and stop msgs
        health_query,
        chk_for_closed_msg,
    )
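
A hedged sketch of the same liveness check done as a standalone polling loop (the endpoint and timings mirror `health_query()` above; the helper name is made up):

    import trio
    import asks

    async def wait_for_elastic(timeout: float = 240.0) -> None:
        # poll /_cat/health until the cluster reports green
        with trio.fail_after(timeout):
            while True:
                try:
                    health = (await asks.get(
                        'http://localhost:19200/_cat/health',
                        params={'format': 'json'},
                    )).json()
                    if health[0]['status'] == 'green':
                        return
                except OSError:
                    pass  # container not accepting connections yet
                await trio.sleep(0.1)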
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -25,47 +25,49 @@
'''
from __future__ import annotations
from contextlib import asynccontextmanager as acm
from datetime import datetime
from pprint import pformat
from typing import (
    Any,
    Optional,
    Union,
    TYPE_CHECKING,
)
import time
from math import isnan
from pathlib import Path

from bidict import bidict
import msgpack
import pyqtgraph as pg
from msgspec.msgpack import (
    encode,
    decode,
)
# import pyqtgraph as pg
import numpy as np
import tractor
from trio_websocket import open_websocket_url
from anyio_marketstore import (
from anyio_marketstore import (  # noqa
    open_marketstore_client,
    MarketstoreClient,
    Params,
)
import pendulum
import purerpc
# TODO: import this for specific error set expected by mkts client
# import purerpc

from ..data.feed import maybe_open_feed
from ..log import get_logger, get_console_log

if TYPE_CHECKING:
    import docker
    from ._ahab import DockerContainer

from .feed import maybe_open_feed
from ..log import get_logger, get_console_log


log = get_logger(__name__)


# container level config
# ahabd-supervisor and container level config
_config = {
    'grpc_listen_port': 5995,
    'ws_listen_port': 5993,
    'log_level': 'debug',
    'startup_timeout': 2,
}

_yaml_config = '''
@@ -131,7 +133,10 @@ def start_marketstore(

    mktsdir = os.path.join(config._config_dir, 'marketstore')

    # create when dne
    # create dirs when dne
    if not os.path.isdir(config._config_dir):
        Path(config._config_dir).mkdir(parents=True, exist_ok=True)

    if not os.path.isdir(mktsdir):
        os.mkdir(mktsdir)

@@ -180,18 +185,29 @@ def start_marketstore(
            config_dir_mnt,
            data_dir_mnt,
        ],

        # XXX: this must be set to allow backgrounding/non-blocking
        # usage interaction with the container's process.
        detach=True,

        # stop_signal='SIGINT',
        init=True,
        # remove=True,
    )

    async def start_matcher(msg: str):
        return "launching tcp listener for all services..." in msg

    async def stop_matcher(msg: str):
        return "exiting..." in msg

    return (
        dcntr,
        _config,

        # expected startup and stop msgs
        "launching tcp listener for all services...",
        "exiting...",
        start_matcher,
        stop_matcher,
    )

@@ -312,7 +328,7 @@ def quote_to_marketstore_structarray(
@acm
async def get_client(
    host: str = 'localhost',
    port: int = 5995
    port: int = _config['grpc_listen_port'],

) -> MarketstoreClient:
    '''
@@ -354,340 +370,6 @@ tf_in_1s = bidict({
})


class Storage:
    '''
    High level storage api for both real-time and historical ingest.

    '''
    def __init__(
        self,
        client: MarketstoreClient,

    ) -> None:
        # TODO: eventually this should be an api/interface type that
        # ensures we can support multiple tsdb backends.
        self.client = client

        # series' cache from tsdb reads
        self._arrays: dict[str, np.ndarray] = {}

    async def list_keys(self) -> list[str]:
        return await self.client.list_symbols()

    async def search_keys(self, pattern: str) -> list[str]:
        '''
        Search for time series key in the storage backend.

        '''
        ...

    async def write_ticks(self, ticks: list) -> None:
        ...

    async def load(
        self,
        fqsn: str,

    ) -> tuple[
        dict[int, np.ndarray],  # timeframe (in secs) to series
        Optional[datetime],     # first dt
        Optional[datetime],     # last dt
    ]:

        first_tsdb_dt, last_tsdb_dt = None, None
        tsdb_arrays = await self.read_ohlcv(
            fqsn,
            # on first load we don't need to pull the max
            # history per request size worth.
            limit=3000,
        )
        log.info(f'Loaded tsdb history {tsdb_arrays}')

        if tsdb_arrays:
            fastest = list(tsdb_arrays.values())[0]
            times = fastest['Epoch']
            first, last = times[0], times[-1]
            first_tsdb_dt, last_tsdb_dt = map(
                pendulum.from_timestamp, [first, last]
            )

        return tsdb_arrays, first_tsdb_dt, last_tsdb_dt

    async def read_ohlcv(
        self,
        fqsn: str,
        timeframe: Optional[Union[int, str]] = None,
        end: Optional[int] = None,
        limit: int = int(800e3),

    ) -> tuple[
        MarketstoreClient,
        Union[dict, np.ndarray]
    ]:
        client = self.client
        syms = await client.list_symbols()

        if fqsn not in syms:
            return {}

        tfstr = tf_in_1s[1]

        params = Params(
            symbols=fqsn,
            timeframe=tfstr,
            attrgroup='OHLCV',
            end=end,
            # limit_from_start=True,

            # TODO: figure the max limit here given the
            # ``purerpc`` msg size limit of purerpc: 33554432
            limit=limit,
        )

        if timeframe is None:
            log.info(f'starting {fqsn} tsdb granularity scan..')
            # loop through and try to find highest granularity
            for tfstr in tf_in_1s.values():
                try:
                    log.info(f'querying for {tfstr}@{fqsn}')
                    params.set('timeframe', tfstr)
                    result = await client.query(params)
                    break

                except purerpc.grpclib.exceptions.UnknownError:
                    # XXX: this is already logged by the container and
                    # thus shows up through `marketstored` logs relay.
                    # log.warning(f'{tfstr}@{fqsn} not found')
                    continue
            else:
                return {}

        else:
            result = await client.query(params)

        # TODO: it turns out column access on recarrays is actually slower:
        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
        # it might make sense to make these structured arrays?
        # Fill out a `numpy` array-results map
        arrays = {}
        for fqsn, data_set in result.by_symbols().items():
            arrays.setdefault(fqsn, {})[
                tf_in_1s.inverse[data_set.timeframe]
            ] = data_set.array

        return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]

    async def delete_ts(
        self,
        key: str,
        timeframe: Optional[Union[int, str]] = None,

    ) -> bool:

        client = self.client
        syms = await client.list_symbols()
        print(syms)
        # if key not in syms:
        #     raise KeyError(f'`{fqsn}` table key not found?')

        return await client.destroy(tbk=key)

    async def write_ohlcv(
        self,
        fqsn: str,
        ohlcv: np.ndarray,
        append_and_duplicate: bool = True,
        limit: int = int(800e3),

    ) -> None:
        # build mkts schema compat array for writing
        mkts_dt = np.dtype(_ohlcv_dt)
        mkts_array = np.zeros(
            len(ohlcv),
            dtype=mkts_dt,
        )
        # copy from shm array (yes it's this easy):
        # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
        mkts_array[:] = ohlcv[[
            'time',
            'open',
            'high',
            'low',
            'close',
            'volume',
        ]]

        m, r = divmod(len(mkts_array), limit)

        for i in range(m, 1):
            to_push = mkts_array[i-1:i*limit]

            # write to db
            resp = await self.client.write(
                to_push,
                tbk=f'{fqsn}/1Sec/OHLCV',

                # NOTE: will append duplicates
                # for the same timestamp-index.
                # TODO: pre deduplicate?
                isvariablelength=append_and_duplicate,
            )

            log.info(
                f'Wrote {mkts_array.size} datums to tsdb\n'
            )

            for resp in resp.responses:
                err = resp.error
                if err:
                    raise MarketStoreError(err)

        if r:
            to_push = mkts_array[m*limit:]

            # write to db
            resp = await self.client.write(
                to_push,
                tbk=f'{fqsn}/1Sec/OHLCV',

                # NOTE: will append duplicates
                # for the same timestamp-index.
                # TODO: pre deduplicate?
                isvariablelength=append_and_duplicate,
            )

            log.info(
                f'Wrote {mkts_array.size} datums to tsdb\n'
            )

            for resp in resp.responses:
                err = resp.error
                if err:
                    raise MarketStoreError(err)

    # XXX: currently the only way to do this is through the CLI:

    # sudo ./marketstore connect --dir ~/.config/piker/data
    # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
    # and this seems to block and use up mem..
    # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15

    # relevant source code for this is here:
    # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14
    # def delete_range(self, start_dt, end_dt) -> None:
    #     ...


@acm
async def open_storage_client(
    fqsn: str,
    period: Optional[Union[int, str]] = None,  # in seconds

) -> tuple[Storage, dict[str, np.ndarray]]:
    '''
    Load a series by key and deliver in ``numpy`` struct array format.

    '''
    async with (
        # eventually a storage backend endpoint
        get_client() as client,
    ):
        # slap on our wrapper api
        yield Storage(client)


async def tsdb_history_update(
    fqsn: Optional[str] = None,

) -> list[str]:

    # TODO: real-time dedicated task for ensuring
    # history consistency between the tsdb, shm and real-time feed..

    # update sequence design notes:

    # - load existing highest frequency data from mkts
    #   * how do we want to offer this to the UI?
    #     - lazy loading?
    #     - try to load it all and expect graphics caching/diffing
    #       to hide extra bits that aren't in view?

    # - compute the diff between latest data from broker and shm
    #   * use sql api in mkts to determine where the backend should
    #     start querying for data?
    #   * append any diff with new shm length
    #   * determine missing (gapped) history by scanning
    #   * how far back do we look?

    # - begin rt update ingest and aggregation
    #   * could start by always writing ticks to mkts instead of
    #     worrying about a shm queue for now.
    #   * we have a short list of shm queues worth grokking:
    #     - https://github.com/pikers/piker/issues/107
    #   * the original data feed arch blurb:
    #     - https://github.com/pikers/piker/issues/98
    #
    profiler = pg.debug.Profiler(
        disabled=False,  # not pg_profile_enabled(),
        delayed=False,
    )

    async with (
        open_storage_client(fqsn) as storage,

        maybe_open_feed(
            [fqsn],
            start_stream=False,

        ) as (feed, stream),
    ):
        profiler(f'opened feed for {fqsn}')

        to_append = feed.shm.array
        to_prepend = None

        if fqsn:
            symbol = feed.symbols.get(fqsn)
            if symbol:
                fqsn = symbol.front_fqsn()

            # diff db history with shm and only write the missing portions
            ohlcv = feed.shm.array

            # TODO: use pg profiler
            tsdb_arrays = await storage.read_ohlcv(fqsn)
            # hist diffing
            if tsdb_arrays:
                for secs in (1, 60):
                    ts = tsdb_arrays.get(secs)
                    if ts is not None and len(ts):
                        # these aren't currently used but can be referenced
                        # from within the embedded ipython shell below.
                        to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
                        to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]

            profiler('Finished db arrays diffs')

        syms = await storage.client.list_symbols()
        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
        profiler(f'listed symbols {syms}')

        # TODO: ask if user wants to write history for detected
        # available shm buffers?
        from tractor.trionics import ipython_embed
        await ipython_embed()

        # for array in [to_append, to_prepend]:
        #     if array is None:
        #         continue

        #     log.info(
        #         f'Writing datums {array.size} -> to tsdb from shm\n'
        #     )
        #     await storage.write_ohlcv(fqsn, array)

        # profiler('Finished db writes')


async def ingest_quote_stream(
    symbols: list[str],
    brokername: str,
@@ -774,12 +456,13 @@ async def stream_quotes(
    async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
        # send subs topics to server
        resp = await ws.send_message(
            msgpack.dumps({'streams': list(tbks.values())})
            encode({'streams': list(tbks.values())})
        )
        log.info(resp)

        async def recv() -> dict[str, Any]:
            return msgpack.loads((await ws.get_message()), encoding='utf-8')
            return decode((await ws.get_message()), encoding='utf-8')

        streams = (await recv())['streams']
        log.info(f"Subscribed to {streams}")
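
The hunk above swaps stdlib-style `msgpack` calls for `msgspec.msgpack`; a minimal round-trip showing the replacement api (the tbk string is illustrative; note that `msgspec`'s `decode()` may not accept an `encoding` kwarg the way `msgpack.loads()` did, so the call above could need further adjustment):

    from msgspec.msgpack import encode, decode

    payload = {'streams': ['btcusdt.kraken/1Sec/OHLCV']}  # illustrative tbk
    wire = encode(payload)            # -> bytes
    assert decode(wire) == payload    # utf-8 handled internally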
@@ -0,0 +1,414 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
(time-series) database middleware layer.

- APIs for read, write, delete, replicate over multiple
  db systems.
- backend agnostic tick msg ingest machinery.
- broadcast systems for fan out of real-time ingested
  data to live consumers.
- test harness utilities for data-processing verification.

'''
from __future__ import annotations
from contextlib import asynccontextmanager as acm
from datetime import datetime
from pprint import pformat
from typing import (
    Optional,
    Union,
)

import numpy as np
from anyio_marketstore import (
    Params,
)
import pendulum
import purerpc

from ..service.marketstore import (
    MarketstoreClient,
    tf_in_1s,
    mk_tbk,
    _ohlcv_dt,
    MarketStoreError,
)
from ..data.feed import maybe_open_feed
from ..log import get_logger
from .._profile import Profiler


log = get_logger(__name__)


class Storage:
    '''
    High level storage api for both real-time and historical ingest.

    '''
    def __init__(
        self,
        client: MarketstoreClient,

    ) -> None:
        # TODO: eventually this should be an api/interface type that
        # ensures we can support multiple tsdb backends.
        self.client = client

        # series' cache from tsdb reads
        self._arrays: dict[str, np.ndarray] = {}

    async def list_keys(self) -> list[str]:
        return await self.client.list_symbols()

    async def search_keys(self, pattern: str) -> list[str]:
        '''
        Search for time series key in the storage backend.

        '''
        ...

    async def write_ticks(self, ticks: list) -> None:
        ...

    async def load(
        self,
        fqsn: str,
        timeframe: int,

    ) -> tuple[
        np.ndarray,          # timeframe sampled array-series
        Optional[datetime],  # first dt
        Optional[datetime],  # last dt
    ]:

        first_tsdb_dt, last_tsdb_dt = None, None
        hist = await self.read_ohlcv(
            fqsn,
            # on first load we don't need to pull the max
            # history per request size worth.
            limit=3000,
            timeframe=timeframe,
        )
        log.info(f'Loaded tsdb history {hist}')

        if len(hist):
            times = hist['Epoch']
            first, last = times[0], times[-1]
            first_tsdb_dt, last_tsdb_dt = map(
                pendulum.from_timestamp, [first, last]
            )

        return (
            hist,            # array-data
            first_tsdb_dt,   # start of query-frame
            last_tsdb_dt,    # most recent
        )

    async def read_ohlcv(
        self,
        fqsn: str,
        timeframe: int | str,
        end: Optional[int] = None,
        limit: int = int(800e3),

    ) -> np.ndarray:

        client = self.client
        syms = await client.list_symbols()

        if fqsn not in syms:
            return {}

        # use the provided timeframe or 1s by default
        tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])

        params = Params(
            symbols=fqsn,
            timeframe=tfstr,
            attrgroup='OHLCV',
            end=end,
            # limit_from_start=True,

            # TODO: figure the max limit here given the
            # ``purerpc`` msg size limit of purerpc: 33554432
            limit=limit,
        )

        try:
            result = await client.query(params)
        except purerpc.grpclib.exceptions.UnknownError as err:
            # indicate there is no history for this timeframe
            log.exception(
                f'Unknown mkts QUERY error: {params}\n'
                f'{err.args}'
            )
            return {}

        # TODO: it turns out column access on recarrays is actually slower:
        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
        # it might make sense to make these structured arrays?
        data_set = result.by_symbols()[fqsn]
        array = data_set.array

        # XXX: ensure sample rate is as expected
        time = data_set.array['Epoch']
        if len(time) > 1:
            time_step = time[-1] - time[-2]
            ts = tf_in_1s.inverse[data_set.timeframe]

            if time_step != ts:
                log.warning(
                    f'MKTS BUG: wrong timeframe loaded: {time_step}'
                    'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG'
                    f'WIPING HISTORY FOR {ts}s'
                )
                await self.delete_ts(fqsn, timeframe)

                # try reading again..
                return await self.read_ohlcv(
                    fqsn,
                    timeframe,
                    end,
                    limit,
                )

        return array

    async def delete_ts(
        self,
        key: str,
        timeframe: Optional[Union[int, str]] = None,
        fmt: str = 'OHLCV',

    ) -> bool:

        client = self.client
        syms = await client.list_symbols()
        if key not in syms:
            raise KeyError(f'`{key}` table key not found in\n{syms}?')

        tbk = mk_tbk((
            key,
            tf_in_1s.get(timeframe, tf_in_1s[60]),
            fmt,
        ))
        return await client.destroy(tbk=tbk)

    async def write_ohlcv(
        self,
        fqsn: str,
        ohlcv: np.ndarray,
        timeframe: int,
        append_and_duplicate: bool = True,
        limit: int = int(800e3),

    ) -> None:
        # build mkts schema compat array for writing
        mkts_dt = np.dtype(_ohlcv_dt)
        mkts_array = np.zeros(
            len(ohlcv),
            dtype=mkts_dt,
        )
        # copy from shm array (yes it's this easy):
        # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
        mkts_array[:] = ohlcv[[
            'time',
            'open',
            'high',
            'low',
            'close',
            'volume',
        ]]

        m, r = divmod(len(mkts_array), limit)

        tfkey = tf_in_1s[timeframe]
        for i in range(m, 1):
            to_push = mkts_array[i-1:i*limit]

            # write to db
            resp = await self.client.write(
                to_push,
                tbk=f'{fqsn}/{tfkey}/OHLCV',

                # NOTE: will append duplicates
                # for the same timestamp-index.
                # TODO: pre-deduplicate?
                isvariablelength=append_and_duplicate,
            )

            log.info(
                f'Wrote {mkts_array.size} datums to tsdb\n'
            )

            for resp in resp.responses:
                err = resp.error
                if err:
                    raise MarketStoreError(err)

        if r:
            to_push = mkts_array[m*limit:]

            # write to db
            resp = await self.client.write(
                to_push,
                tbk=f'{fqsn}/{tfkey}/OHLCV',

                # NOTE: will append duplicates
                # for the same timestamp-index.
                # TODO: pre-deduplicate?
                isvariablelength=append_and_duplicate,
            )

            log.info(
                f'Wrote {mkts_array.size} datums to tsdb\n'
            )

            for resp in resp.responses:
                err = resp.error
                if err:
                    raise MarketStoreError(err)

    # XXX: currently the only way to do this is through the CLI:

    # sudo ./marketstore connect --dir ~/.config/piker/data
    # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
    # and this seems to block and use up mem..
    # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15

    # relevant source code for this is here:
    # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14
    # def delete_range(self, start_dt, end_dt) -> None:
    #     ...


@acm
async def open_storage_client(
    fqsn: str,
    period: Optional[Union[int, str]] = None,  # in seconds

) -> tuple[Storage, dict[str, np.ndarray]]:
    '''
    Load a series by key and deliver in ``numpy`` struct array format.

    '''
    # TODO: generic import-by-name system for each backend much like
    # we have in ``piker.brokers`` module loading for `brokerd` B)
    from ..service import marketstore
    mod = marketstore

    async with (
        # eventually a storage backend endpoint
        mod.get_client() as client,
    ):
        # slap on our wrapper api
        yield Storage(client)


# NOTE: pretty sure right now this is only being
# called by a CLI entrypoint?
@acm
async def open_tsdb_client(
    fqsn: str,

) -> Storage:

    # TODO: real-time dedicated task for ensuring
    # history consistency between the tsdb, shm and real-time feed..

    # update sequence design notes:

    # - load existing highest frequency data from mkts
    #   * how do we want to offer this to the UI?
    #     - lazy loading?
    #     - try to load it all and expect graphics caching/diffing
    #       to hide extra bits that aren't in view?

    # - compute the diff between latest data from broker and shm
    #   * use sql api in mkts to determine where the backend should
    #     start querying for data?
    #   * append any diff with new shm length
    #   * determine missing (gapped) history by scanning
    #   * how far back do we look?

    # - begin rt update ingest and aggregation
    #   * could start by always writing ticks to mkts instead of
    #     worrying about a shm queue for now.
    #   * we have a short list of shm queues worth grokking:
    #     - https://github.com/pikers/piker/issues/107
    #   * the original data feed arch blurb:
    #     - https://github.com/pikers/piker/issues/98
    #
    profiler = Profiler(
        disabled=True,  # not pg_profile_enabled(),
        delayed=False,
    )

    async with (
        open_storage_client(fqsn) as storage,

        maybe_open_feed(
            [fqsn],
            start_stream=False,

        ) as feed,
    ):
        profiler(f'opened feed for {fqsn}')

        # to_append = feed.hist_shm.array
        # to_prepend = None

        if fqsn:
            flume = feed.flumes[fqsn]
            symbol = flume.symbol
            if symbol:
                fqsn = symbol.fqsn

            # diff db history with shm and only write the missing portions
            # ohlcv = flume.hist_shm.array

            # TODO: use pg profiler
            # for secs in (1, 60):
            #     tsdb_array = await storage.read_ohlcv(
            #         fqsn,
            #         timeframe=timeframe,
            #     )
            #     # hist diffing:
            #     # these aren't currently used but can be referenced from
            #     # within the embedded ipython shell below.
            #     to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
            #     to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]

            # profiler('Finished db arrays diffs')

        syms = await storage.client.list_symbols()
        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
        # profiler(f'listed symbols {syms}')
        yield storage

        # for array in [to_append, to_prepend]:
        #     if array is None:
        #         continue

        #     log.info(
        #         f'Writing datums {array.size} -> to tsdb from shm\n'
        #     )
        #     await storage.write_ohlcv(fqsn, array)

        # profiler('Finished db writes')
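
A minimal usage sketch of the storage api above, assuming a running `marketstored` container; the fqsn is illustrative:

    async def dump_history(fqsn: str = 'btcusdt.binance') -> None:
        async with open_storage_client(fqsn) as storage:
            hist, first_dt, last_dt = await storage.load(
                fqsn,
                timeframe=60,
            )
            print(f'{len(hist)} rows spanning {first_dt} -> {last_dt}')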
@@ -32,16 +32,22 @@ def mk_marker_path(
    style: str,

) -> QGraphicsPathItem:
    """Add a marker to be displayed on the line wrapped in a ``QGraphicsPathItem``
    ready to be placed using scene coordinates (not view).
    '''
    Add a marker to be displayed on the line wrapped in
    a ``QGraphicsPathItem`` ready to be placed using scene coordinates
    (not view).

    **Arguments**
    style        String indicating the style of marker to add:
                 ``'<|'``, ``'|>'``, ``'>|'``, ``'|<'``, ``'<|>'``,
                 ``'>|<'``, ``'^'``, ``'v'``, ``'o'``
    size         Size of the marker in pixels.

    """
    This code is taken nearly verbatim from the
    `InfiniteLine.addMarker()` method but does not attempt to be aware
    of low(er) level graphics controls and expects for the output
    polygon to be applied to a ``QGraphicsPathItem``.

    '''
    path = QtGui.QPainterPath()

    if style == 'o':

@@ -87,7 +93,8 @@ def mk_marker_path(


class LevelMarker(QGraphicsPathItem):
    '''An arrow marker path graphic which redraws itself
    '''
    An arrow marker path graphic which redraws itself
    to the specified view coordinate level on each paint cycle.

    '''

@@ -104,7 +111,8 @@ class LevelMarker(QGraphicsPathItem):

        # get polygon and scale
        super().__init__()
        self.scale(size, size)
        # self.setScale(size, size)
        self.setScale(size)

        # internally generates path
        self._style = None

@@ -114,6 +122,7 @@ class LevelMarker(QGraphicsPathItem):

        self.get_level = get_level
        self._on_paint = on_paint

        self.scene_x = lambda: chart.marker_right_points()[1]
        self.level: float = 0
        self.keep_in_view = keep_in_view

@@ -149,12 +158,9 @@ class LevelMarker(QGraphicsPathItem):
    def w(self) -> float:
        return self.path_br().width()

    def position_in_view(
        self,
        # level: float,

    ) -> None:
        '''Show a pp off-screen indicator for a level label.
    def position_in_view(self) -> None:
        '''
        Show a pp off-screen indicator for a level label.

        This is like in fps games where you have a gps "nav" indicator
        but your teammate is outside the range of view, except in 2D, on

@@ -162,7 +168,6 @@ class LevelMarker(QGraphicsPathItem):

        '''
        level = self.get_level()

        view = self.chart.getViewBox()
        vr = view.state['viewRange']
        ymn, ymx = vr[1]

@@ -186,7 +191,6 @@ class LevelMarker(QGraphicsPathItem):
        )

        elif level < ymn:  # pin to bottom of view

            self.setPos(
                QPointF(
                    x,

@@ -211,7 +215,8 @@ class LevelMarker(QGraphicsPathItem):
        w: QtWidgets.QWidget

    ) -> None:
        '''Core paint which we override to always update
        '''
        Core paint which we override to always update
        our marker position in scene coordinates from a
        view coordinate "level".


@@ -235,11 +240,12 @@ def qgo_draw_markers(
    right_offset: float,

) -> float:
    """Paint markers in ``pg.GraphicsItem`` style by first
    '''
    Paint markers in ``pg.GraphicsItem`` style by first
    removing the view transform for the painter, drawing the markers
    in scene coords, then restoring the view coords.

    """
    '''
    # paint markers in native coordinate system
    orig_tr = p.transform()

@@ -19,15 +19,16 @@ Main app startup and run.

'''
from functools import partial
from types import ModuleType

from PyQt5.QtCore import QEvent
import trio

from .._daemon import maybe_spawn_brokerd
from ..brokers import get_brokermod
from ..service import maybe_spawn_brokerd
from . import _event
from ._exec import run_qtractor
from ..data.feed import install_brokerd_search
from ..data._source import unpack_fqsn
from . import _search
from ._chart import GodWidget
from ..log import get_logger

@@ -36,27 +37,26 @@ log = get_logger(__name__)


async def load_provider_search(

    broker: str,
    brokermod: str,
    loglevel: str,

) -> None:

    log.info(f'loading brokerd for {broker}..')
    name = brokermod.name
    log.info(f'loading brokerd for {name}..')

    async with (

        maybe_spawn_brokerd(
            broker,
            name,
            loglevel=loglevel
        ) as portal,

        install_brokerd_search(
            portal,
            get_brokermod(broker),
            brokermod,
        ),
    ):

        # keep search engine stream up until cancelled
        await trio.sleep_forever()


@@ -66,8 +66,8 @@ async def _async_main(
    # implicit required argument provided by ``qtractor_run()``
    main_widget: GodWidget,

    sym: str,
    brokernames: str,
    syms: list[str],
    brokers: dict[str, ModuleType],
    loglevel: str,

) -> None:

@@ -78,6 +78,8 @@ async def _async_main(

    """
    from . import _display
    from ._pg_overrides import _do_overrides
    _do_overrides()

    godwidget = main_widget


@@ -97,6 +99,11 @@ async def _async_main(
    sbar = godwidget.window.status_bar
    starting_done = sbar.open_status('starting ze sexy chartz')

    needed_brokermods: dict[str, ModuleType] = {}
    for fqsn in syms:
        brokername, *_ = unpack_fqsn(fqsn)
        needed_brokermods[brokername] = brokers[brokername]

    async with (
        trio.open_nursery() as root_n,
    ):

@@ -107,18 +114,14 @@ async def _async_main(
        # setup search widget and focus main chart view at startup
        # search widget is a singleton alongside the godwidget
        search = _search.SearchWidget(godwidget=godwidget)
        search.bar.unfocus()

        godwidget.hbox.addWidget(search)
        # search.bar.unfocus()
        # godwidget.hbox.addWidget(search)
        godwidget.search = search

        symbol, _, provider = sym.rpartition('.')

        # this internally starts a ``display_symbol_data()`` task above
        order_mode_ready = await godwidget.load_symbol(
            provider,
            symbol,
            loglevel
        order_mode_ready = await godwidget.load_symbols(
            fqsns=syms,
            loglevel=loglevel,
        )

        # spin up a search engine for the local cached symbol set

@@ -135,8 +138,12 @@ async def _async_main(
    ):
        # load other providers into search **after**
        # the chart's select cache
        for broker in brokernames:
            root_n.start_soon(load_provider_search, broker, loglevel)
        for brokername, mod in needed_brokermods.items():
            root_n.start_soon(
                load_provider_search,
                mod,
                loglevel,
            )

        await order_mode_ready.wait()


@@ -165,19 +172,22 @@ async def _async_main(


def _main(
    sym: str,
    brokernames: [str],
    syms: list[str],
    brokermods: list[ModuleType],
    piker_loglevel: str,
    tractor_kwargs,
) -> None:
    '''
    Sync entry point to start a chart: a ``tractor`` + Qt runtime
    entry point
    Sync entry point to start a chart: a ``tractor`` + Qt runtime.

    '''
    run_qtractor(
        func=_async_main,
        args=(sym, brokernames, piker_loglevel),
        main_widget=GodWidget,
        args=(
            syms,
            {mod.name: mod for mod in brokermods},
            piker_loglevel,
        ),
        main_widget_type=GodWidget,
        tractor_kwargs=tractor_kwargs,
    )

@@ -18,6 +18,7 @@
Chart axes graphics and behavior.

"""
from __future__ import annotations
from functools import lru_cache
from typing import Optional, Callable
from math import floor

@@ -27,6 +28,7 @@ import pyqtgraph as pg
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QPointF

from . import _pg_overrides as pgo
from ..data._source import float_digits
from ._label import Label
from ._style import DpiAwareFont, hcolor, _font

@@ -39,12 +41,17 @@ class Axis(pg.AxisItem):
    '''
    A better axis that sizes tick contents considering font size.

    Also includes tick values lru caching, originally proposed but never
    accepted upstream:
    https://github.com/pyqtgraph/pyqtgraph/pull/2160

    '''
    def __init__(
        self,
        linkedsplits,
        typical_max_str: str = '100 000.000',
        plotitem: pgo.PlotItem,
        typical_max_str: str = '100 000.000 ',
        text_color: str = 'bracket',
        lru_cache_tick_strings: bool = True,
        **kwargs

    ) -> None:

@@ -56,41 +63,78 @@ class Axis(pg.AxisItem):
        # XXX: pretty sure this makes things slower
        # self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

        self.linkedsplits = linkedsplits
        self.pi = plotitem
        self._dpi_font = _font

        self.setTickFont(_font.font)
        font_size = self._dpi_font.font.pixelSize()

        style_conf = {
            'textFillLimits': [(0, 0.5)],
            'tickFont': self._dpi_font.font,

        }
        text_offset = None
        if self.orientation in ('bottom',):
            text_offset = floor(0.25 * font_size)

        elif self.orientation in ('left', 'right'):
            text_offset = floor(font_size / 2)

        self.setStyle(**{
            'textFillLimits': [(0, 0.5)],
            'tickFont': self._dpi_font.font,

            # offset of text *away from* axis line in px
            # use approx. half the font pixel size (height)
            'tickTextOffset': text_offset,
        })
        if text_offset:
            style_conf.update({
                # offset of text *away from* axis line in px
                # use approx. half the font pixel size (height)
                'tickTextOffset': text_offset,
            })

        self.setStyle(**style_conf)
        self.setTickFont(_font.font)

        # NOTE: this is for surrounding "border"
        self.setPen(_axis_pen)

        # this is the text color
        # self.setTextPen(pg.mkPen(hcolor(text_color)))
        self.text_color = text_color

        # generate a bounding rect based on sizing to a "typical"
        # maximum length-ed string defined as init default.
        self.typical_br = _font._qfm.boundingRect(typical_max_str)

        # size the pertinent axis dimension to a "typical value"
        self.size_to_values()

        # NOTE: requires override ``.tickValues()`` method seen below.
        if lru_cache_tick_strings:
            self.tickStrings = lru_cache(
                maxsize=2**20
            )(self.tickStrings)

        # axis "sticky" labels
        self._stickies: dict[str, YAxisLabel] = {}

    # NOTE: only overridden to cast tick values entries into tuples
    # for use with the lru caching.
    def tickValues(
        self,
        minVal: float,
        maxVal: float,
        size: int,

    ) -> list[tuple[float, tuple[str]]]:
        '''
        Repack tick values into tuples for lru caching.

        '''
        ticks = []
        for scalar, values in super().tickValues(minVal, maxVal, size):
            ticks.append((
                scalar,
                tuple(values),  # this
            ))

        return ticks

    @property
    def text_color(self) -> str:
        return self._text_color
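
The tuple-repacking in `tickValues()` exists because `functools.lru_cache` hashes its arguments and `list`s are unhashable; a tiny demo of the constraint:

    from functools import lru_cache

    @lru_cache(maxsize=None)
    def fmt(values: tuple) -> list:
        return [f'{v:.2f}' for v in values]

    fmt((1.0, 2.0))    # fine: tuples are hashable
    # fmt([1.0, 2.0])  # TypeError: unhashable type: 'list'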
@ -106,6 +150,38 @@ class Axis(pg.AxisItem):
|
|||
def txt_offsets(self) -> tuple[int, int]:
|
||||
return tuple(self.style['tickTextOffset'])
|
||||
|
||||
def add_sticky(
|
||||
self,
|
||||
pi: pgo.PlotItem,
|
||||
name: None | str = None,
|
||||
digits: None | int = 2,
|
||||
bg_color='default',
|
||||
fg_color='black',
|
||||
|
||||
) -> YAxisLabel:
|
||||
|
||||
# if the sticky is for our symbol
|
||||
# use the tick size precision for display
|
||||
name = name or pi.name
|
||||
digits = digits or 2
|
||||
|
||||
# TODO: ``._ysticks`` should really be an attr on each
|
||||
# ``PlotItem`` now instead of the containing widget (because of
|
||||
# overlays) ?
|
||||
|
||||
# add y-axis "last" value label
|
||||
sticky = self._stickies[name] = YAxisLabel(
|
||||
pi=pi,
|
||||
parent=self,
|
||||
digits=digits, # TODO: pass this from symbol data
|
||||
opacity=0.9, # slight see-through
|
||||
bg_color=bg_color,
|
||||
fg_color=fg_color,
|
||||
)
|
||||
|
||||
pi.sigRangeChanged.connect(sticky.update_on_resize)
|
||||
return sticky
|
||||
|
||||
|
||||
class PriceAxis(Axis):
|
||||
|
||||
|
@ -167,7 +243,6 @@ class PriceAxis(Axis):
|
|||
self._min_tick = size
|
||||
|
||||
def size_to_values(self) -> None:
|
||||
# self.typical_br = _font._qfm.boundingRect(typical_max_str)
|
||||
self.setWidth(self.typical_br.width())
|
||||
|
||||
# XXX: drop for now since it just eats up h space
|
||||
|
@ -222,28 +297,50 @@ class DynamicDateAxis(Axis):
|
|||
|
||||
) -> list[str]:
|
||||
|
||||
chart = self.linkedsplits.chart
|
||||
flow = chart._flows[chart.name]
|
||||
shm = flow.shm
|
||||
bars = shm.array
|
||||
first = shm._first.value
|
||||
# XX: ARGGGGG AG:LKSKDJF:LKJSDFD
|
||||
chart = self.pi.chart_widget
|
||||
|
||||
bars_len = len(bars)
|
||||
times = bars['time']
|
||||
viz = chart._vizs[chart.name]
|
||||
shm = viz.shm
|
||||
array = shm.array
|
||||
times = array['time']
|
||||
i_0, i_l = times[0], times[-1]
|
||||
|
||||
epochs = times[list(
|
||||
map(
|
||||
int,
|
||||
filter(
|
||||
lambda i: i > 0 and i < bars_len,
|
||||
(i-first for i in indexes)
|
||||
# edge cases
|
||||
if (
|
||||
not indexes
|
||||
or
|
||||
(indexes[0] < i_0
|
||||
and indexes[-1] < i_l)
|
||||
or
|
||||
(indexes[0] > i_0
|
||||
and indexes[-1] > i_l)
|
||||
):
|
||||
return []
|
||||
|
||||
if viz.index_field == 'index':
|
||||
arr_len = times.shape[0]
|
||||
first = shm._first.value
|
||||
epochs = times[
|
||||
list(
|
||||
map(
|
||||
int,
|
||||
filter(
|
||||
lambda i: i > 0 and i < arr_len,
|
||||
(i - first for i in indexes)
|
||||
)
|
||||
)
|
||||
)
|
||||
)
|
||||
)]
|
||||
]
|
||||
else:
|
||||
epochs = list(map(int, indexes))
|
||||
|
||||
# TODO: **don't** have this hard coded shift to EST
|
||||
# delay = times[-1] - times[-2]
|
||||
dts = np.array(epochs, dtype='datetime64[s]')
|
||||
dts = np.array(
|
||||
epochs,
|
||||
dtype='datetime64[s]',
|
||||
)
|
||||
|
||||
# see units listing:
|
||||
# https://numpy.org/devdocs/reference/arrays.datetime.html#datetime-units
|
||||
@@ -261,24 +358,39 @@ class DynamicDateAxis(Axis):
         spacing: float,

     ) -> list[str]:

-        return self._indexes_to_timestrs(values)
-
+        # NOTE: handy for debugging the lru cache
+        # info = self.tickStrings.cache_info()
+        # print(info)
+        return self._indexes_to_timestrs(values)


 class AxisLabel(pg.GraphicsObject):

-    _x_margin = 0
-    _y_margin = 0
+    # relative offsets *OF* the bounding rect relative
+    # to parent graphics object.
+    # eg. <parent>| => <_x_br_offset> => | <text> |
+    _x_br_offset: float = 0
+    _y_br_offset: float = 0
+
+    # relative offsets of text *within* bounding rect
+    # eg. | <_x_margin> => <text> |
+    _x_margin: float = 0
+    _y_margin: float = 0
+
+    # multiplier of the text content's height in order
+    # to force a larger (y-dimension) bounding rect.
+    _y_txt_h_scaling: float = 1

     def __init__(
         self,
         parent: pg.GraphicsItem,
         digits: int = 2,

-        bg_color: str = 'bracket',
+        bg_color: str = 'default',
         fg_color: str = 'black',
-        opacity: int = 1,  # XXX: seriously don't set this to 0
+        opacity: int = .8,  # XXX: seriously don't set this to 0
         font_size: str = 'default',

         use_arrow: bool = True,

@@ -289,6 +401,7 @@ class AxisLabel(pg.GraphicsObject):
         self.setParentItem(parent)

         self.setFlag(self.ItemIgnoresTransformations)
+        self.setZValue(100)

         # XXX: pretty sure this is faster
         self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

@@ -320,14 +433,14 @@ class AxisLabel(pg.GraphicsObject):
         p: QtGui.QPainter,
         opt: QtWidgets.QStyleOptionGraphicsItem,
         w: QtWidgets.QWidget

     ) -> None:
-        """Draw a filled rectangle based on the size of ``.label_str`` text.
+        '''
+        Draw a filled rectangle based on the size of ``.label_str`` text.

         Subtypes can customize further by overloading ``.draw()``.

-        """
-        # p.setCompositionMode(QtWidgets.QPainter.CompositionMode_SourceOver)
+        '''
         if self.label_str:

             # if not self.rect:

@@ -338,7 +451,11 @@ class AxisLabel(pg.GraphicsObject):
             p.setFont(self._dpifont.font)
             p.setPen(self.fg_color)
-            p.drawText(self.rect, self.text_flags, self.label_str)
+            p.drawText(
+                self.rect,
+                self.text_flags,
+                self.label_str,
+            )

     def draw(
         self,

@@ -346,6 +463,8 @@ class AxisLabel(pg.GraphicsObject):
         rect: QtCore.QRectF
     ) -> None:

+        p.setOpacity(self.opacity)
+
         if self._use_arrow:
             if not self.path:
                 self._draw_arrow_path()

@@ -353,15 +472,13 @@ class AxisLabel(pg.GraphicsObject):
             p.drawPath(self.path)
             p.fillPath(self.path, pg.mkBrush(self.bg_color))

-        # this adds a nice black outline around the label for some odd
-        # reason; ok by us
-        p.setOpacity(self.opacity)
-
         # this cause the L1 labels to glitch out if used in the subtype
         # and it will leave a small black strip with the arrow path if
         # done before the above
-        p.fillRect(self.rect, self.bg_color)
+        p.fillRect(
+            self.rect,
+            self.bg_color,
+        )

     def boundingRect(self):  # noqa
         '''

@@ -405,15 +522,18 @@ class AxisLabel(pg.GraphicsObject):
         txt_h, txt_w = txt_br.height(), txt_br.width()
         # print(f'wsw: {self._dpifont.boundingRect(" ")}')

-        # allow subtypes to specify a static width and height
+        # allow subtypes to override width and height
         h, w = self.size_hint()
+        # print(f'axis size: {self._parent.size()}')
+        # print(f'axis geo: {self._parent.geometry()}')

         self.rect = QtCore.QRectF(
-            0, 0,
+
+            # relative bounds offsets
+            self._x_br_offset,
+            self._y_br_offset,
+
             (w or txt_w) + self._x_margin / 2,
-            (h or txt_h) + self._y_margin / 2,
+
+            (h or txt_h) * self._y_txt_h_scaling + (self._y_margin / 2),
         )
         # print(self.rect)
         # hb = self.path.controlPointRect()

@@ -489,7 +609,7 @@ class XAxisLabel(AxisLabel):


 class YAxisLabel(AxisLabel):
-    _y_margin = 4
+    _y_margin: int = 4

     text_flags = (
         QtCore.Qt.AlignLeft

@@ -500,19 +620,19 @@ class YAxisLabel(AxisLabel):

     def __init__(
         self,
-        chart,
+        pi: pgo.PlotItem,
         *args,
         **kwargs
     ) -> None:

         super().__init__(*args, **kwargs)

-        self._chart = chart
-        chart.sigRangeChanged.connect(self.update_on_resize)
+        self._pi = pi
+        pi.sigRangeChanged.connect(self.update_on_resize)

         self._last_datum = (None, None)

+        self.x_offset = 0
         # pull text offset from axis from parent axis
         if getattr(self._parent, 'txt_offsets', False):
             self.x_offset, y_offset = self._parent.txt_offsets()

@@ -531,7 +651,8 @@ class YAxisLabel(AxisLabel):
         value: float,  # data for text

         # on odd dimension and/or adds nice black line
-        x_offset: Optional[int] = None
+        x_offset: int = 0,

     ) -> None:

         # this is read inside ``.paint()``

@@ -577,7 +698,7 @@ class YAxisLabel(AxisLabel):
         self._last_datum = (index, value)

         self.update_label(
-            self._chart.mapFromView(QPointF(index, value)),
+            self._pi.mapFromView(QPointF(index, value)),
             value
         )
piker/ui/_chart.py: 1015 lines changed (diff suppressed because it is too large)
@@ -18,8 +18,13 @@

 Mouse interaction graphics

 """
+from __future__ import annotations
 from functools import partial
-from typing import Optional, Callable
+from typing import (
+    Optional,
+    Callable,
+    TYPE_CHECKING,
+)

 import inspect
 import numpy as np

@@ -36,6 +41,12 @@ from ._style import (
 from ._axes import YAxisLabel, XAxisLabel
 from ..log import get_logger

+if TYPE_CHECKING:
+    from ._chart import (
+        ChartPlotWidget,
+        LinkedSplits,
+    )
+

 log = get_logger(__name__)

@@ -58,9 +69,9 @@ class LineDot(pg.CurvePoint):
         curve: pg.PlotCurveItem,
         index: int,

-        plot: 'ChartPlotWidget',  # type: ingore # noqa
+        plot: ChartPlotWidget,  # type: ingore # noqa
         pos=None,
-        color: str = 'default_light',
+        color: str = 'bracket',

     ) -> None:
         # scale from dpi aware font size

@@ -151,7 +162,7 @@ class ContentsLabel(pg.LabelItem):
     def __init__(
         self,

-        # chart: 'ChartPlotWidget',  # noqa
+        # chart: ChartPlotWidget,  # noqa
         view: pg.ViewBox,

         anchor_at: str = ('top', 'right'),

@@ -187,12 +198,11 @@ class ContentsLabel(pg.LabelItem):
         self,

         name: str,
-        index: int,
+        ix: int,
         array: np.ndarray,

     ) -> None:
         # this being "html" is the dumbest shit :eyeroll:
-        first = array[0]['index']

         self.setText(
             "<b>i</b>:{index}<br/>"

@@ -205,7 +215,7 @@ class ContentsLabel(pg.LabelItem):
             "<b>C</b>:{}<br/>"
             "<b>V</b>:{}<br/>"
             "<b>wap</b>:{}".format(
-                *array[index - first][
+                *array[ix][
                     [
                         'time',
                         'open',

@@ -217,7 +227,7 @@ class ContentsLabel(pg.LabelItem):
                     ]
                 ],
                 name=name,
-                index=index,
+                index=ix,
             )
         )

@@ -225,15 +235,12 @@ class ContentsLabel(pg.LabelItem):
         self,

         name: str,
-        index: int,
+        ix: int,
         array: np.ndarray,

     ) -> None:

-        first = array[0]['index']
-        if index < array[-1]['index'] and index > first:
-            data = array[index - first][name]
-            self.setText(f"{name}: {data:.2f}")
+        data = array[ix][name]
+        self.setText(f"{name}: {data:.2f}")


 class ContentsLabels:

@@ -244,7 +251,7 @@ class ContentsLabels:
     '''
     def __init__(
         self,
-        linkedsplits: 'LinkedSplits',  # type: ignore # noqa
+        linkedsplits: LinkedSplits,  # type: ignore # noqa

     ) -> None:

@@ -258,17 +265,20 @@ class ContentsLabels:

     def update_labels(
         self,
-        index: int,
+        x_in: int,

     ) -> None:
         for chart, name, label, update in self._labels:

-            flow = chart._flows[name]
-            array = flow.shm.array
+            viz = chart.get_viz(name)
+            array = viz.shm.array
+            index = array[viz.index_field]
+            start = index[0]
+            stop = index[-1]

             if not (
-                index >= 0
-                and index < array[-1]['index']
+                x_in >= start
+                and x_in <= stop
             ):
                 # out of range
                 print('WTF out of range?')

@@ -277,7 +287,10 @@ class ContentsLabels:
             # call provided update func with data point
             try:
                 label.show()
-                update(index, array)
+                ix = np.searchsorted(index, x_in)
+                if ix > len(array):
+                    breakpoint()
+                update(ix, array)

             except IndexError:
                 log.exception(f"Failed to update label: {name}")
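The switch to `np.searchsorted()` above maps the cursor's x coordinate onto a row index instead of assuming a zero-based integer index. A small self-contained sketch of that lookup (the data is made up):

    import numpy as np

    # monotonic index column, e.g. epoch times per sample
    index = np.array([10, 11, 12, 15, 16])
    x_in = 13  # cursor x position in data coordinates

    # binary-search for the insertion slot: O(log n) per mouse move
    ix = np.searchsorted(index, x_in)
    assert ix == 3 and index[ix] >= x_in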
@@ -289,7 +302,7 @@ class ContentsLabels:
     def add_label(

         self,
-        chart: 'ChartPlotWidget',  # type: ignore # noqa
+        chart: ChartPlotWidget,  # type: ignore # noqa
         name: str,
         anchor_at: tuple[str, str] = ('top', 'left'),
         update_func: Callable = ContentsLabel.update_from_value,

@@ -316,7 +329,7 @@ class Cursor(pg.GraphicsObject):
     def __init__(

         self,
-        linkedsplits: 'LinkedSplits',  # noqa
+        linkedsplits: LinkedSplits,  # noqa
         digits: int = 0

     ) -> None:

@@ -325,6 +338,8 @@ class Cursor(pg.GraphicsObject):

         self.linked = linkedsplits
         self.graphics: dict[str, pg.GraphicsObject] = {}
+        self.xaxis_label: Optional[XAxisLabel] = None
+        self.always_show_xlabel: bool = True
         self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
         self.active_plot = None
         self.digits: int = digits

@@ -336,7 +351,7 @@ class Cursor(pg.GraphicsObject):
         # XXX: not sure why these are instance variables?
         # It's not like we can change them on the fly..?
         self.pen = pg.mkPen(
-            color=hcolor('default'),
+            color=hcolor('bracket'),
             style=QtCore.Qt.DashLine,
         )
         self.lines_pen = pg.mkPen(

@@ -352,7 +367,7 @@ class Cursor(pg.GraphicsObject):
         self._lw = self.pixelWidth() * self.lines_pen.width()

         # xhair label's color name
-        self.label_color: str = 'default'
+        self.label_color: str = 'bracket'

         self._y_label_update: bool = True

@@ -385,7 +400,7 @@ class Cursor(pg.GraphicsObject):

     def add_plot(
         self,
-        plot: 'ChartPlotWidget',  # noqa
+        plot: ChartPlotWidget,  # noqa
         digits: int = 0,

     ) -> None:

@@ -405,7 +420,7 @@ class Cursor(pg.GraphicsObject):
         hl.hide()

         yl = YAxisLabel(
-            chart=plot,
+            pi=plot.plotItem,
             # parent=plot.getAxis('right'),
             parent=plot.pi_overlay.get_axis(plot.plotItem, 'right'),
             digits=digits or self.digits,

@@ -469,39 +484,58 @@ class Cursor(pg.GraphicsObject):

     def add_curve_cursor(
         self,
-        plot: 'ChartPlotWidget',  # noqa
+        chart: ChartPlotWidget,  # noqa
         curve: 'PlotCurveItem',  # noqa

     ) -> LineDot:
-        # if this plot contains curves add line dot "cursors" to denote
+        # if this chart contains curves add line dot "cursors" to denote
         # the current sample under the mouse
-        main_flow = plot._flows[plot.name]
+        main_viz = chart.get_viz(chart.name)

         # read out last index
-        i = main_flow.shm.array[-1]['index']
+        i = main_viz.shm.array[-1]['index']
         cursor = LineDot(
             curve,
             index=i,
-            plot=plot
+            plot=chart
         )
-        plot.addItem(cursor)
-        self.graphics[plot].setdefault('cursors', []).append(cursor)
+        chart.addItem(cursor)
+        self.graphics[chart].setdefault('cursors', []).append(cursor)
         return cursor

-    def mouseAction(self, action, plot):  # noqa
+    def mouseAction(
+        self,
+        action: str,
+        plot: ChartPlotWidget,
+
+    ) -> None:  # noqa

         log.debug(f"{(action, plot.name)}")
         if action == 'Enter':
             self.active_plot = plot
+            plot.linked.godwidget._active_cursor = self

             # show horiz line and y-label
             self.graphics[plot]['hl'].show()
             self.graphics[plot]['yl'].show()

-        else:  # Leave
+            if (
+                not self.always_show_xlabel
+                and not self.xaxis_label.isVisible()
+            ):
+                self.xaxis_label.show()

-            # hide horiz line and y-label
+        # Leave: hide horiz line and y-label
+        else:
             self.graphics[plot]['hl'].hide()
             self.graphics[plot]['yl'].hide()

+            if (
+                not self.always_show_xlabel
+                and self.xaxis_label.isVisible()
+            ):
+                self.xaxis_label.hide()

     def mouseMoved(
         self,
         coords: tuple[QPointF],  # noqa

@@ -590,13 +624,17 @@ class Cursor(pg.GraphicsObject):
             left_axis_width += left.width()

         # map back to abs (label-local) coordinates
-        self.xaxis_label.update_label(
-            abs_pos=(
-                plot.mapFromView(QPointF(vl_x, iy)) -
-                QPointF(left_axis_width, 0)
-            ),
-            value=ix,
-        )
+        if (
+            self.always_show_xlabel
+            or self.xaxis_label.isVisible()
+        ):
+            self.xaxis_label.update_label(
+                abs_pos=(
+                    plot.mapFromView(QPointF(vl_x, iy)) -
+                    QPointF(left_axis_width, 0)
+                ),
+                value=ix,
+            )

         self._datum_xy = ix, iy
@@ -28,10 +28,7 @@ from PyQt5.QtWidgets import QGraphicsItem
 from PyQt5.QtCore import (
     Qt,
     QLineF,
-    QSizeF,
     QRectF,
-    # QRect,
     QPointF,
 )
 from PyQt5.QtGui import (
     QPainter,

@@ -39,11 +36,8 @@ from PyQt5.QtGui import (
 )
 from .._profile import pg_profile_enabled, ms_slower_then
 from ._style import hcolor
-# from ._compression import (
-#     # ohlc_to_m4_line,
-#     ds_m4,
-# )
 from ..log import get_logger
+from .._profile import Profiler


 log = get_logger(__name__)

@@ -57,7 +51,117 @@ _line_styles: dict[str, int] = {
 }


-class Curve(pg.GraphicsObject):
+class FlowGraphic(pg.GraphicsObject):
+    '''
+    Base class with minimal interface for `QPainterPath` implemented,
+    real-time updated "data flow" graphics.
+
+    See subtypes below.
+
+    '''
+    # sub-type customization methods
+    declare_paintables: Callable | None = None
+    sub_paint: Callable | None = None
+
+    # XXX-NOTE-XXX: graphics caching B)
+    # see explanation for different caching modes:
+    # https://stackoverflow.com/a/39410081
+    cache_mode: int = QGraphicsItem.DeviceCoordinateCache
+    # XXX: WARNING item caching seems to only be useful
+    # if we don't re-generate the entire QPainterPath every time
+    # don't ever use this - it's a colossal nightmare of artefacts
+    # and is disastrous for performance.
+    # QGraphicsItem.ItemCoordinateCache
+    # TODO: still questions todo with coord-cacheing that we should
+    # probably talk to a core dev about:
+    # - if this makes trasform interactions slower (such as zooming)
+    # and if so maybe if/when we implement a "history" mode for the
+    # view we disable this in that mode?
+
+    def __init__(
+        self,
+        *args,
+        name: str | None = None,
+
+        # line styling
+        color: str = 'bracket',
+        last_step_color: str | None = None,
+        fill_color: Optional[str] = None,
+        style: str = 'solid',
+
+        **kwargs
+
+    ) -> None:
+
+        self._name = name
+
+        # primary graphics item used for history
+        self.path: QPainterPath = QPainterPath()
+
+        # additional path that can be optionally used for appends which
+        # tries to avoid triggering an update/redraw of the presumably
+        # larger historical ``.path`` above. the flag to enable
+        # this behaviour is found in `Renderer.render()`.
+        self.fast_path: QPainterPath | None = None
+
+        # TODO: evaluating the path capacity stuff and see
+        # if it really makes much diff pre-allocating it.
+        # self._last_cap: int = 0
+        # cap = path.capacity()
+        # if cap != self._last_cap:
+        #     print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
+        #     self._last_cap = cap
+
+        # all history of curve is drawn in single px thickness
+        self._color: str = color
+        pen = pg.mkPen(hcolor(color), width=1)
+        pen.setStyle(_line_styles[style])
+
+        if 'dash' in style:
+            pen.setDashPattern([8, 3])
+
+        self._pen = pen
+        self._brush = pg.functions.mkBrush(
+            hcolor(fill_color or color)
+        )
+
+        # last segment is drawn in 2px thickness for emphasis
+        if last_step_color:
+            self.last_step_pen = pg.mkPen(
+                hcolor(last_step_color),
+                width=2,
+            )
+        else:
+            self.last_step_pen = pg.mkPen(
+                self._pen,
+                width=2,
+            )
+
+        self._last_line: QLineF = QLineF()
+
+        super().__init__(*args, **kwargs)
+
+        # apply cache mode
+        self.setCacheMode(self.cache_mode)
+
+    def x_uppx(self) -> int:
+
+        px_vecs = self.pixelVectors()[0]
+        if px_vecs:
+            return px_vecs.x()
+        else:
+            return 0
+
+    def x_last(self) -> float | None:
+        '''
+        Return the last most x value of the last line segment or if not
+        drawn yet, ``None``.
+
+        '''
+        return self._last_line.x1() if self._last_line else None
+
+
+class Curve(FlowGraphic):
     '''
     A faster, simpler, append friendly version of
     ``pyqtgraph.PlotCurveItem`` built for highly customizable real-time
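The new `FlowGraphic` base hoists the Qt paint-cache choice into a class attribute so subtypes (see `FlattenedOHLC` further below) can override it per graphic type. A minimal sketch of that pattern, assuming only stock `pyqtgraph`/`PyQt5` (the class name is hypothetical):

    import pyqtgraph as pg
    from PyQt5.QtWidgets import QGraphicsItem

    class CachedGraphic(pg.GraphicsObject):
        # rasterize once per device transform so pans avoid full
        # repaints; subclasses may override with QGraphicsItem.NoCache
        cache_mode: int = QGraphicsItem.DeviceCoordinateCache

        def __init__(self, *args, **kwargs) -> None:
            super().__init__(*args, **kwargs)
            # apply the subclass-selected cache mode on construction
            self.setCacheMode(self.cache_mode)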
@@ -74,7 +178,7 @@ class Curve(pg.GraphicsObject):
     lower level graphics data can be rendered in different threads and
     then read and drawn in this main thread without having to worry
     about dealing with Qt's concurrency primitives. See
-    ``piker.ui._flows.Renderer`` for details and logic related to lower
+    ``piker.ui._render.Renderer`` for details and logic related to lower
     level path generation and incremental update. The main differences in
     the path generation code include:

@@ -86,127 +190,38 @@ class Curve(pg.GraphicsObject):
     updates don't trigger a full path redraw.

     '''

     # sub-type customization methods
-    sub_br: Optional[Callable] = None
-    sub_paint: Optional[Callable] = None
-    declare_paintables: Optional[Callable] = None
+    # TODO: can we remove this?
+    # sub_br: Optional[Callable] = None

     def __init__(
         self,
         *args,

         step_mode: bool = False,
-        color: str = 'default_lightest',
-        fill_color: Optional[str] = None,
-        style: str = 'solid',
-        name: Optional[str] = None,
         use_fpath: bool = True,
+        # color: str = 'default_lightest',
+        # fill_color: Optional[str] = None,
+        # style: str = 'solid',

         **kwargs

     ) -> None:

-        self._name = name
-
         # brutaaalll, see comments within..
         self.yData = None
         self.xData = None

         # self._last_cap: int = 0
-        self.path: Optional[QPainterPath] = None
-
-        # additional path used for appends which tries to avoid
-        # triggering an update/redraw of the presumably larger
-        # historical ``.path`` above.
         self.use_fpath = use_fpath
-        self.fast_path: Optional[QPainterPath] = None

         # TODO: we can probably just dispense with the parent since
         # we're basically only using the pen setting now...
         super().__init__(*args, **kwargs)

-        # all history of curve is drawn in single px thickness
-        pen = pg.mkPen(hcolor(color))
-        pen.setStyle(_line_styles[style])
-
-        if 'dash' in style:
-            pen.setDashPattern([8, 3])
-
-        self._pen = pen
-
-        # last segment is drawn in 2px thickness for emphasis
-        # self.last_step_pen = pg.mkPen(hcolor(color), width=2)
-        self.last_step_pen = pg.mkPen(pen, width=2)
-
-        # self._last_line: Optional[QLineF] = None
-        self._last_line = QLineF()
-        self._last_w: float = 1
-
-        # flat-top style histogram-like discrete curve
-        # self._step_mode: bool = step_mode
+        self._last_line: QLineF = QLineF()

-        # self._fill = True
-        self._brush = pg.functions.mkBrush(hcolor(fill_color or color))
-
-        # NOTE: this setting seems to mostly prevent redraws on mouse
-        # interaction which is a huge boon for avg interaction latency.
-
-        # TODO: one question still remaining is if this makes trasform
-        # interactions slower (such as zooming) and if so maybe if/when
-        # we implement a "history" mode for the view we disable this in
-        # that mode?
-        # don't enable caching by default for the case where the
-        # only thing drawn is the "last" line segment which can
-        # have a weird artifact where it won't be fully drawn to its
-        # endpoint (something we saw on trade rate curves)
-        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)
-
-        # XXX: see explanation for different caching modes:
-        # https://stackoverflow.com/a/39410081
-        # seems to only be useful if we don't re-generate the entire
-        # QPainterPath every time
-        # curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
-
-        # don't ever use this - it's a colossal nightmare of artefacts
-        # and is disastrous for performance.
-        # curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)
-
         # allow sub-type customization
         declare = self.declare_paintables
         if declare:
             declare()

-    # TODO: probably stick this in a new parent
-    # type which will contain our own version of
-    # what ``PlotCurveItem`` had in terms of base
-    # functionality? A `FlowGraphic` maybe?
-    def x_uppx(self) -> int:
-
-        px_vecs = self.pixelVectors()[0]
-        if px_vecs:
-            xs_in_px = px_vecs.x()
-            return round(xs_in_px)
-        else:
-            return 0
-
-    def px_width(self) -> float:
-
-        vb = self.getViewBox()
-        if not vb:
-            return 0
-
-        vr = self.viewRect()
-        l, r = int(vr.left()), int(vr.right())
-
-        start, stop = self._xrange
-        lbar = max(l, start)
-        rbar = min(r, stop)
-
-        return vb.mapViewToDevice(
-            QLineF(lbar, 0, rbar, 0)
-        ).length()
-
     # XXX: lol brutal, the internals of `CurvePoint` (inherited by
     # our `LineDot`) required ``.getData()`` to work..
     def getData(self):

@@ -230,8 +245,8 @@ class Curve(pg.GraphicsObject):
             self.path.clear()

             if self.fast_path:
-                # self.fast_path.clear()
-                self.fast_path = None
+                self.fast_path.clear()
+                # self.fast_path = None

     @cm
     def reset_cache(self) -> None:
@@ -251,77 +266,65 @@ class Curve(pg.GraphicsObject):
             self.boundingRect = self._path_br
         return self._path_br()

-    # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
     def _path_br(self):
         '''
         Post init ``.boundingRect()```.

         '''
-        # hb = self.path.boundingRect()
-        hb = self.path.controlPointRect()
-        hb_size = hb.size()
-
-        fp = self.fast_path
-        if fp:
-            fhb = fp.controlPointRect()
-            hb_size = fhb.size() + hb_size
-
-        # print(f'hb_size: {hb_size}')
-
-        # if self._last_step_rect:
-        #     hb_size += self._last_step_rect.size()
-
-        # if self._line:
-        #     br = self._last_step_rect.bottomRight()
-
-        # tl = QPointF(
-        #     # self._vr[0],
-        #     # hb.topLeft().y(),
-        #     # 0,
-        #     # hb_size.height() + 1
-        # )
-
-        # br = self._last_step_rect.bottomRight()
-
-        w = hb_size.width()
-        h = hb_size.height()
-
-        sbr = self.sub_br
-        if sbr:
-            w, h = self.sub_br(w, h)
-        else:
-            # assume plain line graphic and use
-            # default unit step in each direction.
-
-            # only on a plane line do we include
-            # and extra index step's worth of width
-            # since in the step case the end of the curve
-            # actually terminates earlier so we don't need
-            # this for the last step.
-            w += self._last_w
-            # ll = self._last_line
-            h += 1  # ll.y2() - ll.y1()
-
-        # br = QPointF(
-        #     self._vr[-1],
-        #     # tl.x() + w,
-        #     tl.y() + h,
-        # )
-
-        br = QRectF(
-
-            # top left
-            # hb.topLeft()
-            # tl,
-            QPointF(hb.topLeft()),
-
-            # br,
-            # total size
-            # QSizeF(hb_size)
-            # hb_size,
-            QSizeF(w, h)
-        )
-        # print(f'bounding rect: {br}')
-        return br
+        # profiler = Profiler(
+        #     msg=f'Curve.boundingRect(): `{self._name}`',
+        #     disabled=not pg_profile_enabled(),
+        #     ms_threshold=ms_slower_then,
+        # )
+
+        pr = self.path.controlPointRect()
+        hb_tl, hb_br = (
+            pr.topLeft(),
+            pr.bottomRight(),
+        )
+        mn_y = hb_tl.y()
+        mx_y = hb_br.y()
+        most_left = hb_tl.x()
+        most_right = hb_br.x()
+        # profiler('calc path vertices')
+
+        # TODO: if/when we get fast path appends working in the
+        # `Renderer`, then we might need to actually use this..
+        # fp = self.fast_path
+        # if fp:
+        #     fhb = fp.controlPointRect()
+        #     # hb_size = fhb.size() + hb_size
+        #     br = pr.united(fhb)
+
+        # XXX: *was* a way to allow sub-types to extend the
+        # boundingrect calc, but in the one use case for a step curve
+        # doesn't seem like we need it as long as the last line segment
+        # is drawn as it is?
+
+        # sbr = self.sub_br
+        # if sbr:
+        #     # w, h = self.sub_br(w, h)
+        #     sub_br = sbr()
+        #     br = br.united(sub_br)
+
+        # assume plain line graphic and use
+        # default unit step in each direction.
+        ll = self._last_line
+        y1, y2 = ll.y1(), ll.y2()
+        x1, x2 = ll.x1(), ll.x2()
+
+        ymn = min(y1, y2, mn_y)
+        ymx = max(y1, y2, mx_y)
+        most_left = min(x1, x2, most_left)
+        most_right = max(x1, x2, most_right)
+        # profiler('calc last line vertices')
+
+        return QRectF(
+            most_left,
+            ymn,
+            most_right - most_left + 1,
+            ymx,
+        )

     def paint(
         self,
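The rewritten `_path_br()` above derives the bounding rect from the path's control-point rect (cheaper than a full `boundingRect()` walk) widened by the separately drawn last-line segment. A hedged sketch of the same min/max math in isolation (note the diff passes `ymx` straight through as the rect height; the sketch uses `ymx - ymn`):

    from PyQt5.QtCore import QLineF, QRectF
    from PyQt5.QtGui import QPainterPath

    path = QPainterPath()
    path.moveTo(0, 10)
    path.lineTo(5, 20)
    last_line = QLineF(5, 20, 6, 25)  # the "live" last segment

    pr = path.controlPointRect()
    xs = (last_line.x1(), last_line.x2())
    ys = (last_line.y1(), last_line.y2())
    most_left = min(*xs, pr.topLeft().x())
    most_right = max(*xs, pr.bottomRight().x())
    ymn = min(*ys, pr.topLeft().y())
    ymx = max(*ys, pr.bottomRight().y())
    br = QRectF(most_left, ymn, most_right - most_left + 1, ymx - ymn)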
@@ -331,7 +334,7 @@ class Curve(pg.GraphicsObject):

     ) -> None:

-        profiler = pg.debug.Profiler(
+        profiler = Profiler(
             msg=f'Curve.paint(): `{self._name}`',
             disabled=not pg_profile_enabled(),
             ms_threshold=ms_slower_then,

@@ -339,18 +342,14 @@ class Curve(pg.GraphicsObject):

         sub_paint = self.sub_paint
         if sub_paint:
-            sub_paint(p, profiler)
+            sub_paint(p)

         p.setPen(self.last_step_pen)
         p.drawLine(self._last_line)
-        profiler('.drawLine()')
-        p.setPen(self._pen)
+        profiler('last datum `.drawLine()`')

+        p.setPen(self._pen)
         path = self.path
-        # cap = path.capacity()
-        # if cap != self._last_cap:
-        #     print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
-        #     self._last_cap = cap

         if path:
             p.drawPath(path)

@@ -373,22 +372,30 @@ class Curve(pg.GraphicsObject):
         self,
         path: QPainterPath,
         src_data: np.ndarray,
-        render_data: np.ndarray,
         reset: bool,
         array_key: str,
+        index_field: str,

     ) -> None:
         # default line draw last call
         # with self.reset_cache():
-        x = render_data['index']
-        y = render_data[array_key]
+        x = src_data[index_field]
+        y = src_data[array_key]
+
+        x_last = x[-1]
+        x_2last = x[-2]

         # draw the "current" step graphic segment so it
         # lines up with the "middle" of the current
         # (OHLC) sample.
         self._last_line = QLineF(
-            x[-2], y[-2],
-            x[-1], y[-1],
+
+            # NOTE: currently we draw in x-domain
+            # from last datum to current such that
+            # the end of line touches the "beginning"
+            # of the current datum step span.
+            x_2last, y[-2],
+            x_last, y[-1],
         )

         return x, y

@@ -400,17 +407,20 @@ class Curve(pg.GraphicsObject):
 # (via it's max / min) even when highly zoomed out.
 class FlattenedOHLC(Curve):

+    # avoids strange dragging/smearing artifacts when panning..
+    cache_mode: int = QGraphicsItem.NoCache
+
     def draw_last_datum(
         self,
         path: QPainterPath,
         src_data: np.ndarray,
-        render_data: np.ndarray,
         reset: bool,
         array_key: str,
+        index_field: str,

     ) -> None:
         lasts = src_data[-2:]
-        x = lasts['index']
+        x = lasts[index_field]
         y = lasts['close']

         # draw the "current" step graphic segment so it

@@ -434,9 +444,9 @@ class StepCurve(Curve):
         self,
         path: QPainterPath,
         src_data: np.ndarray,
-        render_data: np.ndarray,
         reset: bool,
         array_key: str,
+        index_field: str,

         w: float = 0.5,

@@ -445,40 +455,31 @@ class StepCurve(Curve):
         # TODO: remove this and instead place all step curve
         # updating into pre-path data render callbacks.
         # full input data
-        x = src_data['index']
+        x = src_data[index_field]
         y = src_data[array_key]

         x_last = x[-1]
+        x_2last = x[-2]
         y_last = y[-1]
+        step_size = x_last - x_2last

         # lol, commenting this makes step curves
         # all "black" for me :eyeroll:..
         self._last_line = QLineF(
-            x_last - w, 0,
-            x_last + w, 0,
+            x_2last, 0,
+            x_last, 0,
         )
         self._last_step_rect = QRectF(
-            x_last - w, 0,
-            x_last + w, y_last,
+            x_last, 0,
+            step_size, y_last,
         )
         return x, y

     def sub_paint(
         self,
         p: QPainter,
-        profiler: pg.debug.Profiler,

     ) -> None:
         # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
         # p.drawRect(self._last_step_rect)
         p.fillRect(self._last_step_rect, self._brush)
-        profiler('.fillRect()')
-
-    def sub_br(
-        self,
-        path_w: float,
-        path_h: float,
-
-    ) -> (float, float):
-        # passthrough
-        return path_w, path_h
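For the step-curve change above, the last flat-top segment now spans from the second-to-last sample to the last one instead of a fixed half-width `w` around the last sample. The arithmetic, isolated (sample values are made up):

    from PyQt5.QtCore import QLineF, QRectF

    x_2last, x_last, y_last = 98.0, 99.0, 42.0
    step_size = x_last - x_2last

    # flat top from previous sample to the current one..
    last_line = QLineF(x_2last, 0, x_last, 0)
    # ..and the filled body sized by that step
    last_step_rect = QRectF(x_last, 0, step_size, y_last)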
piker/ui/_display.py: 1708 lines changed (diff suppressed because it is too large)
@@ -18,11 +18,27 @@

 Higher level annotation editors.

 """
-from dataclasses import dataclass, field
-from typing import Optional
+from __future__ import annotations
+from collections import defaultdict
+from typing import (
+    Optional,
+    TYPE_CHECKING
+)

 import pyqtgraph as pg
-from pyqtgraph import ViewBox, Point, QtCore, QtGui
+from pyqtgraph import (
+    ViewBox,
+    Point,
+    QtCore,
+    QtWidgets,
+)
+from PyQt5.QtGui import (
+    QColor,
+)
+from PyQt5.QtWidgets import (
+    QLabel,
+)
+
 from pyqtgraph import functions as fn
 from PyQt5.QtCore import QPointF
 import numpy as np

@@ -30,28 +46,34 @@ import numpy as np
 from ._style import hcolor, _font
 from ._lines import LevelLine
 from ..log import get_logger
+from ..data.types import Struct
+
+if TYPE_CHECKING:
+    from ._chart import GodWidget


 log = get_logger(__name__)


-@dataclass
-class ArrowEditor:
+class ArrowEditor(Struct):

-    chart: 'ChartPlotWidget'  # noqa
-    _arrows: field(default_factory=dict)
+    godw: GodWidget = None  # type: ignore # noqa
+    _arrows: dict[str, list[pg.ArrowItem]] = {}

     def add(
         self,
+        plot: pg.PlotItem,
         uid: str,
         x: float,
         y: float,
         color='default',
         pointing: Optional[str] = None,
-    ) -> pg.ArrowItem:
-        """Add an arrow graphic to view at given (x, y).
-
-        """
+
+    ) -> pg.ArrowItem:
+        '''
+        Add an arrow graphic to view at given (x, y).
+
+        '''
         angle = {
             'up': 90,
             'down': -90,

@@ -74,25 +96,25 @@ class ArrowEditor:
             brush=pg.mkBrush(hcolor(color)),
         )
         arrow.setPos(x, y)
-
-        self._arrows[uid] = arrow
+        self._arrows.setdefault(uid, []).append(arrow)

         # render to view
-        self.chart.plotItem.addItem(arrow)
+        plot.addItem(arrow)

         return arrow

     def remove(self, arrow) -> bool:
-        self.chart.plotItem.removeItem(arrow)
+        for linked in self.godw.iter_linked():
+            linked.chart.plotItem.removeItem(arrow)


-@dataclass
-class LineEditor:
-    '''The great editor of linez.
+class LineEditor(Struct):
+    '''
+    The great editor of linez.

     '''
-    chart: 'ChartPlotWidget' = None  # type: ignore # noqa
-    _order_lines: dict[str, LevelLine] = field(default_factory=dict)
+    godw: GodWidget = None  # type: ignore # noqa
+    _order_lines: defaultdict[str, LevelLine] = defaultdict(list)
     _active_staged_line: LevelLine = None

     def stage_line(

@@ -100,11 +122,11 @@ class LineEditor:
         line: LevelLine,

     ) -> LevelLine:
-        """Stage a line at the current chart's cursor position
+        '''
+        Stage a line at the current chart's cursor position
         and return it.

-        """
+        '''
         # add a "staged" cursor-tracking line to view
         # and cash it in a a var
         if self._active_staged_line:

@@ -115,17 +137,25 @@ class LineEditor:
         return line

     def unstage_line(self) -> LevelLine:
-        """Inverse of ``.stage_line()``.
+        '''
+        Inverse of ``.stage_line()``.

-        """
-        # chart = self.chart._cursor.active_plot
-        # # chart.setCursor(QtCore.Qt.ArrowCursor)
-        cursor = self.chart.linked.cursor
+        '''
+        cursor = self.godw.get_cursor()
+        if not cursor:
+            return None

         # delete "staged" cursor tracking line from view
         line = self._active_staged_line
         if line:
-            cursor._trackers.remove(line)
+            try:
+                cursor._trackers.remove(line)
+            except KeyError:
+                # when the current cursor doesn't have said line
+                # registered (probably means that user held order mode
+                # key while panning to another view) then we just
+                # ignore the remove error.
+                pass
             line.delete()

         self._active_staged_line = None

@@ -133,55 +163,58 @@ class LineEditor:
         # show the crosshair y line and label
         cursor.show_xhair()

-    def submit_line(
+    def submit_lines(
         self,
-        line: LevelLine,
+        lines: list[LevelLine],
         uuid: str,

     ) -> LevelLine:

-        staged_line = self._active_staged_line
-        if not staged_line:
-            raise RuntimeError("No line is currently staged!?")
+        # staged_line = self._active_staged_line
+        # if not staged_line:
+        #     raise RuntimeError("No line is currently staged!?")

         # for now, until submission reponse arrives
-        line.hide_labels()
+        for line in lines:
+            line.hide_labels()

         # register for later lookup/deletion
-        self._order_lines[uuid] = line
+        self._order_lines[uuid] += lines

-        return line
+        return lines

-    def commit_line(self, uuid: str) -> LevelLine:
-        """Commit a "staged line" to view.
+    def commit_line(self, uuid: str) -> list[LevelLine]:
+        '''
+        Commit a "staged line" to view.

         Submits the line graphic under the cursor as a (new) permanent
         graphic in view.

-        """
-        try:
-            line = self._order_lines[uuid]
-        except KeyError:
-            log.warning(f'No line for {uuid} could be found?')
-            return
-        else:
-            line.show_labels()
+        '''
+        lines = self._order_lines[uuid]
+        if lines:
+            for line in lines:
+                line.show_labels()
+                line.hide_markers()
+                log.debug(f'Level active for level: {line.value()}')
+                # TODO: other flashy things to indicate the order is active

-            # TODO: other flashy things to indicate the order is active
-
-            log.debug(f'Level active for level: {line.value()}')
-
-        return line
+        return lines

     def lines_under_cursor(self) -> list[LevelLine]:
-        """Get the line(s) under the cursor position.
+        '''
+        Get the line(s) under the cursor position.

-        """
+        '''
         # Delete any hoverable under the cursor
-        return self.chart.linked.cursor._hovered
+        return self.godw.get_cursor()._hovered

-    def all_lines(self) -> tuple[LevelLine]:
-        return tuple(self._order_lines.values())
+    def all_lines(self) -> list[LevelLine]:
+        all_lines = []
+        for lines in list(self._order_lines.values()):
+            all_lines.extend(lines)
+
+        return all_lines

     def remove_line(
         self,
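`_order_lines` above becomes a `defaultdict(list)` keyed by order id so several level lines can accumulate under one uuid. The registry pattern in isolation (ids and values are made up):

    from collections import defaultdict

    _order_lines: defaultdict[str, list] = defaultdict(list)

    # submit_lines(): append without key-existence checks
    _order_lines['oid-1'] += ['line-a', 'line-b']
    _order_lines['oid-1'] += ['line-c']

    # remove_line(): pop the whole group at once
    assert _order_lines.pop('oid-1') == ['line-a', 'line-b', 'line-c']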
@@ -196,29 +229,30 @@ class LineEditor:

         '''
         # try to look up line from our registry
-        line = self._order_lines.pop(uuid, line)
-        if line:
+        lines = self._order_lines.pop(uuid, None)
+        if lines:
+            cursor = self.godw.get_cursor()
+            if cursor:
+                for line in lines:
+                    # if hovered remove from cursor set
+                    hovered = cursor._hovered
+                    if line in hovered:
+                        hovered.remove(line)

-            # if hovered remove from cursor set
-            cursor = self.chart.linked.cursor
-            hovered = cursor._hovered
-            if line in hovered:
-                hovered.remove(line)
+                    log.debug(f'deleting {line} with oid: {uuid}')
+                    line.delete()

-            # make sure the xhair doesn't get left off
-            # just because we never got a un-hover event
-            cursor.show_xhair()
-
-            log.debug(f'deleting {line} with oid: {uuid}')
-            line.delete()
+                # make sure the xhair doesn't get left off
+                # just because we never got a un-hover event
+                cursor.show_xhair()

         else:
             log.warning(f'Could not find line for {line}')

-        return line
+        return lines


-class SelectRect(QtGui.QGraphicsRectItem):
+class SelectRect(QtWidgets.QGraphicsRectItem):

     def __init__(
         self,

@@ -227,12 +261,12 @@ class SelectRect(QtGui.QGraphicsRectItem):

     ) -> None:
         super().__init__(0, 0, 1, 1)

-        # self.rbScaleBox = QtGui.QGraphicsRectItem(0, 0, 1, 1)
+        # self.rbScaleBox = QGraphicsRectItem(0, 0, 1, 1)
         self.vb = viewbox
         self._chart: 'ChartPlotWidget' = None  # noqa

         # override selection box color
-        color = QtGui.QColor(hcolor(color))
+        color = QColor(hcolor(color))
         self.setPen(fn.mkPen(color, width=1))
         color.setAlpha(66)
         self.setBrush(fn.mkBrush(color))

@@ -240,7 +274,7 @@ class SelectRect(QtGui.QGraphicsRectItem):
         self.hide()
         self._label = None

-        label = self._label = QtGui.QLabel()
+        label = self._label = QLabel()
         label.setTextFormat(0)  # markdown
         label.setFont(_font.font)
         label.setMargin(0)

@@ -277,8 +311,8 @@ class SelectRect(QtGui.QGraphicsRectItem):
         # TODO: get bg color working
         palette.setColor(
             self._label.backgroundRole(),
-            # QtGui.QColor(chart.backgroundBrush()),
-            QtGui.QColor(hcolor('papas_special')),
+            # QColor(chart.backgroundBrush()),
+            QColor(hcolor('papas_special')),
         )

     def update_on_resize(self, vr, r):

@@ -326,7 +360,7 @@ class SelectRect(QtGui.QGraphicsRectItem):

         self.setPos(r.topLeft())
         self.resetTransform()
-        self.scale(r.width(), r.height())
+        self.setRect(r)
         self.show()

         y1, y2 = start_pos.y(), end_pos.y()

@@ -343,7 +377,7 @@ class SelectRect(QtGui.QGraphicsRectItem):
         nbars = ixmx - ixmn + 1

         chart = self._chart
-        data = chart._flows[chart.name].shm.array[ixmn:ixmx]
+        data = chart.get_viz(chart.name).shm.array[ixmn:ixmx]

         if len(data):
             std = data['close'].std()
@@ -18,11 +18,11 @@

 Qt event proxying and processing using ``trio`` mem chans.

 """
-from contextlib import asynccontextmanager, AsyncExitStack
+from contextlib import asynccontextmanager as acm
 from typing import Callable

-from pydantic import BaseModel
 import trio
+from tractor.trionics import gather_contexts
 from PyQt5 import QtCore
 from PyQt5.QtCore import QEvent, pyqtBoundSignal
 from PyQt5.QtWidgets import QWidget

@@ -30,6 +30,8 @@ from PyQt5.QtWidgets import (
     QGraphicsSceneMouseEvent as gs_mouse,
 )

+from ..data.types import Struct
+

 MOUSE_EVENTS = {
     gs_mouse.GraphicsSceneMousePress,

@@ -43,13 +45,10 @@ MOUSE_EVENTS = {

 # TODO: maybe consider some constrained ints down the road?
 # https://pydantic-docs.helpmanual.io/usage/types/#constrained-types

-class KeyboardMsg(BaseModel):
+class KeyboardMsg(Struct):
     '''Unpacked Qt keyboard event data.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-
     event: QEvent
     etype: int
     key: int

@@ -57,16 +56,13 @@ class KeyboardMsg(BaseModel):
     txt: str

     def to_tuple(self) -> tuple:
-        return tuple(self.dict().values())
+        return tuple(self.to_dict().values())


-class MouseMsg(BaseModel):
+class MouseMsg(Struct):
     '''Unpacked Qt keyboard event data.

     '''
-    class Config:
-        arbitrary_types_allowed = True
-
     event: QEvent
     etype: int
     button: int

@@ -160,7 +156,7 @@ class EventRelay(QtCore.QObject):
         return False


-@asynccontextmanager
+@acm
 async def open_event_stream(

     source_widget: QWidget,

@@ -186,7 +182,7 @@ async def open_event_stream(
     source_widget.removeEventFilter(kc)


-@asynccontextmanager
+@acm
 async def open_signal_handler(

     signal: pyqtBoundSignal,

@@ -211,7 +207,7 @@ async def open_signal_handler(
     yield


-@asynccontextmanager
+@acm
 async def open_handlers(

     source_widgets: list[QWidget],

@@ -220,16 +216,14 @@ async def open_handlers(
     **kwargs,

 ) -> None:

     async with (
         trio.open_nursery() as n,
-        AsyncExitStack() as stack,
+        gather_contexts([
+            open_event_stream(widget, event_types, **kwargs)
+            for widget in source_widgets
+        ]) as streams,
     ):
-        for widget in source_widgets:
-
-            event_recv_stream = await stack.enter_async_context(
-                open_event_stream(widget, event_types, **kwargs)
-            )
+        for widget, event_recv_stream in zip(source_widgets, streams):
             n.start_soon(async_handler, widget, event_recv_stream)

         yield
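The `open_handlers()` rework above swaps a manual `AsyncExitStack` loop for `tractor.trionics.gather_contexts()`, which enters a list of async context managers together and yields their values in order. A runnable sketch under that assumption (the stream names are dummies):

    from contextlib import asynccontextmanager as acm

    import trio
    from tractor.trionics import gather_contexts

    @acm
    async def open_stream(name: str):
        # stand-in for open_event_stream()
        yield f'{name}-events'

    async def main():
        async with gather_contexts([
            open_stream(n) for n in ('kb', 'mouse')
        ]) as streams:
            assert tuple(streams) == ('kb-events', 'mouse-events')

    trio.run(main)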
@@ -20,16 +20,24 @@ Trio - Qt integration

 Run ``trio`` in guest mode on top of the Qt event loop.
 All global Qt runtime settings are mostly defined here.
 """
-from typing import Tuple, Callable, Dict, Any
+from __future__ import annotations
+from typing import (
+    Callable,
+    Any,
+    Type,
+    TYPE_CHECKING,
+)
 import platform
 import traceback

 # Qt specific
 import PyQt5  # noqa
 import pyqtgraph as pg
-from pyqtgraph import QtGui
+from PyQt5.QtWidgets import (
+    QWidget,
+    QMainWindow,
+    QApplication,
+)
 from PyQt5 import QtCore
-# from PyQt5.QtGui import QLabel, QStatusBar
 from PyQt5.QtCore import (
     pyqtRemoveInputHook,
     Qt,

@@ -37,15 +45,19 @@ from PyQt5.QtCore import (
 )
 import qdarkstyle
 from qdarkstyle import DarkPalette
-# import qdarkgraystyle
+# import qdarkgraystyle  # TODO: play with it
 import trio
 from outcome import Error

-from .._daemon import maybe_open_pikerd, _tractor_kwargs
+from ..service import (
+    maybe_open_pikerd,
+    get_tractor_runtime_kwargs,
+)
 from ..log import get_logger
 from ._pg_overrides import _do_overrides
 from . import _style


 log = get_logger(__name__)

 # pyqtgraph global config

@@ -72,17 +84,18 @@ if platform.system() == "Windows":

 def run_qtractor(
     func: Callable,
-    args: Tuple,
-    main_widget: QtGui.QWidget,
-    tractor_kwargs: Dict[str, Any] = {},
-    window_type: QtGui.QMainWindow = None,
+    args: tuple,
+    main_widget_type: Type[QWidget],
+    tractor_kwargs: dict[str, Any] = {},
+    window_type: QMainWindow = None,

 ) -> None:
     # avoids annoying message when entering debugger from qt loop
     pyqtRemoveInputHook()

-    app = QtGui.QApplication.instance()
+    app = QApplication.instance()
     if app is None:
-        app = PyQt5.QtWidgets.QApplication([])
+        app = QApplication([])

     # TODO: we might not need this if it's desired
     # to cancel the tractor machinery on Qt loop

@@ -156,11 +169,11 @@ def run_qtractor(
     # hook into app focus change events
     app.focusChanged.connect(window.on_focus_change)

-    instance = main_widget()
+    instance = main_widget_type()
     instance.window = window

     # override tractor's defaults
-    tractor_kwargs.update(_tractor_kwargs)
+    tractor_kwargs.update(get_tractor_runtime_kwargs())

     # define tractor entrypoint
     async def main():

@@ -178,7 +191,7 @@ def run_qtractor(
         # restrict_keyboard_interrupt_to_checkpoints=True,
     )

-    window.main_widget = main_widget
+    window.godwidget: GodWidget = instance
     window.setCentralWidget(instance)
     if is_windows:
         window.configure_to_desktop()
piker/ui/_flows.py: 1247 lines changed (diff suppressed because it is too large)
@@ -619,7 +619,7 @@ class FillStatusBar(QProgressBar):
     # color: #19232D;
     # width: 10px;

-        self.setRange(0, slots)
+        self.setRange(0, int(slots))
         self.setValue(value)


@@ -644,7 +644,7 @@ def mk_fill_status_bar(

     # TODO: calc this height from the ``ChartnPane``
     chart_h = round(parent_pane.height() * 5/8)
-    bar_h = chart_h * 0.375
+    bar_h = chart_h * 0.375*0.9

     # TODO: once things are sized to screen
     bar_label_font_size = label_font_size or _font.px_size - 2
piker/ui/_fsp.py: 282 lines changed
@@ -27,12 +27,13 @@ from itertools import cycle
 from typing import Optional, AsyncGenerator, Any

 import numpy as np
-from pydantic import create_model
+import msgspec
 import tractor
 import pyqtgraph as pg
 import trio
 from trio_typing import TaskStatus

+from piker.data.types import Struct
 from ._axes import PriceAxis
 from .._cacheables import maybe_open_context
 from ..calc import humanize

@@ -41,6 +42,8 @@ from ..data._sharedmem import (
     _Token,
     try_read,
 )
+from ..data.feed import Flume
+from ..data._source import Symbol
 from ._chart import (
     ChartPlotWidget,
     LinkedSplits,

@@ -50,14 +53,18 @@ from ._forms import (
     mk_form,
     open_form_input_handling,
 )
-from ..fsp._api import maybe_mk_fsp_shm, Fsp
+from ..fsp._api import (
+    maybe_mk_fsp_shm,
+    Fsp,
+)
 from ..fsp import cascade
 from ..fsp._volume import (
-    tina_vwap,
+    # tina_vwap,
     dolla_vlm,
     flow_rates,
 )
 from ..log import get_logger
+from .._profile import Profiler

 log = get_logger(__name__)

@@ -71,15 +78,14 @@ def has_vlm(ohlcv: ShmArray) -> bool:


 def update_fsp_chart(
     chart: ChartPlotWidget,
-    flow,
+    viz,
     graphics_name: str,
     array_key: Optional[str],
     **kwargs,

 ) -> None:

-    shm = flow.shm
+    shm = viz.shm
     if not shm:
         return

@@ -94,18 +100,15 @@ def update_fsp_chart(
     # update graphics
     # NOTE: this does a length check internally which allows it
     # staying above the last row check below..
-    chart.update_graphics_from_flow(
-        graphics_name,
-        array_key=array_key or graphics_name,
-        **kwargs,
-    )
+    viz.update_graphics()

     # XXX: re: ``array_key``: fsp func names must be unique meaning we
     # can't have duplicates of the underlying data even if multiple
     # sub-charts reference it under different 'named charts'.

     # read from last calculated value and update any label
-    last_val_sticky = chart._ysticks.get(graphics_name)
+    last_val_sticky = viz.plot.getAxis(
+        'right')._stickies.get(graphics_name)
     if last_val_sticky:
         last = last_row[array_key]
         last_val_sticky.update_from_data(-1, last)

@@ -153,12 +156,13 @@ async def open_fsp_sidepane(
     )

     # https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
-    FspConfig = create_model(
-        'FspConfig',
-        name=name,
-        **params,
+    FspConfig = msgspec.defstruct(
+        "Point",
+        [('name', name)] + list(params.items()),
+        bases=(Struct,),
     )
-    sidepane.model = FspConfig()
+    model = FspConfig(name=name, **params)
+    sidepane.model = model

     # just a logger for now until we get fsp configs up and running.
     async def settings_change(
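`msgspec.defstruct()` above replaces pydantic's `create_model()` for building the sidepane config type at runtime. The core API in isolation (field names and defaults are made up):

    import msgspec

    # fields are (name, type) or (name, type, default) tuples
    FspConfig = msgspec.defstruct(
        'FspConfig',
        [('name', str), ('period', int, 14)],
    )
    cfg = FspConfig(name='rsi')
    assert cfg.period == 14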
@@ -188,7 +192,7 @@ async def open_fsp_actor_cluster(

     from tractor._clustering import open_actor_cluster

-    # profiler = pg.debug.Profiler(
+    # profiler = Profiler(
     #     delayed=False,
     #     disabled=False
     # )

@@ -205,12 +209,12 @@ async def open_fsp_actor_cluster(

 async def run_fsp_ui(

     linkedsplits: LinkedSplits,
-    shm: ShmArray,
+    flume: Flume,
     started: trio.Event,
     target: Fsp,
     conf: dict[str, dict],
     loglevel: str,
-    # profiler: pg.debug.Profiler,
+    # profiler: Profiler,
     # _quote_throttle_rate: int = 58,

 ) -> None:

@@ -242,9 +246,11 @@ async def run_fsp_ui(
     else:
         chart = linkedsplits.subplots[overlay_with]

+        shm = flume.rt_shm
         chart.draw_curve(
-            name=name,
-            shm=shm,
+            name,
+            shm,
+            flume,
             overlay=True,
             color='default_light',
             array_key=name,

@@ -254,8 +260,9 @@ async def run_fsp_ui(
     else:
         # create a new sub-chart widget for this fsp
         chart = linkedsplits.add_plot(
-            name=name,
-            shm=shm,
+            name,
+            shm,
+            flume,

             array_key=name,
             sidepane=sidepane,

@@ -275,9 +282,10 @@ async def run_fsp_ui(
     # profiler(f'fsp:{name} chart created')

     # first UI update, usually from shm pushed history
+    viz = chart.get_viz(array_key)
     update_fsp_chart(
         chart,
-        chart._flows[array_key],
+        viz,
         name,
         array_key=array_key,
     )

@@ -304,7 +312,7 @@ async def run_fsp_ui(
     # level_line(chart, 70, orient_v='bottom')
     # level_line(chart, 80, orient_v='top')

-    chart.view._set_yrange()
+    chart.view._set_yrange(viz=viz)
     # done()  # status updates

     # profiler(f'fsp:{func_name} starting update loop')

@@ -345,6 +353,9 @@ async def run_fsp_ui(
     # last = time.time()


+# TODO: maybe this should be our ``Viz`` type since it maps
+# one flume to the next? The machinery for task/actor mgmt should
+# be part of the instantiation API?
 class FspAdmin:
     '''
     Client API for orchestrating FSP actors and displaying

@@ -356,7 +367,7 @@ class FspAdmin:
         tn: trio.Nursery,
         cluster: dict[str, tractor.Portal],
         linked: LinkedSplits,
-        src_shm: ShmArray,
+        flume: Flume,

     ) -> None:
         self.tn = tn

@@ -368,7 +379,11 @@ class FspAdmin:
             tuple[tractor.MsgStream, ShmArray]
         ] = {}
         self._flow_registry: dict[_Token, str] = {}
-        self.src_shm = src_shm
+
+        # TODO: make this a `.src_flume` and add
+        # a `dst_flume`?
+        # (=> but then wouldn't this be the most basic `Viz`?)
+        self.flume = flume

     def rr_next_portal(self) -> tractor.Portal:
         name, portal = next(self._rr_next_actor)

@@ -381,7 +396,7 @@ class FspAdmin:
         complete: trio.Event,
         started: trio.Event,
         fqsn: str,
-        dst_shm: ShmArray,
+        dst_fsp_flume: Flume,
         conf: dict,
         target: Fsp,
         loglevel: str,

@@ -402,9 +417,10 @@ class FspAdmin:
             # data feed key
             fqsn=fqsn,

+            # TODO: pass `Flume.to_msg()`s here?
             # mems
-            src_shm_token=self.src_shm.token,
-            dst_shm_token=dst_shm.token,
+            src_shm_token=self.flume.rt_shm.token,
+            dst_shm_token=dst_fsp_flume.rt_shm.token,

             # target
             ns_path=ns_path,

@@ -421,12 +437,14 @@ class FspAdmin:
             ctx.open_stream() as stream,
         ):

+            dst_fsp_flume.stream: tractor.MsgStream = stream
+
             # register output data
             self._registry[
                 (fqsn, ns_path)
             ] = (
                 stream,
-                dst_shm,
+                dst_fsp_flume.rt_shm,
                 complete
             )

@@ -440,7 +458,9 @@ class FspAdmin:
             # if the chart isn't hidden try to update
             # the data on screen.
             if not self.linked.isHidden():
-                log.debug(f'Re-syncing graphics for fsp: {ns_path}')
+                log.debug(
+                    f'Re-syncing graphics for fsp: {ns_path}'
+                )
                 self.linked.graphics_cycle(
                     trigger_all=True,
                     prepend_update_index=info['first'],

@@ -459,9 +479,9 @@ class FspAdmin:
         worker_name: Optional[str] = None,
         loglevel: str = 'info',

-    ) -> (ShmArray, trio.Event):
+    ) -> (Flume, trio.Event):

-        fqsn = self.linked.symbol.front_fqsn()
+        fqsn = self.flume.symbol.fqsn

         # allocate an output shm array
         key, dst_shm, opened = maybe_mk_fsp_shm(

@@ -469,16 +489,36 @@ class FspAdmin:
             target=target,
             readonly=True,
         )
-        self._flow_registry[
-            (self.src_shm._token, target.name)
-        ] = dst_shm._token

+        portal = self.cluster.get(worker_name) or self.rr_next_portal()
+        provider_tag = portal.channel.uid
+
+        symbol = Symbol(
+            key=key,
+            broker_info={
+                provider_tag: {'asset_type': 'fsp'},
+            },
+        )
+        dst_fsp_flume = Flume(
+            symbol=symbol,
+            _rt_shm_token=dst_shm.token,
+            first_quote={},
+
+            # set to 0 presuming for now that we can't load
+            # FSP history (though we should eventually).
+            izero_hist=0,
+            izero_rt=0,
+        )
+        self._flow_registry[(
+            self.flume.rt_shm._token,
+            target.name
+        )] = dst_shm._token

         # if not opened:
         #     raise RuntimeError(
         #         f'Already started FSP `{fqsn}:{func_name}`'
         #     )

-        portal = self.cluster.get(worker_name) or self.rr_next_portal()
         complete = trio.Event()
         started = trio.Event()
         self.tn.start_soon(
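`rr_next_portal()` above pulls the next worker from an `itertools.cycle` over the actor cluster, spreading FSP engine tasks round-robin. The selection logic alone (cluster entries are made up):

    from itertools import cycle

    cluster = {'fsp_0': 'portal_0', 'fsp_1': 'portal_1'}
    _rr_next_actor = cycle(cluster.items())

    name, portal = next(_rr_next_actor)   # fsp_0
    name, portal = next(_rr_next_actor)   # fsp_1
    name, portal = next(_rr_next_actor)   # wraps back to fsp_0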
@@ -487,13 +527,13 @@ class FspAdmin:
             complete,
             started,
             fqsn,
-            dst_shm,
+            dst_fsp_flume,
             conf,
             target,
             loglevel,
         )

-        return dst_shm, started
+        return dst_fsp_flume, started

     async def open_fsp_chart(
         self,

@@ -505,7 +545,7 @@ class FspAdmin:

     ) -> (trio.Event, ChartPlotWidget):

-        shm, started = await self.start_engine_task(
+        flume, started = await self.start_engine_task(
             target,
             conf,
             loglevel,

@@ -517,7 +557,7 @@ class FspAdmin:
             run_fsp_ui,

             self.linked,
-            shm,
+            flume,
             started,
             target,

@@ -531,7 +571,7 @@ class FspAdmin:
 @acm
 async def open_fsp_admin(
     linked: LinkedSplits,
-    src_shm: ShmArray,
+    flume: Flume,
     **kwargs,

 ) -> AsyncGenerator[dict, dict[str, tractor.Portal]]:

@@ -552,7 +592,7 @@ async def open_fsp_admin(
             tn,
             cluster_map,
             linked,
-            src_shm,
+            flume,
         )
         try:
             yield admin

@@ -566,7 +606,7 @@ async def open_fsp_admin(

 async def open_vlm_displays(

     linked: LinkedSplits,
-    ohlcv: ShmArray,
+    flume: Flume,
     dvlm: bool = True,

     task_status: TaskStatus[ChartPlotWidget] = trio.TASK_STATUS_IGNORED,

@@ -588,6 +628,8 @@ async def open_vlm_displays(
     sig = inspect.signature(flow_rates.func)
     params = sig.parameters

+    ohlcv: ShmArray = flume.rt_shm
+
     async with (
         open_fsp_sidepane(
             linked, {

@@ -607,7 +649,7 @@ async def open_vlm_displays(
             }
         },
         ) as sidepane,
-        open_fsp_admin(linked, ohlcv) as admin,
+        open_fsp_admin(linked, flume) as admin,
     ):
         # TODO: support updates
         # period_field = sidepane.fields['period']

@@ -615,12 +657,21 @@ async def open_vlm_displays(
         #     str(period_param.default)
         # )

+        # use slightly less light (then bracket) gray
+        # for volume from "main exchange" and a more "bluey"
+        # gray for "dark" vlm.
+        vlm_color = 'i3'
+        dark_vlm_color = 'charcoal'
+
         # built-in vlm which we plot ASAP since it's
         # usually data provided directly with OHLC history.
         shm = ohlcv
-        chart = linked.add_plot(
+        # ohlc_chart = linked.chart
+
+        vlm_chart = linked.add_plot(
             name='volume',
             shm=shm,
+            flume=flume,

             array_key='volume',
             sidepane=sidepane,

@@ -633,63 +684,47 @@ async def open_vlm_displays(
             # the curve item internals are pretty convoluted.
             style='step',
         )

-        # force 0 to always be in view
-        def multi_maxmin(
-            names: list[str],
-
-        ) -> tuple[float, float]:
-
-            mx = 0
-            for name in names:
-
-                mxmn = chart.maxmin(name=name)
-                if mxmn:
-                    ymax = mxmn[1]
-                    if ymax > mx:
-                        mx = ymax
-
-            return 0, mx
-
-        chart.view.maxmin = partial(multi_maxmin, names=['volume'])
+        vlm_viz = vlm_chart._vizs['volume']

         # TODO: fix the x-axis label issue where if you put
         # the axis on the left it's totally not lined up...
         # show volume units value on LHS (for dinkus)
-        # chart.hideAxis('right')
-        # chart.showAxis('left')
+        # vlm_chart.hideAxis('right')
+        # vlm_chart.showAxis('left')

         # send back new chart to caller
-        task_status.started(chart)
+        task_status.started(vlm_chart)

         # should **not** be the same sub-chart widget
-        assert chart.name != linked.chart.name
+        assert vlm_chart.name != linked.chart.name

         # sticky only on sub-charts atm
-        last_val_sticky = chart._ysticks[chart.name]
+        last_val_sticky = vlm_chart.plotItem.getAxis(
+            'right')._stickies.get(vlm_chart.name)

         # read from last calculated value
         value = shm.array['volume'][-1]

         last_val_sticky.update_from_data(-1, value)

-        vlm_curve = chart.update_graphics_from_flow(
+        _, _, vlm_curve = vlm_chart.update_graphics_from_flow(
             'volume',
             # shm.array,
         )

         # size view to data once at outset
-        chart.view._set_yrange()
+        vlm_chart.view._set_yrange(
+            viz=vlm_viz
+        )

         # add axis title
-        axis = chart.getAxis('right')
+        axis = vlm_chart.getAxis('right')
         axis.set_title(' vlm')

         if dvlm:

             tasks_ready = []
             # spawn and overlay $ vlm on the same subchart
-            dvlm_shm, started = await admin.start_engine_task(
+            dvlm_flume, started = await admin.start_engine_task(
                 dolla_vlm,

@@ -708,7 +743,7 @@ async def open_vlm_displays(
             # FIXME: we should error on starting the same fsp right
             # since it might collide with existing shm.. or wait we
             # had this before??
-            # dolla_vlm,
+            # dolla_vlm

             tasks_ready.append(started)
             # profiler(f'created shm for fsp actor: {display_name}')

@@ -722,22 +757,29 @@ async def open_vlm_displays(
             # XXX: the main chart already contains a vlm "units" axis
             # so here we add an overlay wth a y-range in
             # $ liquidity-value units (normally a fiat like USD).
-            dvlm_pi = chart.overlay_plotitem(
+            dvlm_pi = vlm_chart.overlay_plotitem(
                 'dolla_vlm',
                 index=0,  # place axis on inside (nearest to chart)

                 axis_title=' $vlm',
-                axis_side='right',
+                axis_side='left',

                 axis_kwargs={
                     'typical_max_str': ' 100.0 M ',
|
||||
'formatter': partial(
|
||||
humanize,
|
||||
digits=2,
|
||||
),
|
||||
'text_color': vlm_color,
|
||||
},
|
||||
)
|
||||
|
||||
# TODO: should this maybe be implicit based on input args to
|
||||
# `.overlay_plotitem()` above?
|
||||
dvlm_pi.hideAxis('bottom')
|
||||
|
||||
# all to be overlayed curve names
|
||||
fields = [
|
||||
dvlm_fields = [
|
||||
'dolla_vlm',
|
||||
'dark_vlm',
|
||||
]
|
||||
|
@ -750,32 +792,18 @@ async def open_vlm_displays(
|
|||
'dark_trade_rate',
|
||||
]
|
||||
|
||||
group_mxmn = partial(
|
||||
multi_maxmin,
|
||||
# keep both regular and dark vlm in view
|
||||
names=fields,
|
||||
# names=fields + dvlm_rate_fields,
|
||||
)
|
||||
|
||||
# add custom auto range handler
|
||||
dvlm_pi.vb._maxmin = group_mxmn
|
||||
|
||||
# use slightly less light (then bracket) gray
|
||||
# for volume from "main exchange" and a more "bluey"
|
||||
# gray for "dark" vlm.
|
||||
vlm_color = 'i3'
|
||||
dark_vlm_color = 'charcoal'
|
||||
|
||||
# add dvlm (step) curves to common view
|
||||
def chart_curves(
|
||||
names: list[str],
|
||||
pi: pg.PlotItem,
|
||||
shm: ShmArray,
|
||||
flume: Flume,
|
||||
step_mode: bool = False,
|
||||
style: str = 'solid',
|
||||
|
||||
) -> None:
|
||||
for name in names:
|
||||
|
||||
if 'dark' in name:
|
||||
color = dark_vlm_color
|
||||
elif 'rate' in name:
|
||||
|
@ -783,9 +811,13 @@ async def open_vlm_displays(
|
|||
else:
|
||||
color = 'bracket'
|
||||
|
||||
curve, _ = chart.draw_curve(
|
||||
name=name,
|
||||
shm=shm,
|
||||
assert isinstance(shm, ShmArray)
|
||||
assert isinstance(flume, Flume)
|
||||
|
||||
viz = vlm_chart.draw_curve(
|
||||
name,
|
||||
shm,
|
||||
flume,
|
||||
array_key=name,
|
||||
overlay=pi,
|
||||
color=color,
|
||||
|
@ -793,29 +825,24 @@ async def open_vlm_displays(
|
|||
style=style,
|
||||
pi=pi,
|
||||
)
|
||||
|
||||
# TODO: we need a better API to do this..
|
||||
# specially store ref to shm for lookup in display loop
|
||||
# since only a placeholder of `None` is entered in
|
||||
# ``.draw_curve()``.
|
||||
flow = chart._flows[name]
|
||||
assert flow.plot is pi
|
||||
assert viz.plot is pi
|
||||
|
||||
chart_curves(
|
||||
fields,
|
||||
dvlm_fields,
|
||||
dvlm_pi,
|
||||
dvlm_shm,
|
||||
dvlm_flume.rt_shm,
|
||||
dvlm_flume,
|
||||
step_mode=True,
|
||||
)
|
||||
|
||||
# spawn flow rates fsp **ONLY AFTER** the 'dolla_vlm' fsp is
|
||||
# up since this one depends on it.
|
||||
|
||||
fr_shm, started = await admin.start_engine_task(
|
||||
fr_flume, started = await admin.start_engine_task(
|
||||
flow_rates,
|
||||
{ # fsp engine conf
|
||||
'func_name': 'flow_rates',
|
||||
'zero_on_step': False,
|
||||
'zero_on_step': True,
|
||||
},
|
||||
# loglevel,
|
||||
)
|
||||
|
@ -824,7 +851,7 @@ async def open_vlm_displays(
|
|||
# chart_curves(
|
||||
# dvlm_rate_fields,
|
||||
# dvlm_pi,
|
||||
# fr_shm,
|
||||
# fr_flume.rt_shm,
|
||||
# )
|
||||
|
||||
# TODO: is there a way to "sync" the dual axes such that only
|
||||
|
@ -833,24 +860,24 @@ async def open_vlm_displays(
|
|||
# displayed and the curves are effectively the same minus
|
||||
# liquidity events (well at least on low OHLC periods - 1s).
|
||||
vlm_curve.hide()
|
||||
chart.removeItem(vlm_curve)
|
||||
vflow = chart._flows['volume']
|
||||
vflow.render = False
|
||||
vlm_chart.removeItem(vlm_curve)
|
||||
vlm_viz = vlm_chart._vizs['volume']
|
||||
vlm_viz.render = False
|
||||
|
||||
# avoid range sorting on volume once disabled
|
||||
chart.view.disable_auto_yrange()
|
||||
vlm_chart.view.disable_auto_yrange()
|
||||
|
||||
# Trade rate overlay
|
||||
# XXX: requires an additional overlay for
|
||||
# a trades-per-period (time) y-range.
|
||||
tr_pi = chart.overlay_plotitem(
|
||||
tr_pi = vlm_chart.overlay_plotitem(
|
||||
'trade_rates',
|
||||
|
||||
# TODO: dynamically update period (and thus this axis?)
|
||||
# title from user input.
|
||||
axis_title='clears',
|
||||
|
||||
axis_side='left',
|
||||
|
||||
axis_kwargs={
|
||||
'typical_max_str': ' 10.0 M ',
|
||||
'formatter': partial(
|
||||
|
@ -861,17 +888,13 @@ async def open_vlm_displays(
|
|||
},
|
||||
|
||||
)
|
||||
# add custom auto range handler
|
||||
tr_pi.vb.maxmin = partial(
|
||||
multi_maxmin,
|
||||
# keep both regular and dark vlm in view
|
||||
names=trade_rate_fields,
|
||||
)
|
||||
tr_pi.hideAxis('bottom')
|
||||
|
||||
chart_curves(
|
||||
trade_rate_fields,
|
||||
tr_pi,
|
||||
fr_shm,
|
||||
fr_flume.rt_shm,
|
||||
fr_flume,
|
||||
# step_mode=True,
|
||||
|
||||
# dashed line to represent "individual trades" being
|
||||
|
@ -905,7 +928,7 @@ async def open_vlm_displays(
|
|||
async def start_fsp_displays(
|
||||
|
||||
linked: LinkedSplits,
|
||||
ohlcv: ShmArray,
|
||||
flume: Flume,
|
||||
group_status_key: str,
|
||||
loglevel: str,
|
||||
|
||||
|
@ -940,7 +963,7 @@ async def start_fsp_displays(
|
|||
# },
|
||||
# },
|
||||
}
|
||||
profiler = pg.debug.Profiler(
|
||||
profiler = Profiler(
|
||||
delayed=False,
|
||||
disabled=False
|
||||
)
|
||||
|
@ -948,7 +971,10 @@ async def start_fsp_displays(
|
|||
async with (
|
||||
|
||||
# NOTE: this admin internally opens an actor cluster
|
||||
open_fsp_admin(linked, ohlcv) as admin,
|
||||
open_fsp_admin(
|
||||
linked,
|
||||
flume,
|
||||
) as admin,
|
||||
):
|
||||
statuses = []
|
||||
for target, conf in fsp_conf.items():
|
||||
|
|
|
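
The hunks above thread a ``Flume`` through the FSP admin layer in place of bare ``ShmArray`` handles, so one object now couples the shm buffer to its symbol metadata and callers of ``start_engine_task()`` get that back directly. A minimal, self-contained sketch of the container pattern (toy names and a plain numpy buffer standing in for piker's real ``Flume``/``ShmArray`` types):

```python
from dataclasses import dataclass, field
from typing import Any

import numpy as np


@dataclass
class FlumeSketch:
    # toy stand-in for ``piker.data.Flume``: bundle the symbol
    # metadata with the real-time buffer instead of passing bare
    # shm arrays around between UI tasks.
    symbol: str
    rt_shm: np.ndarray
    izero_hist: int = 0  # no FSP history loading yet, per the diff
    izero_rt: int = 0
    first_quote: dict[str, Any] = field(default_factory=dict)


def latest_volume(flume: FlumeSketch) -> float:
    # display code reads *through* the flume now
    return float(flume.rt_shm['volume'][-1])


if __name__ == '__main__':
    buf = np.zeros(4, dtype=[('close', 'f8'), ('volume', 'f8')])
    buf['volume'][-1] = 42.0
    print(latest_volume(FlumeSketch('btcusdt.binance', buf)))
```
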
@@ -20,8 +20,13 @@ Chart view box primitives
"""
from __future__ import annotations
from contextlib import asynccontextmanager
from functools import partial
import time
from typing import Optional, Callable
from typing import (
    Optional,
    Callable,
    TYPE_CHECKING,
)

import pyqtgraph as pg
# from pyqtgraph.GraphicsScene import mouseEvents

@@ -33,11 +38,16 @@ import numpy as np
import trio

from ..log import get_logger
from .._profile import Profiler
from .._profile import pg_profile_enabled, ms_slower_then
# from ._style import _min_points_to_show
from ._editors import SelectRect
from . import _event

if TYPE_CHECKING:
    from ._chart import ChartPlotWidget
    from ._dataviz import Viz


log = get_logger(__name__)

@@ -75,7 +85,6 @@ async def handle_viewmode_kb_inputs(
    pressed: set[str] = set()

    last = time.time()
    trigger_mode: str
    action: str

    on_next_release: Optional[Callable] = None

@@ -141,13 +150,16 @@ async def handle_viewmode_kb_inputs(
                Qt.Key_Space,
            }
        ):
            view._chart.linked.godwidget.search.focus()
            godw = view._chart.linked.godwidget
            godw.hist_linked.resize_sidepanes(from_linked=godw.rt_linked)
            godw.search.focus()

        # esc and ctrl-c
        if key == Qt.Key_Escape or (ctrl and key == Qt.Key_C):
            # ctrl-c as cancel
            # https://forum.qt.io/topic/532/how-to-catch-ctrl-c-on-a-widget/9
            view.select_box.clear()
            view.linked.focus()

        # cancel order or clear graphics
        if key == Qt.Key_C or key == Qt.Key_Delete:

@@ -178,17 +190,17 @@ async def handle_viewmode_kb_inputs(
        if key in pressed:
            pressed.remove(key)

        # QUERY/QUOTE MODE #
        # QUERY/QUOTE MODE
        # ----------------
        if {Qt.Key_Q}.intersection(pressed):

            view.linkedsplits.cursor.in_query_mode = True
            view.linked.cursor.in_query_mode = True

        else:
            view.linkedsplits.cursor.in_query_mode = False
            view.linked.cursor.in_query_mode = False

        # SELECTION MODE
        # --------------

        if shift:
            if view.state['mouseMode'] == ViewBox.PanMode:
                view.setMouseMode(ViewBox.RectMode)

@@ -209,18 +221,27 @@ async def handle_viewmode_kb_inputs(
        # ORDER MODE
        # ----------

        # live vs. dark trigger + an action {buy, sell, alert}
        order_keys_pressed = ORDER_MODE.intersection(pressed)

        if order_keys_pressed:

            # show the pp size label
            order_mode.current_pp.show()
            # TODO: it seems like maybe the composition should be
            # reversed here? Like, maybe we should have the nav have
            # access to the pos state and then make encapsulated logic
            # that shows the right stuff on screen instead or order mode
            # and position-related abstractions doing this?

            # show the pp size label only if there is
            # a non-zero pos existing
            tracker = order_mode.current_pp
            if tracker.live_pp.size:
                tracker.nav.show()

            # TODO: show pp config mini-params in status bar widget
            # mode.pp_config.show()

            trigger_type: str = 'dark'
            if (
                # 's' for "submit" to activate "live" order
                Qt.Key_S in pressed or

@@ -228,9 +249,6 @@ async def handle_viewmode_kb_inputs(
            ):
                trigger_type: str = 'live'

            else:
                trigger_type: str = 'dark'

            # order mode trigger "actions"
            if Qt.Key_D in pressed:  # for "damp eet"
                action = 'sell'

@@ -259,8 +277,8 @@ async def handle_viewmode_kb_inputs(
                    Qt.Key_S in pressed or
                    order_keys_pressed or
                    Qt.Key_O in pressed
                ) and
                key in NUMBER_LINE
            )
                )
                and key in NUMBER_LINE
            ):
                # hot key to set order slots size.
                # change edit field to current number line value,

@@ -278,7 +296,7 @@ async def handle_viewmode_kb_inputs(
        else:  # none active

            # hide pp label
            order_mode.current_pp.hide_info()
            order_mode.current_pp.nav.hide_info()

            # if none are pressed, remove "staged" level
            # line under cursor position

@@ -319,7 +337,6 @@ async def handle_viewmode_mouse(
    ):
        # when in order mode, submit execution
        # msg.event.accept()
        # breakpoint()
        view.order_mode.submit_order()

@@ -336,16 +353,6 @@ class ChartView(ViewBox):
    '''
    mode_name: str = 'view'

    # "relay events" for making overlaid views work.
    # NOTE: these MUST be defined here (and can't be monkey patched
    # on later) due to signal construction requiring refs to be
    # in place during the run of meta-class machinery.
    mouseDragEventRelay = QtCore.Signal(object, object, object)
    wheelEventRelay = QtCore.Signal(object, object, object)

    event_relay_source: 'Optional[ViewBox]' = None
    relays: dict[str, QtCore.Signal] = {}

    def __init__(
        self,

@@ -367,7 +374,6 @@ class ChartView(ViewBox):
        )
        # for "known y-range style"
        self._static_yrange = static_yrange
        self._maxmin = None

        # disable vertical scrolling
        self.setMouseEnabled(

@@ -375,8 +381,8 @@ class ChartView(ViewBox):
            y=True,
        )

        self.linkedsplits = None
        self._chart: 'ChartPlotWidget' = None  # noqa
        self.linked = None
        self._chart: ChartPlotWidget | None = None  # noqa

        # add our selection box annotator
        self.select_box = SelectRect(self)

@@ -387,6 +393,7 @@ class ChartView(ViewBox):
        self.setFocusPolicy(QtCore.Qt.StrongFocus)
        self._ic = None
        self._yranger: Callable | None = None

    def start_ic(
        self,

@@ -397,8 +404,11 @@ class ChartView(ViewBox):
        '''
        if self._ic is None:
            self.chart.pause_all_feeds()
            self._ic = trio.Event()
            try:
                self.chart.pause_all_feeds()
                self._ic = trio.Event()
            except RuntimeError:
                pass

    def signal_ic(
        self,

@@ -411,9 +421,12 @@ class ChartView(ViewBox):
        '''
        if self._ic:
            self._ic.set()
            self._ic = None
            self.chart.resume_all_feeds()
            try:
                self._ic.set()
                self._ic = None
                self.chart.resume_all_feeds()
            except RuntimeError:
                pass

    @asynccontextmanager
    async def open_async_input_handler(

@@ -441,29 +454,18 @@ class ChartView(ViewBox):
        yield self

    @property
    def chart(self) -> 'ChartPlotWidget':  # type: ignore # noqa
    def chart(self) -> ChartPlotWidget:  # type: ignore # noqa
        return self._chart

    @chart.setter
    def chart(self, chart: 'ChartPlotWidget') -> None:  # type: ignore # noqa
    def chart(self, chart: ChartPlotWidget) -> None:  # type: ignore # noqa
        self._chart = chart
        self.select_box.chart = chart
        if self._maxmin is None:
            self._maxmin = chart.maxmin

    @property
    def maxmin(self) -> Callable:
        return self._maxmin

    @maxmin.setter
    def maxmin(self, callback: Callable) -> None:
        self._maxmin = callback

    def wheelEvent(
        self,
        ev,
        axis=None,
        relayed_from: ChartView = None,
    ):
        '''
        Override "center-point" location for scrolling.

@@ -474,27 +476,34 @@ class ChartView(ViewBox):
        TODO: PR a method into ``pyqtgraph`` to make this configurable

        '''
        linked = self.linked
        if (
            not linked
        ):
            return

        if axis in (0, 1):
            mask = [False, False]
            mask[axis] = self.state['mouseEnabled'][axis]
        else:
            mask = self.state['mouseEnabled'][:]

        chart = self.linkedsplits.chart
        chart = self.linked.chart

        # don't zoom more then the min points setting
        l, lbar, rbar, r = chart.bars_range()
        # vl = r - l
        viz = chart.get_viz(chart.name)
        vl, lbar, rbar, vr = viz.bars_range()

        # if ev.delta() > 0 and vl <= _min_points_to_show:
        #     log.debug("Max zoom bruh...")
        # TODO: max/min zoom limits incorporating time step size.
        # rl = vr - vl
        # if ev.delta() > 0 and rl <= _min_points_to_show:
        #     log.warning("Max zoom bruh...")
        #     return

        # if (
        #     ev.delta() < 0
        #     and vl >= len(chart._flows[chart.name].shm.array) + 666
        #     and rl >= len(chart._vizs[chart.name].shm.array) + 666
        # ):
        #     log.debug("Min zoom bruh...")
        #     log.warning("Min zoom bruh...")
        #     return

        # actual scaling factor

@@ -525,49 +534,17 @@ class ChartView(ViewBox):
            self.scaleBy(s, center)

        else:

            # center = pg.Point(
            #     fn.invertQTransform(self.childGroup.transform()).map(ev.pos())
            # )

            # XXX: scroll "around" the right most element in the view
            # which stays "pinned" in place.

            # furthest_right_coord = self.boundingRect().topRight()

            # yaxis = pg.Point(
            #     fn.invertQTransform(
            #         self.childGroup.transform()
            #     ).map(furthest_right_coord)
            # )

            # This seems like the most "intuitive option, a hybrid of
            # tws and tv styles
            last_bar = pg.Point(int(rbar)) + 1

            ryaxis = chart.getAxis('right')
            r_axis_x = ryaxis.pos().x()

            end_of_l1 = pg.Point(
                round(
                    chart.cv.mapToView(
                        pg.Point(r_axis_x - chart._max_l1_line_len)
                        # QPointF(chart._max_l1_line_len, 0)
                    ).x()
                )
            )  # .x()

            # self.state['viewRange'][0][1] = end_of_l1
            # focal = pg.Point((last_bar.x() + end_of_l1)/2)

            # use right-most point of current curve graphic
            xl = viz.graphics.x_last()
            focal = min(
                last_bar,
                end_of_l1,
                key=lambda p: p.x()
                xl,
                vr,
            )
            # focal = pg.Point(last_bar.x() + end_of_l1)

            self._resetTarget()

            # NOTE: scroll "around" the right most datum-element in view
            # gives the feeling of staying "pinned" in place.
            self.scaleBy(s, focal)

            # XXX: the order of the next 2 lines i'm pretty sure

@@ -593,10 +570,8 @@ class ChartView(ViewBox):
        self,
        ev,
        axis: Optional[int] = None,
        relayed_from: ChartView = None,

    ) -> None:

        pos = ev.pos()
        lastPos = ev.lastPos()
        dif = pos - lastPos

@@ -666,10 +641,10 @@ class ChartView(ViewBox):
            # PANNING MODE
            else:
                # XXX: WHY
                ev.accept()

                self.start_ic()
                try:
                    self.start_ic()
                except RuntimeError:
                    pass
                # if self._ic is None:
                #     self.chart.pause_all_feeds()
                #     self._ic = trio.Event()

@@ -697,6 +672,9 @@ class ChartView(ViewBox):
                #     self._ic = None
                #     self.chart.resume_all_feeds()

                # XXX: WHY
                ev.accept()

        # WEIRD "RIGHT-CLICK CENTER ZOOM" MODE
        elif button & QtCore.Qt.RightButton:

@@ -742,7 +720,12 @@ class ChartView(ViewBox):
        *,

        yrange: Optional[tuple[float, float]] = None,
        range_margin: float = 0.06,
        viz: Viz | None = None,

        # NOTE: this value pairs (more or less) with L1 label text
        # height offset from from the bid/ask lines.
        range_margin: float = 0.09,

        bars_range: Optional[tuple[int, int, int, int]] = None,

        # flag to prevent triggering sibling charts from the same linked

@@ -761,7 +744,7 @@ class ChartView(ViewBox):
        '''
        name = self.name
        # print(f'YRANGE ON {name}')
        profiler = pg.debug.Profiler(
        profiler = Profiler(
            msg=f'`ChartView._set_yrange()`: `{name}`',
            disabled=not pg_profile_enabled(),
            ms_threshold=ms_slower_then,

@@ -795,18 +778,28 @@ class ChartView(ViewBox):
        # XXX: only compute the mxmn range
        # if none is provided as input!
        if not yrange:
            # flow = chart._flows[name]
            yrange = self._maxmin()

            if not viz:
                breakpoint()

            out = viz.maxmin()
            if out is None:
                log.warning(f'No yrange provided for {name}!?')
                return
            (
                ixrng,
                _,
                yrange
            ) = out

            profiler(f'`{self.name}:Viz.maxmin()` -> {ixrng}=>{yrange}')

        if yrange is None:
            log.warning(f'No yrange provided for {name}!?')
            print(f"WTF NO YRANGE {name}")
            return

        ylow, yhigh = yrange

        profiler(f'callback ._maxmin(): {yrange}')

        # view margins: stay within a % of the "true range"
        diff = yhigh - ylow
        ylow = ylow - (diff * range_margin)

@@ -826,54 +819,55 @@ class ChartView(ViewBox):
    def enable_auto_yrange(
        self,
        viz: Viz,
        src_vb: Optional[ChartView] = None,

    ) -> None:
        '''
        Assign callback for rescaling y-axis automatically
        based on data contents and ``ViewBox`` state.
        Assign callbacks for rescaling and resampling y-axis data
        automatically based on data contents and ``ViewBox`` state.

        '''
        if src_vb is None:
            src_vb = self

        # splitter(s) resizing
        src_vb.sigResized.connect(self._set_yrange)
        if self._yranger is None:
            self._yranger = partial(
                self._set_yrange,
                viz=viz,
            )

        # widget-UIs/splitter(s) resizing
        src_vb.sigResized.connect(self._yranger)

        # mouse wheel doesn't emit XRangeChanged
        src_vb.sigRangeChangedManually.connect(self._yranger)

        # re-sampling trigger:
        # TODO: a smarter way to avoid calling this needlessly?
        # 2 things i can think of:
        # - register downsample-able graphics specially and only
        #   iterate those.
        # - only register this when certain downsampleable graphics are
        # - only register this when certain downsample-able graphics are
        #   "added to scene".
        src_vb.sigRangeChangedManually.connect(
            self.maybe_downsample_graphics
        )

        # mouse wheel doesn't emit XRangeChanged
        src_vb.sigRangeChangedManually.connect(self._set_yrange)

        # src_vb.sigXRangeChanged.connect(self._set_yrange)
        # src_vb.sigXRangeChanged.connect(
        #     self.maybe_downsample_graphics
        # )

    def disable_auto_yrange(self) -> None:

        # XXX: not entirely sure why we can't de-reg this..
        self.sigResized.disconnect(
            self._set_yrange,
            self._yranger,
        )

        self.sigRangeChangedManually.disconnect(
            self._yranger,
        )

        self.sigRangeChangedManually.disconnect(
            self.maybe_downsample_graphics
        )
        self.sigRangeChangedManually.disconnect(
            self._set_yrange,
        )

        # self.sigXRangeChanged.disconnect(self._set_yrange)
        # self.sigXRangeChanged.disconnect(
        #     self.maybe_downsample_graphics
        # )

    def x_uppx(self) -> float:
        '''

@@ -882,7 +876,7 @@ class ChartView(ViewBox):
        graphics items which are our children.

        '''
        graphics = [f.graphics for f in self._chart._flows.values()]
        graphics = [f.graphics for f in self._chart._vizs.values()]
        if not graphics:
            return 0

@@ -895,10 +889,9 @@ class ChartView(ViewBox):
    def maybe_downsample_graphics(
        self,
        autoscale_overlays: bool = True,
        autoscale_overlays: bool = False,
    ):

        profiler = pg.debug.Profiler(
        profiler = Profiler(
            msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
            disabled=not pg_profile_enabled(),

@@ -912,10 +905,14 @@ class ChartView(ViewBox):
        # TODO: a faster single-loop-iterator way of doing this XD
        chart = self._chart
        linked = self.linkedsplits
        plots = linked.subplots | {chart.name: chart}
        plots = {chart.name: chart}

        linked = self.linked
        if linked:
            plots |= linked.subplots

        for chart_name, chart in plots.items():
            for name, flow in chart._flows.items():
            for name, flow in chart._vizs.items():

                if (
                    not flow.render

@@ -923,25 +920,24 @@ class ChartView(ViewBox):
                    # XXX: super important to be aware of this.
                    # or not flow.graphics.isVisible()
                ):
                    # print(f'skipping {flow.name}')
                    continue

                # pass in no array which will read and render from the last
                # passed array (normally provided by the display loop.)
                chart.update_graphics_from_flow(
                    name,
                    use_vr=True,
                )
                chart.update_graphics_from_flow(name)

                # for each overlay on this chart auto-scale the
                # y-range to max-min values.
                if autoscale_overlays:
                    overlay = chart.pi_overlay
                    if overlay:
                        for pi in overlay.overlays:
                            pi.vb._set_yrange(
                                # TODO: get the range once up front...
                                # bars_range=br,
                            )
                    profiler('autoscaled linked plots')
                # if autoscale_overlays:
                #     overlay = chart.pi_overlay
                #     if overlay:
                #         for pi in overlay.overlays:
                #             pi.vb._set_yrange(
                #                 # TODO: get the range once up front...
                #                 # bars_range=br,
                #                 viz=pi.viz,
                #             )
                #     profiler('autoscaled linked plots')

                profiler(f'<{chart_name}>.update_graphics_from_flow({name})')
                profiler(f'<{chart_name}>.update_graphics_from_flow({name})')
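
One subtle point in the ``enable_auto_yrange()``/``disable_auto_yrange()`` hunks above: the ``partial`` wrapping ``._set_yrange()`` is cached on ``self._yranger`` because Qt can only disconnect the exact callable object that was connected; a fresh, equal-looking ``partial`` built at disconnect time would fail. A standalone sketch of that pitfall (assumes PyQt5 is installed; all names here are illustrative, not piker's):

```python
from functools import partial

from PyQt5.QtCore import QObject, pyqtSignal


class Resizer(QObject):
    # stand-in for a ViewBox emitting ``sigResized``
    sigResized = pyqtSignal()


def set_yrange(viz=None):
    print(f'rescaling y-range for viz={viz!r}')


src = Resizer()

# WRONG: two equal-looking partials are *distinct* objects, so the
# disconnect below would raise a TypeError:
# src.sigResized.connect(partial(set_yrange, viz='volume'))
# src.sigResized.disconnect(partial(set_yrange, viz='volume'))

# RIGHT: cache one partial and reuse it for connect and disconnect,
# which is exactly what ``self._yranger`` does in the diff.
yranger = partial(set_yrange, viz='volume')
src.sigResized.connect(yranger)
src.sigResized.emit()
src.sigResized.disconnect(yranger)
```
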
@@ -26,22 +26,24 @@ from PyQt5.QtCore import QPointF
from ._axes import YAxisLabel
from ._style import hcolor
from ._pg_overrides import PlotItem


class LevelLabel(YAxisLabel):
    """Y-axis (vertically) oriented, horizontal label that sticks to
    '''
    Y-axis (vertically) oriented, horizontal label that sticks to
    where it's placed despite chart resizing and supports displaying
    multiple fields.


    TODO: replace the rectangle-text part with our new ``Label`` type.

    """
    _x_margin = 0
    _y_margin = 0
    '''
    _x_br_offset: float = -16
    _y_txt_h_scaling: float = 2

    # adjustment "further away from" anchor point
    _x_offset = 9
    _x_offset = 0
    _y_offset = 0

    # fields to be displayed in the label string

@@ -57,12 +59,12 @@ class LevelLabel(YAxisLabel):
        chart,
        parent,

        color: str = 'bracket',
        color: str = 'default_light',

        orient_v: str = 'bottom',
        orient_h: str = 'left',
        orient_h: str = 'right',

        opacity: float = 0,
        opacity: float = 1,

        # makes order line labels offset from their parent axis
        # such that they don't collide with the L1/L2 lines/prices

@@ -98,13 +100,15 @@ class LevelLabel(YAxisLabel):
        self._h_shift = {
            'left': -1.,
            'right': 0.
            'right': 0.,
        }[orient_h]

        self.fields = self._fields.copy()
        # ensure default format fields are in correct
        self.set_fmt_str(self._fmt_str, self.fields)

        self.setZValue(10)

    @property
    def color(self):
        return self._hcolor

@@ -112,7 +116,10 @@ class LevelLabel(YAxisLabel):
    @color.setter
    def color(self, color: str) -> None:
        self._hcolor = color
        self._pen = self.pen = pg.mkPen(hcolor(color))
        self._pen = self.pen = pg.mkPen(
            hcolor(color),
            width=3,
        )

    def update_on_resize(self, vr, r):
        """Tiis is a ``.sigRangeChanged()`` handler.

@@ -124,15 +131,16 @@ class LevelLabel(YAxisLabel):
        self,
        fields: dict = None,
    ) -> None:
        """Update the label's text contents **and** position from
        '''
        Update the label's text contents **and** position from
        a view box coordinate datum.

        """
        '''
        self.fields.update(fields)
        level = self.fields['level']

        # map "level" to local coords
        abs_xy = self._chart.mapFromView(QPointF(0, level))
        abs_xy = self._pi.mapFromView(QPointF(0, level))

        self.update_label(
            abs_xy,

@@ -149,7 +157,7 @@ class LevelLabel(YAxisLabel):
        h, w = self.set_label_str(fields)

        if self._adjust_to_l1:
            self._x_offset = self._chart._max_l1_line_len
            self._x_offset = self._pi.chart_widget._max_l1_line_len

        self.setPos(QPointF(
            self._h_shift * (w + self._x_offset),

@@ -174,7 +182,8 @@ class LevelLabel(YAxisLabel):
        fields: dict,
    ):
        # use space as e3 delim
        self.label_str = self._fmt_str.format(**fields).replace(',', ' ')
        self.label_str = self._fmt_str.format(
            **fields).replace(',', ' ')

        br = self.boundingRect()
        h, w = br.height(), br.width()

@@ -187,14 +196,14 @@ class LevelLabel(YAxisLabel):
        self,
        p: QtGui.QPainter,
        rect: QtCore.QRectF
    ) -> None:
        p.setPen(self._pen)

    ) -> None:

        p.setPen(self._pen)
        rect = self.rect

        if self._orient_v == 'bottom':
            lp, rp = rect.topLeft(), rect.topRight()
            # p.drawLine(rect.topLeft(), rect.topRight())

        elif self._orient_v == 'top':
            lp, rp = rect.bottomLeft(), rect.bottomRight()

@@ -208,6 +217,11 @@ class LevelLabel(YAxisLabel):
            ])
        )

        p.fillRect(
            self.rect,
            self.bg_color,
        )

    def highlight(self, pen) -> None:
        self._pen = pen
        self.update()

@@ -236,43 +250,46 @@ class L1Label(LevelLabel):
        # Set a global "max L1 label length" so we can
        # look it up on order lines and adjust their
        # labels not to overlap with it.
        chart = self._chart
        chart = self._pi.chart_widget
        chart._max_l1_line_len: float = max(
            chart._max_l1_line_len,
            w
            w,
        )

        return h, w


class L1Labels:
    """Level 1 bid ask labels for dynamic update on price-axis.
    '''
    Level 1 bid ask labels for dynamic update on price-axis.

    """
    '''
    def __init__(
        self,
        chart: 'ChartPlotWidget',  # noqa
        plotitem: PlotItem,
        digits: int = 2,
        size_digits: int = 3,
        font_size: str = 'small',
    ) -> None:

        self.chart = chart
        chart = self.chart = plotitem.chart_widget

        raxis = chart.getAxis('right')
        raxis = plotitem.getAxis('right')
        kwargs = {
            'chart': chart,
            'chart': plotitem,
            'parent': raxis,

            'opacity': 1,
            'opacity': .9,
            'font_size': font_size,
            'fg_color': chart.pen_color,
            'bg_color': chart.view_color,
            'fg_color': 'default_light',
            'bg_color': chart.view_color,  # normally 'papas_special'
        }

        # TODO: add humanized source-asset
        # info format.
        fmt_str = (
            ' {size:.{size_digits}f} x '
            '{level:,.{level_digits}f} '
            ' {size:.{size_digits}f} u'
            # '{level:,.{level_digits}f} '
        )
        fields = {
            'level': 0,

@@ -285,12 +302,17 @@ class L1Labels:
            orient_v='bottom',
            **kwargs,
        )
        bid.set_fmt_str(fmt_str=fmt_str, fields=fields)
        bid.set_fmt_str(
            fmt_str='\n' + fmt_str,
            fields=fields,
        )
        bid.show()

        ask = self.ask_label = L1Label(
            orient_v='top',
            **kwargs,
        )
        ask.set_fmt_str(fmt_str=fmt_str, fields=fields)
        ask.set_fmt_str(
            fmt_str=fmt_str,
            fields=fields)
        ask.show()
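
The ``fmt_str`` values above rely on *nested* replacement fields: the inner ``{size_digits}``/``{level_digits}`` fields are expanded first, so the float precision itself comes from the fields dict, and ``set_label_str()`` then swaps commas for spaces as the thousands ("e3") delimiter. A quick runnable check of that behaviour (values made up):

```python
# nested format fields: precision is itself a field value
fmt_str = ' {size:.{size_digits}f} x {level:,.{level_digits}f} '
fields = {
    'size': 2.5,
    'size_digits': 3,
    'level': 16723.25,
    'level_digits': 2,
}
# comma grouping from '{level:,...}' becomes a space delimiter
print(fmt_str.format(**fields).replace(',', ' '))
# -> ' 2.500 x 16 723.25 '
```
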
@@ -233,6 +233,36 @@ class Label:
    def delete(self) -> None:
        self.vb.scene().removeItem(self.txt)

# NOTE: pulled out from ``ChartPlotWidget`` from way way old code.
# def _label_h(self, yhigh: float, ylow: float) -> float:
#     # compute contents label "height" in view terms
#     # to avoid having data "contents" overlap with them
#     if self._labels:
#         label = self._labels[self.name][0]

#         rect = label.itemRect()
#         tl, br = rect.topLeft(), rect.bottomRight()
#         vb = self.plotItem.vb

#         try:
#             # on startup labels might not yet be rendered
#             top, bottom = (vb.mapToView(tl).y(), vb.mapToView(br).y())

#             # XXX: magic hack, how do we compute exactly?
#             label_h = (top - bottom) * 0.42

#         except np.linalg.LinAlgError:
#             label_h = 0
#     else:
#         label_h = 0

#     # print(f'label height {self.name}: {label_h}')

#     if label_h > yhigh - ylow:
#         label_h = 0

#     print(f"bounds (ylow, yhigh): {(ylow, yhigh)}")


class FormatLabel(QLabel):
    '''
@@ -18,9 +18,14 @@
Lines for orders, alerts, L2.

"""
from __future__ import annotations
from functools import partial
from math import floor
from typing import Optional, Callable
from typing import (
    Optional,
    Callable,
    TYPE_CHECKING,
)

import pyqtgraph as pg
from pyqtgraph import Point, functions as fn

@@ -37,6 +42,9 @@ from ..calc import humanize
from ._label import Label
from ._style import hcolor, _font

if TYPE_CHECKING:
    from ._cursor import Cursor


# TODO: probably worth investigating if we can
# make .boundingRect() faster:

@@ -84,7 +92,7 @@ class LevelLine(pg.InfiniteLine):
        self._marker = None
        self.only_show_markers_on_hover = only_show_markers_on_hover
        self.show_markers: bool = True  # presuming the line is hovered at init
        self.track_marker_pos: bool = False

        # should line go all the way to far end or leave a "margin"
        # space for other graphics (eg. L1 book)

@@ -122,6 +130,9 @@ class LevelLine(pg.InfiniteLine):
        self._y_incr_mult = 1 / chart.linked.symbol.tick_size
        self._right_end_sc: float = 0

        # use px caching
        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

    def txt_offsets(self) -> tuple[int, int]:
        return 0, 0

@@ -216,20 +227,23 @@ class LevelLine(pg.InfiniteLine):
        y: float

    ) -> None:
        '''Chart coordinates cursor tracking callback.
        '''
        Chart coordinates cursor tracking callback.

        this is called by our ``Cursor`` type once this line is set to
        track the cursor: for every movement this callback is invoked to
        reposition the line with the current view coordinates.

        '''
        self.movable = True
        self.set_level(y)  # implictly calls reposition handler

    def mouseDragEvent(self, ev):
        """Override the ``InfiniteLine`` handler since we need more
        '''
        Override the ``InfiniteLine`` handler since we need more
        detailed control and start end signalling.

        """
        '''
        cursor = self._chart.linked.cursor

        # hide y-crosshair

@@ -281,10 +295,20 @@ class LevelLine(pg.InfiniteLine):
        # show y-crosshair again
        cursor.show_xhair()

    def delete(self) -> None:
        """Remove this line from containing chart/view/scene.
    def get_cursor(self) -> Optional[Cursor]:

        """
        chart = self._chart
        cur = chart.linked.cursor
        if self in cur._hovered:
            return cur

        return None

    def delete(self) -> None:
        '''
        Remove this line from containing chart/view/scene.

        '''
        scene = self.scene()
        if scene:
            for label in self._labels:

@@ -298,9 +322,8 @@ class LevelLine(pg.InfiniteLine):
        # remove from chart/cursor states
        chart = self._chart
        cur = chart.linked.cursor

        if self in cur._hovered:
        cur = self.get_cursor()
        if cur:
            cur._hovered.remove(self)

        chart.plotItem.removeItem(self)

@@ -308,8 +331,8 @@ class LevelLine(pg.InfiniteLine):
    def mouseDoubleClickEvent(
        self,
        ev: QtGui.QMouseEvent,
    ) -> None:

    ) -> None:
        # TODO: enter labels edit mode
        print(f'double click {ev}')

@@ -334,30 +357,22 @@ class LevelLine(pg.InfiniteLine):
        line_end, marker_right, r_axis_x = self._chart.marker_right_points()

        if self.show_markers and self.markers:

            p.setPen(self.pen)
            qgo_draw_markers(
                self.markers,
                self.pen.color(),
                p,
                vb_left,
                vb_right,
                marker_right,
            )
            # marker_size = self.markers[0][2]
            self._maxMarkerSize = max([m[2] / 2. for m in self.markers])

        # this seems slower when moving around
        # order lines.. not sure wtf is up with that.
        # for now we're just using it on the position line.
        elif self._marker:
        # (legacy) NOTE: at one point this seemed slower when moving around
        # order lines.. not sure if that's still true or why but we've
        # dropped the original hacky `.pain()` transform stuff for inf
        # line markers now - check the git history if it needs to be
        # reverted.
        if self._marker:
            if self.track_marker_pos:
                # make the line end at the marker's x pos
                line_end = marker_right = self._marker.pos().x()

            # TODO: make this label update part of a scene-aware-marker
            # composed annotation
            self._marker.setPos(
                QPointF(marker_right, self.scene_y())
            )

            if hasattr(self._marker, 'label'):
                self._marker.label.update()

@@ -379,16 +394,14 @@ class LevelLine(pg.InfiniteLine):
    def hide(self) -> None:
        super().hide()
        if self._marker:
            self._marker.hide()
            # needed for ``order_line()`` lines currently
            self._marker.label.hide()
        mkr = self._marker
        if mkr:
            mkr.hide()

    def show(self) -> None:
        super().show()
        if self._marker:
            self._marker.show()
            # self._marker.label.show()

    def scene_y(self) -> float:
        return self.getViewBox().mapFromView(

@@ -421,6 +434,10 @@ class LevelLine(pg.InfiniteLine):
        return path

    @property
    def marker(self) -> LevelMarker:
        return self._marker

    def hoverEvent(self, ev):
        '''
        Mouse hover callback.

@@ -429,17 +446,16 @@ class LevelLine(pg.InfiniteLine):
        cur = self._chart.linked.cursor

        # hovered
        if (not ev.isExit()) and ev.acceptDrags(QtCore.Qt.LeftButton):

        if (
            not ev.isExit()
            and ev.acceptDrags(QtCore.Qt.LeftButton)
        ):
            # if already hovered we don't need to run again
            if self.mouseHovering is True:
                return

            if self.only_show_markers_on_hover:
                self.show_markers = True

                if self._marker:
                    self._marker.show()
                self.show_markers()

            # highlight if so configured
            if self.highlight_on_hover:

@@ -482,11 +498,7 @@ class LevelLine(pg.InfiniteLine):
                cur._hovered.remove(self)

            if self.only_show_markers_on_hover:
                self.show_markers = False

                if self._marker:
                    self._marker.hide()
                    self._marker.label.hide()
                self.hide_markers()

            if self not in cur._trackers:
                cur.show_xhair(y_label_level=self.value())

@@ -498,6 +510,15 @@ class LevelLine(pg.InfiniteLine):
        self.update()

    def hide_markers(self) -> None:
        if self._marker:
            self._marker.hide()
            self._marker.label.hide()

    def show_markers(self) -> None:
        if self._marker:
            self._marker.show()


def level_line(

@@ -518,9 +539,10 @@ def level_line(
    **kwargs,

) -> LevelLine:
    """Convenience routine to add a styled horizontal line to a plot.
    '''
    Convenience routine to add a styled horizontal line to a plot.

    """
    '''
    hl_color = color + '_light' if highlight_on_hover else color

    line = LevelLine(

@@ -702,7 +724,7 @@ def order_line(
        marker = LevelMarker(
            chart=chart,
            style=marker_style,
            get_level=line.value,
            get_level=line.value,  # callback
            size=marker_size,
            keep_in_view=False,
        )

@@ -711,7 +733,8 @@ def order_line(
        marker = line.add_marker(marker)

        # XXX: DON'T COMMENT THIS!
        # this fixes it the artifact issue! .. of course, bounding rect stuff
        # this fixes it the artifact issue!
        # .. of course, bounding rect stuff
        line._maxMarkerSize = marker_size

        assert line._marker is marker

@@ -732,7 +755,8 @@ def order_line(
        if action != 'alert':

            # add a partial position label if we also added a level marker
            # add a partial position label if we also added a level
            # marker
            pp_size_label = Label(
                view=view,
                color=line.color,

@@ -766,9 +790,9 @@ def order_line(
            # XXX: without this the pp proportion label next the marker
            # seems to lag? this is the same issue we had with position
            # lines which we handle with ``.update_graphcis()``.
            # marker._on_paint=lambda marker: pp_size_label.update()
            marker._on_paint = lambda marker: pp_size_label.update()

        # XXX: THIS IS AN UNTYPED MONKEY PATCH!?!?!
        marker.label = label

        # sanity check
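
Several files in this diff (the ``LevelLine`` module included) adopt the same import hygiene: type-only imports are deferred behind ``typing.TYPE_CHECKING`` together with ``from __future__ import annotations``, which breaks the circular ``Cursor`` import while keeping annotations like ``Optional[Cursor]`` usable. A condensed sketch of the pattern as applied to ``get_cursor()`` above (class body trimmed; the relative import is only ever read by type checkers):

```python
from __future__ import annotations
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    # never executed at runtime, so no circular-import cost
    from ._cursor import Cursor


class LineSketch:
    # toy stand-in for ``LevelLine.get_cursor()``
    def get_cursor(self) -> Optional[Cursor]:
        cur = self._chart.linked.cursor
        if self in cur._hovered:
            return cur
        return None
```
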
@@ -0,0 +1,108 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Notifications utils.

"""
import os
import platform
import subprocess
from typing import Optional

import trio

from ..log import get_logger
from ..clearing._messages import (
    Status,
)

log = get_logger(__name__)


_dbus_uid: Optional[str] = ''


async def notify_from_ems_status_msg(
    msg: Status,
    duration: int = 3000,
    is_subproc: bool = False,

) -> None:
    '''
    Send a linux desktop notification.

    Handle subprocesses by discovering the dbus user id
    on first call.

    '''
    if platform.system() != "Linux":
        return

    # TODO: this in another task?
    # not sure if this will ever be a bottleneck,
    # we probably could do graphics stuff first tho?

    if is_subproc:
        global _dbus_uid
        su = os.environ.get('SUDO_USER')
        if (
            not _dbus_uid
            and su
        ):

            # TODO: use `trio` but we need to use nursery.start()
            # to use pipes?
            # result = await trio.run_process(
            result = subprocess.run(
                [
                    'id',
                    '-u',
                    su,
                ],
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                # check=True
            )
            _dbus_uid = result.stdout.decode("utf-8").replace('\n', '')

            os.environ['DBUS_SESSION_BUS_ADDRESS'] = (
                f'unix:path=/run/user/{_dbus_uid}/bus'
            )

    try:
        result = await trio.run_process(
            [
                'notify-send',
                '-u', 'normal',
                '-t', f'{duration}',
                'piker',

                # TODO: add in standard fill/exec info that maybe we
                # pack in a broker independent way?
                f"'{msg.pformat()}'",
            ],
            capture_stdout=True,
            capture_stderr=True,
            check=False,
        )
        if result.returncode != 0:
            log.warn(f'Notification daemon crashed stderr: {result.stderr}')

        log.runtime(result)

    except FileNotFoundError:
        log.warn('Tried to send a notification but \'notify-send\' not present')
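
The happy path of this new notifications helper reduces to shelling out to ``notify-send`` through ``trio.run_process()`` so the UI task never blocks on the notification daemon. A trimmed, standalone usage sketch (assumes a Linux box with ``notify-send`` installed; the message text is made up):

```python
import trio


async def notify(body: str, duration_ms: int = 3000) -> None:
    # mirror the diff: never raise on a non-zero exit, just log it
    result = await trio.run_process(
        [
            'notify-send',
            '-u', 'normal',
            '-t', f'{duration_ms}',
            'piker',
            body,
        ],
        capture_stdout=True,
        capture_stderr=True,
        check=False,
    )
    if result.returncode != 0:
        print(f'notify-send failed: {result.stderr.decode()}')


if __name__ == '__main__':
    trio.run(notify, 'fill: 100 xbtusd @ 27k')
```
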
@@ -18,23 +18,23 @@ Super fast OHLC sampling graphics types.
"""
from __future__ import annotations
from typing import (
    Optional,
    TYPE_CHECKING,
)

import numpy as np
import pyqtgraph as pg
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QLineF, QPointF
from PyQt5 import (
    QtGui,
    QtWidgets,
)
from PyQt5.QtCore import (
    QLineF,
    QRectF,
)
from PyQt5.QtWidgets import QGraphicsItem
from PyQt5.QtGui import QPainterPath

from ._curve import FlowGraphic
from .._profile import pg_profile_enabled, ms_slower_then
from ._style import hcolor
from ..log import get_logger

if TYPE_CHECKING:
    from ._chart import LinkedSplits
    from .._profile import Profiler


log = get_logger(__name__)

@@ -43,7 +43,8 @@ log = get_logger(__name__)
def bar_from_ohlc_row(
    row: np.ndarray,
    # 0.5 is no overlap between arms, 1.0 is full overlap
    w: float = 0.43
    bar_w: float,
    bar_gap: float = 0.16

) -> tuple[QLineF]:
    '''

@@ -51,8 +52,7 @@ def bar_from_ohlc_row(
    OHLC "bar" for use in the "last datum" of a series.

    '''
    open, high, low, close, index = row[
        ['open', 'high', 'low', 'close', 'index']]
    open, high, low, close, index = row

    # TODO: maybe consider using `QGraphicsLineItem` ??
    # gives us a ``.boundingRect()`` on the objects which may make

@@ -60,9 +60,11 @@ def bar_from_ohlc_row(
    # history path faster since it's done in C++:
    # https://doc.qt.io/qt-5/qgraphicslineitem.html

    mid: float = (bar_w / 2) + index

    # high -> low vertical (body) line
    if low != high:
        hl = QLineF(index, low, index, high)
        hl = QLineF(mid, low, mid, high)
    else:
        # XXX: if we don't do it renders a weird rectangle?
        # see below for filtering this later...

@@ -73,48 +75,55 @@ def bar_from_ohlc_row(
    # the index's range according to the view mapping coordinates.

    # open line
    o = QLineF(index - w, open, index, open)
    o = QLineF(index + bar_gap, open, mid, open)

    # close line
    c = QLineF(index, close, index + w, close)
    c = QLineF(
        mid, close,
        index + bar_w - bar_gap, close,
    )

    return [hl, o, c]


class BarItems(pg.GraphicsObject):
class BarItems(FlowGraphic):
    '''
    "Price range" bars graphics rendered from a OHLC sampled sequence.

    '''
    # XXX: causes this weird jitter bug when click-drag panning
    # where the path curve will awkwardly flicker back and forth?
    cache_mode: int = QGraphicsItem.NoCache

    def __init__(
        self,
        linked: LinkedSplits,
        plotitem: 'pg.PlotItem',  # noqa
        pen_color: str = 'bracket',
        last_bar_color: str = 'bracket',

        name: Optional[str] = None,
        *args,
        **kwargs,

    ) -> None:
        super().__init__()
        self.linked = linked
        # XXX: for the mega-lulz increasing width here increases draw
        # latency... so probably don't do it until we figure that out.
        self._color = pen_color
        self.bars_pen = pg.mkPen(hcolor(pen_color), width=1)
        self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
        self._name = name

        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
        self.path = QPainterPath()
        self._last_bar_lines: Optional[tuple[QLineF, ...]] = None
        super().__init__(*args, **kwargs)
        self._last_bar_lines: tuple[QLineF, ...] | None = None

    def x_uppx(self) -> int:
        # we expect the downsample curve report this.
        return 0
    def x_last(self) -> None | float:
        '''
        Return the last most x value of the close line segment
        or if not drawn yet, ``None``.

        '''
        if self._last_bar_lines:
            close_arm_line = self._last_bar_lines[-1]
            return close_arm_line.x2() if close_arm_line else None
        else:
            return None

    # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
    def boundingRect(self):
        # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
        # profiler = Profiler(
        #     msg=f'BarItems.boundingRect(): `{self._name}`',
        #     disabled=not pg_profile_enabled(),
        #     ms_threshold=ms_slower_then,
        # )

        # TODO: Can we do rect caching to make this faster
        # like `pg.PlotCurveItem` does? In theory it's just

@@ -134,32 +143,37 @@ class BarItems(pg.GraphicsObject):
            hb.topLeft(),
            hb.bottomRight(),
        )
        mn_y = hb_tl.y()
        mx_y = hb_br.y()
        most_left = hb_tl.x()
        most_right = hb_br.x()
        # profiler('calc path vertices')

        # need to include last bar height or BR will be off
        mx_y = hb_br.y()
        mn_y = hb_tl.y()

        last_lines = self._last_bar_lines
        # OHLC line segments: [hl, o, c]
        last_lines: tuple[QLineF] | None = self._last_bar_lines
        if last_lines:
            body_line = self._last_bar_lines[0]
            if body_line:
                mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))
                mn_y = min(mn_y, min(body_line.y1(), body_line.y2()))
            (
                hl,
                o,
                c,
            ) = last_lines
            most_right = c.x2() + 1
            ymx = ymn = c.y2()

        return QtCore.QRectF(

            # top left
            QPointF(
                hb_tl.x(),
                mn_y,
            ),

            # bottom right
            QPointF(
                hb_br.x() + 1,
                mx_y,
            )
            if hl:
                y1, y2 = hl.y1(), hl.y2()
                ymn = min(y1, y2)
                ymx = max(y1, y2)
                mx_y = max(ymx, mx_y)
                mn_y = min(ymn, mn_y)
                # profiler('calc last bar vertices')

        return QRectF(
            most_left,
            mn_y,
            most_right - most_left + 1,
            mx_y - mn_y,
        )

    def paint(

@@ -170,7 +184,7 @@ class BarItems(pg.GraphicsObject):
    ) -> None:

        profiler = pg.debug.Profiler(
        profiler = Profiler(
            disabled=not pg_profile_enabled(),
            ms_threshold=ms_slower_then,
        )

@@ -183,12 +197,12 @@ class BarItems(pg.GraphicsObject):
        # as is necesarry for what's in "view". Not sure if this will
        # lead to any perf gains other then when zoomed in to less bars
        # in view.
        p.setPen(self.last_bar_pen)
        p.setPen(self.last_step_pen)
        if self._last_bar_lines:
            p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
            profiler('draw last bar')

        p.setPen(self.bars_pen)
        p.setPen(self._pen)
        p.drawPath(self.path)
        profiler(f'draw history path: {self.path.capacity()}')

@@ -196,29 +210,40 @@ class BarItems(pg.GraphicsObject):
        self,
        path: QPainterPath,
        src_data: np.ndarray,
        render_data: np.ndarray,
        reset: bool,
        array_key: str,

        fields: list[str] = [
            'index',
            'open',
            'high',
            'low',
            'close',
        ],
        index_field: str,

    ) -> None:

        # relevant fields
        fields: list[str] = [
            'open',
            'high',
            'low',
            'close',
            index_field,
        ]
        ohlc = src_data[fields]
        last_row = ohlc[-1:]
        # last_row = ohlc[-1:]

        # individual values
        last_row = i, o, h, l, last = ohlc[-1]
        last_row = o, h, l, last, i = ohlc[-1]

        # times = src_data['time']
        # if times[-1] - times[-2]:
        #     breakpoint()

        index = src_data[index_field]
        step_size = index[-1] - index[-2]

        # generate new lines objects for updatable "current bar"
        self._last_bar_lines = bar_from_ohlc_row(last_row)
        bg: float = 0.16 * step_size
        self._last_bar_lines = bar_from_ohlc_row(
            last_row,
            bar_w=step_size,
            bar_gap=bg,
        )

        # assert i == graphics.start_index - 1
        # assert i == last_index

@@ -233,10 +258,16 @@ class BarItems(pg.GraphicsObject):
            if l != h:  # noqa

                if body is None:
                    body = self._last_bar_lines[0] = QLineF(i, l, i, h)
                    body = self._last_bar_lines[0] = QLineF(
                        i + bg, l,
                        i + step_size - bg, h,
                    )
                else:
                    # update body
                    body.setLine(i, l, i, h)
                    body.setLine(
                        body.x1(), l,
                        body.x2(), h,
                    )

                # XXX: pretty sure this is causing an issue where the
                # bar has a large upward move right before the next

@@ -247,4 +278,4 @@ class BarItems(pg.GraphicsObject):
        # date / from some previous sample. It's weird though
        # because i've seen it do this to bars i - 3 back?

        return ohlc['index'], ohlc['close']
        return ohlc[index_field], ohlc['close']
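
The new ``bar_w``/``bar_gap`` parametrization above keys the last-bar geometry off the (time) index step instead of a hard-coded 0.43 half-width: the high-low body sits at the bar's midpoint and the open/close arms are inset by the gap on either side. A standalone numeric check of that same arithmetic, condensed from the diff (QtCore only, no running Qt app needed; the sample row is made up):

```python
from PyQt5.QtCore import QLineF


def bar_from_ohlc_row(row, bar_w, bar_gap=0.16):
    open, high, low, close, index = row
    mid = (bar_w / 2) + index
    hl = QLineF(mid, low, mid, high)                # high -> low body
    o = QLineF(index + bar_gap, open, mid, open)    # open arm (left)
    c = QLineF(mid, close, index + bar_w - bar_gap, close)  # close arm
    return [hl, o, c]


# e.g. a 1-unit index step: body at x=100.5, arms inset by 0.16
for line in bar_from_ohlc_row((10.0, 12.0, 9.5, 11.0, 100.0), bar_w=1.0):
    print((line.x1(), line.y1(), line.x2(), line.y2()))
```
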
@ -22,12 +22,9 @@ from __future__ import annotations
|
|||
from typing import (
|
||||
Optional, Generic,
|
||||
TypeVar, Callable,
|
||||
Literal,
|
||||
)
|
||||
import enum
|
||||
import sys
|
||||
|
||||
from pydantic import BaseModel, validator
|
||||
# from pydantic import BaseModel, validator
|
||||
from pydantic.generics import GenericModel
|
||||
from PyQt5.QtWidgets import (
|
||||
QWidget,
|
||||
|
@ -38,6 +35,7 @@ from ._forms import (
|
|||
# FontScaledDelegate,
|
||||
Edit,
|
||||
)
|
||||
from ..data.types import Struct
|
||||
|
||||
|
||||
DataType = TypeVar('DataType')
|
||||
|
@ -62,7 +60,7 @@ class Selection(Field[DataType], Generic[DataType]):
|
|||
options: dict[str, DataType]
|
||||
# value: DataType = None
|
||||
|
||||
@validator('value') # , always=True)
|
||||
# @validator('value') # , always=True)
|
||||
def set_value_first(
|
||||
cls,
|
||||
|
||||
|
@@ -100,7 +98,7 @@ class Edit(Field[DataType], Generic[DataType]):
     widget_factory = Edit


-class AllocatorPane(BaseModel):
+class AllocatorPane(Struct):

     account = Selection[str](
         options=dict.fromkeys(
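These hunks continue the migration off pydantic: the `@validator` hook is disabled and the pane model re-bases onto the repo's own `Struct` from `..data.types`. A hedged sketch of the pattern, assuming (not confirmed by this hunk) that `Struct` wraps `msgspec.Struct`; field and method names below are illustrative only:

    # Sketch of the pydantic -> Struct migration pattern above.
    import msgspec

    class AllocatorPaneSketch(msgspec.Struct):
        # plain typed fields replace pydantic's validated attributes
        account: str = 'paper'

        def validate_account(self) -> None:
            # explicit check replacing the commented-out
            # ``@validator('value')`` hook
            if not self.account:
                raise ValueError('account name must be non-empty')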
@@ -18,23 +18,27 @@
 Charting overlay helpers.

 '''
-from typing import Callable, Optional
-
-from pyqtgraph.Qt.QtCore import (
-    # QObject,
-    # Signal,
-    Qt,
-    # QEvent,
-)
+from collections import defaultdict
+from functools import partial
+from typing import (
+    Callable,
+    Optional,
+)

 from pyqtgraph.graphicsItems.AxisItem import AxisItem
 from pyqtgraph.graphicsItems.ViewBox import ViewBox
-from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
+# from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
 from pyqtgraph.graphicsItems.PlotItem.PlotItem import PlotItem
-from pyqtgraph.Qt.QtCore import QObject, Signal, QEvent
-from pyqtgraph.Qt.QtWidgets import QGraphicsGridLayout, QGraphicsLinearLayout
-
-from ._interaction import ChartView
+from pyqtgraph.Qt.QtCore import (
+    QObject,
+    Signal,
+    QEvent,
+    Qt,
+)
+from pyqtgraph.Qt.QtWidgets import (
+    # QGraphicsGridLayout,
+    QGraphicsLinearLayout,
+)

 __all__ = ["PlotItemOverlay"]
@@ -80,25 +84,20 @@ class ComposedGridLayout:
     ``<axis_name>i`` in the layout.

     The ``item: PlotItem`` passed to the constructor's grid layout is
-    used verbatim as the "main plot" who's view box is give precedence
-    for input handling. The main plot's axes are removed from it's
+    used verbatim as the "main plot" whose view box is given precedence
+    for input handling. The main plot's axes are removed from its
     layout and placed in the surrounding exterior layouts to allow for
     re-ordering if desired.

     '''
     def __init__(
         self,
-        item: PlotItem,
-        grid: QGraphicsGridLayout,
-        reverse: bool = False,  # insert items to the "center"
+        pi: PlotItem,

     ) -> None:
-        self.items: list[PlotItem] = []
-        # self.grid = grid
-        self.reverse = reverse
-
-        # TODO: use a ``bidict`` here?
-        self._pi2axes: dict[
+        self.pitems: list[PlotItem] = []
+        self._pi2axes: dict[  # TODO: use a ``bidict`` here?
             int,
             dict[str, AxisItem],
         ] = {}
@@ -120,12 +119,13 @@ class ComposedGridLayout:

         if name in ('top', 'bottom'):
             orient = Qt.Vertical

         elif name in ('left', 'right'):
             orient = Qt.Horizontal

         layout.setOrientation(orient)

-        self.insert(0, item)
+        self.insert_plotitem(0, pi)

         # insert surrounding linear layouts into the parent pi's layout
         # such that additional axes can be appended arbitrarily without
@@ -135,13 +135,14 @@ class ComposedGridLayout:
             # TODO: do we need this?
             # axis should have been removed during insert above
             index = _axes_layout_indices[name]
-            axis = item.layout.itemAt(*index)
+            axis = pi.layout.itemAt(*index)
             if axis and axis.isVisible():
                 assert linlayout.itemAt(0) is axis

-            # item.layout.removeItem(axis)
-            item.layout.addItem(linlayout, *index)
-            layout = item.layout.itemAt(*index)
+            # XXX: see comment in ``.insert_plotitem()``...
+            # pi.layout.removeItem(axis)
+            pi.layout.addItem(linlayout, *index)
+            layout = pi.layout.itemAt(*index)
             assert layout is linlayout

     def _register_item(
@@ -157,27 +158,32 @@ class ComposedGridLayout:
             self._pi2axes.setdefault(name, {})[index] = axis

         # enter plot into list for index tracking
-        self.items.insert(index, plotitem)
+        self.pitems.insert(index, plotitem)

-    def insert(
+    def insert_plotitem(
         self,
         index: int,
         plotitem: PlotItem,

-    ) -> (int, int):
+    ) -> tuple[int, list[AxisItem]]:
         '''
         Place item at index by inserting all axes into the grid
         at list-order appropriate position.

         '''
         if index < 0:
-            raise ValueError('`insert()` only supports an index >= 0')
+            raise ValueError(
+                '`.insert_plotitem()` only supports an index >= 0'
+            )
+
+        inserted_axes: list[AxisItem] = []

         # add plot's axes in sequence to the embedded linear layouts
         # for each "side" thus avoiding graphics collisions.
         for name, axis_info in plotitem.axes.copy().items():
             linlayout, axes = self.sides[name]
             axis = axis_info['item']
+            inserted_axes.append(axis)

             if axis in axes:
                 # TODO: re-order using ``.pop()`` ?
@@ -190,19 +196,20 @@ class ComposedGridLayout:
             if (
                 not axis.isVisible()

-                # XXX: we never skip moving the axes for the *first*
+                # XXX: we never skip moving the axes for the *root*
                 # plotitem inserted (even if not shown) since we need to
                 # move all the hidden axes into linear sub-layouts for
                 # that "central" plot in the overlay. Also if we don't
                 # do it there's weird geometry calc offsets that make
                 # view coords slightly off somehow .. smh
-                and not len(self.items) == 0
+                and not len(self.pitems) == 0
             ):
                 continue

-            # XXX: Remove old axis? No, turns out we don't need this?
-            # DON'T unlink it since we the original ``ViewBox``
-            # to still drive it B)
+            # XXX: Remove old axis?
+            # No, turns out we don't need this?
+            # DON'T UNLINK IT since we need the original ``ViewBox`` to
+            # still drive it with events/handlers B)
             # popped = plotitem.removeAxis(name, unlink=False)
             # assert axis is popped
@@ -218,9 +225,9 @@ class ComposedGridLayout:

         self._register_item(index, plotitem)

-        return index
+        return (index, inserted_axes)

-    def append(
+    def append_plotitem(
         self,
         item: PlotItem,
@@ -232,7 +239,7 @@ class ComposedGridLayout:
         '''
         # for left and bottom axes we have to first remove
         # items and re-insert to maintain a list-order.
-        return self.insert(len(self.items), item)
+        return self.insert_plotitem(len(self.pitems), item)

     def get_axis(
         self,
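The renamed `insert_plotitem()`/`append_plotitem()` API now hands callers both the insertion index and the list of re-homed axes. A hedged usage sketch under those assumptions (`ComposedGridLayout` is the class from this module; construction details are illustrative):

    import pyqtgraph as pg

    app = pg.mkQApp()
    root = pg.PlotItem(name='price')
    grid = ComposedGridLayout(root)  # the new single-arg ctor

    vlm = pg.PlotItem(name='vlm')
    index, axes = grid.append_plotitem(vlm)
    # the returned ``AxisItem``s can then be styled by the caller
    for axis in axes:
        axis.setZValue(0.5)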
@@ -245,20 +252,20 @@ class ComposedGridLayout:
         if axis for that name is not shown.

         '''
-        index = self.items.index(plot)
+        index = self.pitems.index(plot)
         named = self._pi2axes[name]
         return named.get(index)

-    def pop(
-        self,
-        item: PlotItem,
+    # def pop(
+    #     self,
+    #     item: PlotItem,

-    ) -> PlotItem:
-        '''
-        Remove item and restack all axes in list-order.
+    # ) -> PlotItem:
+    #     '''
+    #     Remove item and restack all axes in list-order.

-        '''
-        raise NotImplementedError
+    #     '''
+    #     raise NotImplementedError


 # Unimplemented features TODO:
@@ -279,194 +286,6 @@ class ComposedGridLayout:
 # axis?


-# TODO: we might want to enabled some kind of manual flag to disable
-# this method wrapping during type creation? As example a user could
-# definitively decide **not** to enable broadcasting support by
-# setting something like ``ViewBox.disable_relays = True``?
-def mk_relay_method(
-
-    signame: str,
-    slot: Callable[
-        [ViewBox,
-         'QEvent',
-         Optional[AxisItem]],
-        None,
-    ],
-
-) -> Callable[
-    [
-        ViewBox,
-        # lol, there isn't really a generic type thanks
-        # to the rewrite of Qt's event system XD
-        'QEvent',
-
-        'Optional[AxisItem]',
-        'Optional[ViewBox]',  # the ``relayed_from`` arg we provide
-    ],
-    None,
-]:
-
-    def maybe_broadcast(
-        vb: 'ViewBox',
-        ev: 'QEvent',
-        axis: 'Optional[int]' = None,
-        relayed_from: 'ViewBox' = None,
-
-    ) -> None:
-        '''
-        (soon to be) Decorator which makes an event handler
-        "broadcastable" to overlayed ``GraphicsWidget``s.
-
-        Adds relay signals based on the decorated handler's name
-        and conducts a signal broadcast of the relay signal if there
-        are consumers registered.
-
-        '''
-        # When no relay source has been set just bypass all
-        # the broadcast machinery.
-        if vb.event_relay_source is None:
-            ev.accept()
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        if relayed_from:
-            assert axis is None
-
-            # this is a relayed event and should be ignored (so it does not
-            # halt/short circuit the graphicscene loop). Further the
-            # surrounding handler for this signal must be allowed to execute
-            # and get processed by **this consumer**.
-            # print(f'{vb.name} rx relayed from {relayed_from.name}')
-            ev.ignore()
-
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        if axis is not None:
-            # print(f'{vb.name} handling axis event:\n{str(ev)}')
-            ev.accept()
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-        elif (
-            relayed_from is None
-            and vb.event_relay_source is vb  # we are the broadcaster
-            and axis is None
-        ):
-            # Broadcast case: this is a source event which will be
-            # relayed to attached consumers and accepted after all
-            # consumers complete their own handling followed by this
-            # routine's processing. Sequence is,
-            # - pre-relay to all consumers *first* - ``.emit()`` blocks
-            #   until all downstream relay handlers have run.
-            # - run the source handler for **this** event and accept
-            #   the event
-
-            # Access the "bound signal" that is created
-            # on the widget type as part of instantiation.
-            signal = getattr(vb, signame)
-            # print(f'{vb.name} emitting {signame}')
-
-            # TODO/NOTE: we could also just bypass a "relay" signal
-            # entirely and instead call the handlers manually in
-            # a loop? This probably is a lot simpler and also doesn't
-            # have any downside, and allows not touching target widget
-            # internals.
-            signal.emit(
-                ev,
-                axis,
-                # passing this demarks a broadcasted/relayed event
-                vb,
-            )
-            # accept event so no more relays are fired.
-            ev.accept()
-
-            # call underlying wrapped method with an extra
-            # ``relayed_from`` value to denote that this is a relayed
-            # event handling case.
-            return slot(
-                vb,
-                ev,
-                axis=axis,
-            )
-
-    return maybe_broadcast
-
-
-# XXX: :( can't define signals **after** class compile time
-# so this is not really useful.
-# def mk_relay_signal(
-#     func,
-#     name: str = None,
-
-# ) -> Signal:
-#     (
-#         args,
-#         varargs,
-#         varkw,
-#         defaults,
-#         kwonlyargs,
-#         kwonlydefaults,
-#         annotations
-#     ) = inspect.getfullargspec(func)
-
-#     # XXX: generate a relay signal with 1 extra
-#     # argument for a ``relayed_from`` kwarg. Since
-#     # ``'self'`` is already ignored by signals we just need
-#     # to count the arguments since we're adding only 1 (and
-#     # ``args`` will capture that).
-#     numargs = len(args + list(defaults))
-#     signal = Signal(*tuple(numargs * [object]))
-#     signame = name or func.__name__ + 'Relay'
-#     return signame, signal
-
-
-def enable_relays(
-    widget: GraphicsWidget,
-    handler_names: list[str],
-
-) -> list[Signal]:
-    '''
-    Method override helper which enables relay of a particular
-    ``Signal`` from some chosen broadcaster widget to a set of
-    consumer widgets which should operate their event handlers normally
-    but instead off signals "relayed" from the broadcaster.
-
-    Mostly useful for overlaying widgets that handle user input
-    that you want to overlay graphically. The target ``widget`` type must
-    define ``QtCore.Signal``s each with a `'Relay'` suffix for each
-    name provided in ``handler_names: list[str]``.
-
-    '''
-    signals = []
-    for name in handler_names:
-        handler = getattr(widget, name)
-        signame = name + 'Relay'
-        # ensure the target widget defines a relay signal
-        relay = getattr(widget, signame)
-        widget.relays[signame] = name
-        signals.append(relay)
-        method = mk_relay_method(signame, handler)
-        setattr(widget, name, method)
-
-    return signals
-
-
-enable_relays(
-    ChartView,
-    ['wheelEvent', 'mouseDragEvent']
-)


 class PlotItemOverlay:
     '''
     A composite for managing overlaid ``PlotItem`` instances such that
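For reference, the machinery being deleted above wired relays through Qt signals: the broadcaster type had to predeclare one `'Relay'`-suffixed `Signal` per wrapped handler. A minimal, hedged PyQt5-only reproduction of that removed pattern (all names illustrative):

    from PyQt5.QtCore import QObject, pyqtSignal

    class Broadcaster(QObject):
        # one relay signal per wrapped handler, 'Relay'-suffixed,
        # carrying (event, axis, relayed_from) as in the removed code
        wheelEventRelay = pyqtSignal(object, object, object)

        def wheelEvent(self, ev, axis=None):
            # relay first (``emit()`` blocks until direct-connected
            # consumers have run), then consume locally
            self.wheelEventRelay.emit(ev, axis, self)
            print(f'source consumed {ev}')

    b = Broadcaster()
    b.wheelEventRelay.connect(
        lambda ev, axis, src: print(f'relayee saw {ev}')
    )
    b.wheelEvent('scroll')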
@@ -482,86 +301,191 @@ class PlotItemOverlay:
     ) -> None:

         self.root_plotitem: PlotItem = root_plotitem
+        self.relay_handlers: defaultdict[
+            str,
+            list[Callable],
+        ] = defaultdict(list)

-        vb = root_plotitem.vb
-        vb.event_relay_source = vb  # TODO: maybe change name?
-        vb.setZValue(1000)  # XXX: critical for scene layering/relaying
+        # NOTE: required for scene layering/relaying; this guarantees
+        # the "root" plot receives priority for interaction
+        # events/signals.
+        root_plotitem.vb.setZValue(10)

-        self.overlays: list[PlotItem] = []
-        self.layout = ComposedGridLayout(
-            root_plotitem,
-            root_plotitem.layout,
-        )
+        self.layout = ComposedGridLayout(root_plotitem)
         self._relays: dict[str, Signal] = {}

+    @property
+    def overlays(self) -> list[PlotItem]:
+        return self.layout.pitems
+
     def add_plotitem(
         self,
         plotitem: PlotItem,
         index: Optional[int] = None,

-        # TODO: we could also put the ``ViewBox.XAxis``
-        # style enum here?
+        # event/signal names which will be broadcasted to all added
+        # (relayee) ``PlotItem``s (eg. ``ViewBox.mouseDragEvent``).
+        relay_events: list[str] = [],

         # (0,),  # link x
         # (1,),  # link y
         # (0, 1),  # link both
         link_axes: tuple[int] = (),

-    ) -> None:
+    ) -> tuple[int, list[AxisItem]]:

-        index = index or len(self.overlays)
         root = self.root_plotitem
-        # layout: QGraphicsGridLayout = root.layout
-        self.overlays.insert(index, plotitem)
         vb: ViewBox = plotitem.vb

-        # mark this consumer overlay as ready to expect relayed events
-        # from the root plotitem.
-        vb.event_relay_source = root.vb
-
         # TODO: some sane way to allow menu event broadcast XD
         # vb.setMenuEnabled(False)

-        # TODO: inside the `maybe_broadcast()` (soon to be) decorator
-        # we need have checks that consumers have been attached to
-        # these relay signals.
-        if link_axes != (0, 1):
-
-            # wire up relay signals
-            for relay_signal_name, handler_name in vb.relays.items():
-                # print(handler_name)
-                # XXX: Signal class attrs are bound after instantiation
-                # of the defining type, so we need to access that bound
-                # version here.
-                signal = getattr(root.vb, relay_signal_name)
-                handler = getattr(vb, handler_name)
-                signal.connect(handler)
+        # wire up any relay signal(s) from the source plot to added
+        # "overlays". We use a plain loop instead of mucking with
+        # re-connecting signal/slots which tends to be more invasive and
+        # harder to implement and provides no measurable performance
+        # gain.
+        if relay_events:
+            for ev_name in relay_events:
+                relayee_handler: Callable[
+                    [
+                        ViewBox,
+                        # lol, there isn't really a generic type thanks
+                        # to the rewrite of Qt's event system XD
+                        QEvent,
+                        AxisItem | None,
+                    ],
+                    None,
+                ] = getattr(vb, ev_name)
+
+                sub_handlers: list[Callable] = self.relay_handlers[ev_name]
+
+                # on the first registry of a relayed event we pop the
+                # root's handler and override it to a custom broadcaster
+                # routine.
+                if not sub_handlers:
+
+                    src_handler = getattr(
+                        root.vb,
+                        ev_name,
+                    )
+
+                    def broadcast(
+                        ev: 'QEvent',
+
+                        # TODO: drop this viewbox specific input and
+                        # allow a predicate to be passed in by user.
+                        axis: 'Optional[int]' = None,
+
+                        *,
+
+                        # these are bound in by the ``partial`` below
+                        # and ensure a unique broadcaster per event.
+                        ev_name: str = None,
+                        src_handler: Callable = None,
+                        relayed_from: 'ViewBox' = None,
+
+                        # remaining inputs the source handler expects
+                        **kwargs,
+
+                    ) -> None:
+                        '''
+                        Broadcast signal or event: this is a source
+                        event which will be relayed to attached
+                        "relayee" plot item consumers.
+
+                        The event is accepted, halting any further
+                        handlers from being triggered.
+
+                        Sequence is,
+                        - pre-relay to all consumers *first* - exactly
+                          like how a ``Signal.emit()`` blocks until all
+                          downstream relay handlers have run.
+                        - run the event's source handler last
+
+                        '''
+                        ev.accept()
+
+                        # trigger relay of event to all consumers
+                        # **before** processing/consumption in the
+                        # source handler.
+                        relayed_handlers = self.relay_handlers[ev_name]
+
+                        assert getattr(vb, ev_name).__name__ == ev_name
+
+                        # TODO: generalize as an input predicate
+                        if axis is None:
+                            for handler in relayed_handlers:
+                                handler(
+                                    ev,
+                                    axis=axis,
+                                    **kwargs,
+                                )
+
+                        # run "source" widget's handler last
+                        src_handler(
+                            ev,
+                            axis=axis,
+                        )
+
+                    # dynamic handler override on the publisher plot
+                    setattr(
+                        root.vb,
+                        ev_name,
+                        partial(
+                            broadcast,
+                            ev_name=ev_name,
+                            src_handler=src_handler
+                        ),
+                    )
+
+                else:
+                    assert getattr(root.vb, ev_name)
+                    assert relayee_handler not in sub_handlers
+
+                # append relayed-to widget's handler to relay table
+                sub_handlers.append(relayee_handler)

         # link dim-axes to root if requested by user.
         # TODO: solve more-than-wanted scaled panning on click drag
         # which seems to be due to broadcast. So we probably need to
         # disable broadcast when axes are linked in a particular
         # dimension?
         for dim in link_axes:
             # link x and y axes to new view box such that the top level
             # viewbox propagates to the root (and whatever other
             # plotitem overlays that have been added).
             vb.linkView(dim, root.vb)

-        # make overlaid viewbox impossible to focus since the top
-        # level should handle all input and relay to overlays.
-        # NOTE: this was solved with the `setZValue()` above!
+        # => NOTE: in order to prevent "more-than-linear" scaled
+        # panning moves on (for eg. click-drag) certain range change
+        # signals (i.e. ``.sigXRangeChanged``), the user needs to be
+        # careful that any broadcasted ``relay_events`` are short
+        # circuited in sub-handlers (aka relayee's) implementations. As
+        # an example if a ``ViewBox.mouseDragEvent`` is broadcasted, the
+        # overlayed implementations need to be sure they either don't
+        # also link the x-axes (by not providing ``link_axes=(0,)``
+        # above) or that the relayee ``.mouseDragEvent()`` handlers are
+        # ready to "``return`` early" in the case that
+        # ``.sigXRangeChanged`` is emitted as part of linked axes.
+        # For more details on such signalling mechanics peek in
+        # ``ViewBox.linkView()``.

-        # TODO: we will probably want to add a "focus" api such that
-        # a new "top level" ``PlotItem`` can be selected dynamically
-        # (and presumably the axes dynamically sorted to match).
+        # make overlaid viewbox impossible to focus since the top level
+        # should handle all input and relay to overlays. Note that the
+        # "root" plot item getting interaction priority is configured
+        # with the ``.setZValue()`` during init.
+        vb.setFlag(
+            vb.GraphicsItemFlag.ItemIsFocusable,
+            False
+        )
+        vb.setFocusPolicy(Qt.NoFocus)
+
+        # => TODO: add a "focus" api for switching the "top level"
+        # ``PlotItem`` dynamically.

         # append-compose into the layout all axes from this plot
-        self.layout.insert(index, plotitem)
+        if index is None:
+            insert_index, axes = self.layout.append_plotitem(plotitem)
+        else:
+            insert_index, axes = self.layout.insert_plotitem(index, plotitem)

         plotitem.setGeometry(root.vb.sceneBoundingRect())
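The `broadcast` closure above replaces the old signal-relay machinery with a plain handler override: the root view's bound method is swapped for a `partial` that fans the event out to every relayee before running the original handler. A stripped-down, Qt-free sketch of just that pattern (all names illustrative):

    from collections import defaultdict
    from functools import partial

    relay_handlers: defaultdict[str, list] = defaultdict(list)

    class Source:
        def wheelEvent(self, ev) -> None:
            print(f'source handling {ev}')

    def broadcast(ev, *, ev_name: str, src_handler) -> None:
        for handler in relay_handlers[ev_name]:
            handler(ev)      # relay to consumers *first*
        src_handler(ev)      # then run the source's own handler

    src = Source()
    src_handler = src.wheelEvent
    # dynamic override, mirroring ``setattr(root.vb, ev_name, ...)``
    src.wheelEvent = partial(
        broadcast,
        ev_name='wheelEvent',
        src_handler=src_handler,
    )

    relay_handlers['wheelEvent'].append(
        lambda ev: print(f'relayee got {ev}')
    )
    src.wheelEvent('scroll-up')  # relays first, then source handler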
@@ -579,24 +503,12 @@ class PlotItemOverlay:
         root.vb.setFocus()
         assert root.vb.focusWidget()

-    # XXX: do we need this? Why would you build then destroy?
-    def remove_plotitem(self, plotItem: PlotItem) -> None:
-        '''
-        Remove this ``PlotItem`` from the overlayed set making not shown
-        and unable to accept input.
+        vb.setZValue(100)

-        '''
-        ...
-
-    # TODO: i think this would be super hot B)
-    def focus_item(self, plotitem: PlotItem) -> PlotItem:
-        '''
-        Apply focus to a contained PlotItem thus making it the "top level"
-        item in the overlay able to accept peripheral's input from the user
-        and responsible for zoom and panning control via its ``ViewBox``.
-
-        '''
-        ...
+        return (
+            index,
+            axes,
+        )

     def get_axis(
         self,
@@ -630,8 +542,9 @@ class PlotItemOverlay:

         return axes

-    # TODO: i guess we need this if you want to detach existing plots
-    # dynamically? XXX: untested as of now.
+    # XXX: untested as of now.
+    # TODO: need this as part of selecting a different root/source
+    # plot to rewire interaction event broadcast dynamically.
     def _disconnect_all(
         self,
         plotitem: PlotItem,
@@ -646,3 +559,22 @@ class PlotItemOverlay:
             disconnected.append(sig)

         return disconnected

+    # XXX: do we need this? Why would you build then destroy?
+    # def remove_plotitem(self, plotItem: PlotItem) -> None:
+    #     '''
+    #     Remove this ``PlotItem`` from the overlayed set making it
+    #     not shown and unable to accept input.
+
+    #     '''
+    #     ...
+
+    # TODO: i think this would be super hot B)
+    # def focus_plotitem(self, plotitem: PlotItem) -> PlotItem:
+    #     '''
+    #     Apply focus to a contained PlotItem thus making it the "top level"
+    #     item in the overlay able to accept peripheral's input from the user
+    #     and responsible for zoom and panning control via its ``ViewBox``.
+
+    #     '''
+    #     ...
@@ -1,236 +0,0 @@
-# piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
-
-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
-"""
-Super fast ``QPainterPath`` generation related operator routines.
-
-"""
-from __future__ import annotations
-from typing import (
-    # Optional,
-    TYPE_CHECKING,
-)
-
-import numpy as np
-from numpy.lib import recfunctions as rfn
-from numba import njit, float64, int64  # , optional
-# import pyqtgraph as pg
-from PyQt5 import QtGui
-# from PyQt5.QtCore import QLineF, QPointF
-
-from ..data._sharedmem import (
-    ShmArray,
-)
-# from .._profile import pg_profile_enabled, ms_slower_then
-from ._compression import (
-    ds_m4,
-)
-
-if TYPE_CHECKING:
-    from ._flows import Renderer
-
-
-def xy_downsample(
-    x,
-    y,
-    uppx,
-
-    x_spacer: float = 0.5,
-
-) -> tuple[np.ndarray, np.ndarray]:
-
-    # downsample whenever more then 1 pixels per datum can be shown.
-    # always refresh data bounds until we get diffing
-    # working properly, see above..
-    bins, x, y = ds_m4(
-        x,
-        y,
-        uppx,
-    )
-
-    # flatten output to 1d arrays suitable for path-graphics generation.
-    x = np.broadcast_to(x[:, None], y.shape)
-    x = (x + np.array(
-        [-x_spacer, 0, 0, x_spacer]
-    )).flatten()
-    y = y.flatten()
-
-    return x, y
-
-
-@njit(
-    # TODO: for now need to construct this manually for readonly arrays, see
-    # https://github.com/numba/numba/issues/4511
-    # ntypes.tuple((float64[:], float64[:], float64[:]))(
-    #     numba_ohlc_dtype[::1],  # contiguous
-    #     int64,
-    #     optional(float64),
-    # ),
-    nogil=True
-)
-def path_arrays_from_ohlc(
-    data: np.ndarray,
-    start: int64,
-    bar_gap: float64 = 0.43,
-
-) -> np.ndarray:
-    '''
-    Generate an array of lines objects from input ohlc data.
-
-    '''
-    size = int(data.shape[0] * 6)
-
-    x = np.zeros(
-        # data,
-        shape=size,
-        dtype=float64,
-    )
-    y, c = x.copy(), x.copy()
-
-    # TODO: report bug for assert @
-    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
-    for i, q in enumerate(data[start:], start):
-
-        # TODO: ask numba why this doesn't work..
-        # open, high, low, close, index = q[
-        #     ['open', 'high', 'low', 'close', 'index']]
-
-        open = q['open']
-        high = q['high']
-        low = q['low']
-        close = q['close']
-        index = float64(q['index'])
-
-        istart = i * 6
-        istop = istart + 6
-
-        # x,y detail the 6 points which connect all vertexes of a ohlc bar
-        x[istart:istop] = (
-            index - bar_gap,
-            index,
-            index,
-            index,
-            index,
-            index + bar_gap,
-        )
-        y[istart:istop] = (
-            open,
-            open,
-            low,
-            high,
-            close,
-            close,
-        )
-
-        # specifies that the first edge is never connected to the
-        # prior bars last edge thus providing a small "gap"/"space"
-        # between bars determined by ``bar_gap``.
-        c[istart:istop] = (1, 1, 1, 1, 1, 0)
-
-    return x, y, c
-
-
-def gen_ohlc_qpath(
-    r: Renderer,
-    data: np.ndarray,
-    array_key: str,  # we ignore this
-    vr: tuple[int, int],
-
-    start: int = 0,  # XXX: do we need this?
-    # 0.5 is no overlap between arms, 1.0 is full overlap
-    w: float = 0.43,
-
-) -> QtGui.QPainterPath:
-    '''
-    More or less direct proxy to ``path_arrays_from_ohlc()``
-    but with closed in kwargs for line spacing.
-
-    '''
-    x, y, c = path_arrays_from_ohlc(
-        data,
-        start,
-        bar_gap=w,
-    )
-    return x, y, c
-
-
-def ohlc_to_line(
-    ohlc_shm: ShmArray,
-    data_field: str,
-    fields: list[str] = ['open', 'high', 'low', 'close']
-
-) -> tuple[
-    np.ndarray,
-    np.ndarray,
-]:
-    '''
-    Convert an input struct-array holding OHLC samples into a pair of
-    flattened x, y arrays with the same size (datums wise) as the source
-    data.
-
-    '''
-    y_out = ohlc_shm.ustruct(fields)
-    first = ohlc_shm._first.value
-    last = ohlc_shm._last.value
-
-    # write pushed data to flattened copy
-    y_out[first:last] = rfn.structured_to_unstructured(
-        ohlc_shm.array[fields]
-    )
-
-    # generate an flat-interpolated x-domain
-    x_out = (
-        np.broadcast_to(
-            ohlc_shm._array['index'][:, None],
-            (
-                ohlc_shm._array.size,
-                # 4,  # only ohlc
-                y_out.shape[1],
-            ),
-        ) + np.array([-0.5, 0, 0, 0.5])
-    )
-    assert y_out.any()
-
-    return (
-        x_out,
-        y_out,
-    )
-
-
-def to_step_format(
-    shm: ShmArray,
-    data_field: str,
-    index_field: str = 'index',
-
-) -> tuple[int, np.ndarray, np.ndarray]:
-    '''
-    Convert an input 1d shm array to a "step array" format
-    for use by path graphics generation.
-
-    '''
-    i = shm._array['index'].copy()
-    out = shm._array[data_field].copy()
-
-    x_out = np.broadcast_to(
-        i[:, None],
-        (i.size, 2),
-    ) + np.array([-0.5, 0.5])
-
-    y_out = np.empty((len(out), 2), dtype=out.dtype)
-    y_out[:] = out[:, np.newaxis]
-
-    # start y at origin level
-    y_out[0, 0] = 0
-    return x_out, y_out
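Though this whole module is deleted in the changeset, its 6-vertex-per-bar encoding is the core trick: each OHLC bar contributes six (x, y) points plus a connect mask, which pyqtgraph turns into a single `QPainterPath`. A hedged single-bar reproduction (NumPy values illustrative; `arrayToQPath` is pyqtgraph's public helper):

    import numpy as np
    import pyqtgraph as pg

    def one_bar_arrays(o, h, l, c, index, gap=0.43):
        # same vertex ordering as the removed ``path_arrays_from_ohlc``
        x = np.array([index - gap, index, index, index, index, index + gap])
        y = np.array([o, o, l, h, c, c])
        # final 0 breaks the segment so adjacent bars stay separated
        connect = np.array([1, 1, 1, 1, 1, 0])
        return x, y, connect

    x, y, connect = one_bar_arrays(o=10, h=12, l=9, c=11, index=0)
    path = pg.functions.arrayToQPath(x, y, connect=connect)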
@@ -15,13 +15,19 @@
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

 """
-Customization of ``pyqtgraph`` core routines to speed up our use mostly
-based on not requiring "scentific precision" for pixel perfect view
-transforms.
+Customization of ``pyqtgraph`` core routines and various types, mostly
+for speedups.
+
+Generally, our use does not require "scientific precision" for pixel
+perfect view transforms.

 """
+from typing import Optional
+
 import pyqtgraph as pg

+from ._axes import Axis
+

 def invertQTransform(tr):
     """Return a QTransform that is the inverse of *tr*.
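The `invertQTransform` context above (and the `_do_overrides()` hunk that follows) uses wholesale module-level monkey-patching: replace a pyqtgraph function so every internal call site picks up the faster, less precise version. A hedged sketch of that pattern, assuming Qt's own `QTransform.inverted()` is accurate enough:

    import pyqtgraph as pg

    def fast_invert(tr):
        # assumption: Qt's built-in inversion suffices since we don't
        # care about potential fp issues inside Qt
        inv, invertible = tr.inverted()
        if not invertible:
            raise Exception('Transform is not invertible.')
        return inv

    # every pyqtgraph-internal call now uses the fast version
    pg.functions.invertQTransform = fast_invert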
@@ -46,3 +52,236 @@ def _do_overrides() -> None:
     """
     # we don't care about potential fp issues inside Qt
     pg.functions.invertQTransform = invertQTransform
+    pg.PlotItem = PlotItem
+
+    # enable "QPainterPathPrivate for faster arrayToQPath" from
+    # https://github.com/pyqtgraph/pyqtgraph/pull/2324
+    pg.setConfigOption('enableExperimental', True)
+
+
+# NOTE: the below customized type contains all our changes on a method
+# by method basis as per the diff:
+# https://github.com/pyqtgraph/pyqtgraph/commit/8e60bc14234b6bec1369ff4192dbfb82f8682920#diff-a2b5865955d2ba703dbc4c35ff01aa761aa28d2aeaac5e68d24e338bc82fb5b1R500
+class PlotItem(pg.PlotItem):
+    '''
+    Overrides for the core plot object mostly pertaining to overlayed
+    multi-view management as it relates to multi-axis management.
+
+    This object is the combination of a ``ViewBox`` and multiple
+    ``AxisItem``s and so far we've added additional functionality and
+    APIs for:
+    - removal of axes
+
+    ---
+
+    From ``pyqtgraph`` super type docs:
+    - Manage placement of ViewBox, AxisItems, and LabelItems
+    - Create and manage a list of PlotDataItems displayed inside the
+      ViewBox
+    - Implement a context menu with commonly used display and analysis
+      options
+
+    '''
+    def __init__(
+        self,
+        parent=None,
+        name=None,
+        labels=None,
+        title=None,
+        viewBox=None,
+        axisItems=None,
+        default_axes=['left', 'bottom'],
+        enableMenu=True,
+        **kargs
+    ):
+        super().__init__(
+            parent=parent,
+            name=name,
+            labels=labels,
+            title=title,
+            viewBox=viewBox,
+            axisItems=axisItems,
+            # default_axes=default_axes,
+            enableMenu=enableMenu,
+            kargs=kargs,
+        )
+        self.name = name
+        self.chart_widget = None
+        # self.setAxisItems(
+        #     axisItems,
+        #     default_axes=default_axes,
+        # )
+
+    # NOTE: this is an entirely new method not in upstream.
+    def removeAxis(
+        self,
+        name: str,
+        unlink: bool = True,
+
+    ) -> Optional[pg.AxisItem]:
+        """
+        Remove an axis from the contained axis items
+        by ``name: str``.
+
+        This means the axis graphics object will be removed
+        from the ``.layout: QGraphicsGridLayout`` as well as unlinked
+        from the underlying associated ``ViewBox``.
+
+        If the ``unlink: bool`` is set to ``False`` then the axis will
+        stay linked to its view and will only be removed from the
+        layout.
+
+        If no axis with ``name: str`` is found then this is a noop.
+
+        Return the axis instance that was removed.
+
+        """
+        entry = self.axes.pop(name, None)
+
+        if not entry:
+            return
+
+        axis = entry['item']
+        self.layout.removeItem(axis)
+        axis.scene().removeItem(axis)
+        if unlink:
+            axis.unlinkFromView()
+
+        self.update()
+
+        return axis
+
+    # Why do we need to always have all axes created?
+    #
+    # I don't understand this at all.
+    #
+    # Everything seems to work if you just always apply the
+    # set passed to this method **EXCEPT** for some super weird reason
+    # the view box geometry still computes as though the space for the
+    # `'bottom'` axis is always there **UNLESS** you always add that
+    # axis but hide it?
+    #
+    # Why in tf would this be the case!?!?
+    def setAxisItems(
+        self,
+        # XXX: yeah yeah, i know we can't use type annots like this yet.
+        axisItems: Optional[dict[str, pg.AxisItem]] = None,
+        add_to_layout: bool = True,
+        default_axes: list[str] = ['left', 'bottom'],
+    ):
+        """
+        Override axis item setting to allow axes to be skipped from
+        layout insertion (needed for the multi-plot overlay case).
+
+        """
+        axisItems = axisItems or {}
+
+        # XXX: wth is this even saying?!?
+        # Array containing visible axis items
+        # Also containing potentially hidden axes, but they are not
+        # touched so it does not matter
+        # visibleAxes = ['left', 'bottom']
+        # Note that it does not matter that this adds
+        # some values to visibleAxes a second time
+
+        # XXX: uhhh wat^ ..?
+
+        visibleAxes = list(default_axes) + list(axisItems.keys())
+
+        # TODO: we should probably invert the loop here to not loop the
+        # predefined "axis name set" and instead loop the `axisItems`
+        # input and lookup indices from a predefined map.
+        for name, pos in (
+            ('top', (1, 1)),
+            ('bottom', (3, 1)),
+            ('left', (2, 0)),
+            ('right', (2, 2))
+        ):
+            if (
+                name in self.axes and
+                name in axisItems
+            ):
+                # we already have an axis entry for this name
+                # so remove the existing entry.
+                self.removeAxis(name)
+
+            # elif name not in axisItems:
+            #     # this axis entry is not provided in this call
+            #     # so remove any old/existing entry.
+            #     self.removeAxis(name)
+
+            # Create new axis
+            if name in axisItems:
+                axis = axisItems[name]
+                if axis.scene() is not None:
+                    if (
+                        name not in self.axes
+                        or axis != self.axes[name]["item"]
+                    ):
+                        raise RuntimeError(
+                            "Can't add an axis to multiple plots. Shared axes"
+                            " can be achieved with multiple AxisItem instances"
+                            " and set[X/Y]Link.")
+
+            else:
+                # Set up new axis
+
+                # XXX: ok but why do we want to add axes for all entries
+                # if not desired by the user? The only reason I can see
+                # adding this is without it there's some weird
+                # ``ViewBox`` geometry bug.. where a gap for the
+                # 'bottom' axis is somehow left in?
+                # axis = pg.AxisItem(orientation=name, parent=self)
+                axis = Axis(
+                    self,
+                    orientation=name,
+                    parent=self,
+                )
+
+            axis.linkToView(self.vb)
+
+            # XXX: shouldn't you already know the ``pos`` from the name?
+            # Oh right, instead of a global map that would let you
+            # easily look that up it's redefined over and over and over
+            # again in methods..
+            self.axes[name] = {'item': axis, 'pos': pos}
+
+            # NOTE: in the overlay case the axis may be added to some
+            # other layout and should not be added here.
+            if add_to_layout:
+                self.layout.addItem(axis, *pos)
+
+            # place axis above images at z=0, items that want to draw
+            # over the axes should be placed at z>=1:
+            axis.setZValue(0.5)
+            axis.setFlag(
+                axis.GraphicsItemFlag.ItemNegativeZStacksBehindParent
+            )
+            if name in visibleAxes:
+                self.showAxis(name, True)
+            else:
+                # why do we need to insert all axes to ``.axes`` and
+                # only hide the ones the user doesn't specify? It all
+                # seems to work fine without doing this except for this
+                # weird gap for the 'bottom' axis that always shows up
+                # in the view box geometry??
+                self.hideAxis(name)
+
+    def updateGrid(
+        self,
+        *args,
+    ):
+        alpha = self.ctrl.gridAlphaSlider.value()
+        x = alpha if self.ctrl.xGridCheck.isChecked() else False
+        y = alpha if self.ctrl.yGridCheck.isChecked() else False
+        for name, dim in (
+            ('top', x),
+            ('bottom', x),
+            ('left', y),
+            ('right', y)
+        ):
+            if name in self.axes:
+                self.getAxis(name).setGrid(dim)
+        # self.getAxis('bottom').setGrid(x)
+        # self.getAxis('left').setGrid(y)
+        # self.getAxis('right').setGrid(y)
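A short, hedged usage sketch of the new `removeAxis()` API; the module path and setup are assumptions (piker appears to expose these from `piker.ui._pg_overrides`), and a running Qt app/scene is required:

    import pyqtgraph as pg
    from piker.ui._pg_overrides import _do_overrides  # assumed path

    _do_overrides()  # installs the PlotItem subclass + perf tweaks

    app = pg.mkQApp()
    win = pg.GraphicsLayoutWidget()
    plot = win.addPlot(name='ohlc')  # now constructs the override type

    # drop the top axis from the layout but keep it view-linked so an
    # overlay layout can re-home it later:
    axis = plot.removeAxis('top', unlink=False)
    print('removed:', axis)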