Compare commits: fsp_hotfix...310_plus
775 commits
Author | SHA1 | Date |
---|---|---|
Tyler Goodlet | ac0f43dc98 | |
goodboy | 3977f1cc7e | |
Tyler Goodlet | e45cb9d08a | |
Tyler Goodlet | 27c523ca74 | |
Tyler Goodlet | b8b76a32a6 | |
Tyler Goodlet | dcee0ddd55 | |
goodboy | 67eab85f06 | |
Tyler Goodlet | afc95b8592 | |
Tyler Goodlet | 14c98d82ee | |
goodboy | b87aa30031 | |
Tyler Goodlet | 958f53d8e9 | |
Tyler Goodlet | ba43b54175 | |
Tyler Goodlet | de970755d7 | |
goodboy | 7ddebf6773 | |
Tyler Goodlet | 8eb4a427da | |
Tyler Goodlet | da5dea9f99 | |
Tyler Goodlet | 3074773662 | |
Tyler Goodlet | 4099b53ea2 | |
goodboy | 633fa7cc3a | |
Tyler Goodlet | 1345b250bc | |
goodboy | e9f0ea3daa | |
Tyler Goodlet | 569674517f | |
Tyler Goodlet | bf7397f031 | |
Tyler Goodlet | 85c2f6e79f | |
Tyler Goodlet | 1c1661b783 | |
Tyler Goodlet | 99eabe34c9 | |
Tyler Goodlet | 827b5f9c45 | |
Tyler Goodlet | 41f24f3de6 | |
Tyler Goodlet | 34975dfbd5 | |
goodboy | f6b54f02c0 | |
Tyler Goodlet | 44c242a794 | |
Tyler Goodlet | 99965e7601 | |
Tyler Goodlet | e5f96391e3 | |
Tyler Goodlet | a66934a49d | |
Tyler Goodlet | 55772efb34 | |
Tyler Goodlet | 736178adfd | |
Tyler Goodlet | d770867163 | |
Tyler Goodlet | c518553aa9 | |
Tyler Goodlet | 4138cef512 | |
Tyler Goodlet | 0f4bfcdf22 | |
Tyler Goodlet | 80835d4e04 | |
Tyler Goodlet | e6d03ba97f | |
Tyler Goodlet | b71e8c5e6d | |
Tyler Goodlet | 064d185395 | |
Tyler Goodlet | 363ba8f9ae | |
Tyler Goodlet | fc24f5efd1 | |
Tyler Goodlet | a7ff47158b | |
Tyler Goodlet | 57acc3bd29 | |
Tyler Goodlet | 8f1faf97ee | |
Tyler Goodlet | 3ab91deaec | |
Tyler Goodlet | 6f00617bd3 | |
Tyler Goodlet | 2c2c453932 | |
Tyler Goodlet | 360643b32f | |
Tyler Goodlet | ab0def22c1 | |
Tyler Goodlet | a9ec1a97dd | |
Tyler Goodlet | d61b636487 | |
Tyler Goodlet | 88ac2fda52 | |
Tyler Goodlet | 08c83afa90 | |
Tyler Goodlet | 066b8df619 | |
Tyler Goodlet | d4f31f2b3c | |
Tyler Goodlet | 04897fd402 | |
Tyler Goodlet | 42572d3808 | |
Tyler Goodlet | 8ce7e99210 | |
Tyler Goodlet | 1b38628b09 | |
Tyler Goodlet | bbe1ff19ef | |
Tyler Goodlet | eca2401ab5 | |
Tyler Goodlet | 5d91516b41 | |
Tyler Goodlet | b985b48eb3 | |
Tyler Goodlet | c256d3bdc0 | |
Tyler Goodlet | f5de361f49 | |
Tyler Goodlet | 432d4545c2 | |
Tyler Goodlet | fa30df36ba | |
Tyler Goodlet | 17456d96e0 | |
Tyler Goodlet | 167ae96566 | |
Tyler Goodlet | aa0efe1523 | |
Tyler Goodlet | 664a208ae5 | |
Tyler Goodlet | 876add4fc2 | |
Tyler Goodlet | 72e849c651 | |
Tyler Goodlet | b3ae562e4f | |
Tyler Goodlet | b5b9ecf4b1 | |
Tyler Goodlet | 1dab77ca0b | |
Tyler Goodlet | 4c7661fc23 | |
Tyler Goodlet | e258654c86 | |
Tyler Goodlet | 81be0b4bd0 | |
Tyler Goodlet | df1c89e811 | |
Tyler Goodlet | f67fd11a29 | |
Tyler Goodlet | 1f95ba4fd8 | |
Tyler Goodlet | 1dca7766d2 | |
Tyler Goodlet | b236dc72e4 | |
Tyler Goodlet | 27ee9fdc81 | |
Tyler Goodlet | 5d294031f2 | |
Tyler Goodlet | 537b725bf3 | |
Tyler Goodlet | ca5a25f921 | |
Tyler Goodlet | 037300ced0 | |
Tyler Goodlet | 9c5bc6deda | |
Tyler Goodlet | bc50db5925 | |
Tyler Goodlet | e8e26232ea | |
Tyler Goodlet | f6909ae395 | |
Tyler Goodlet | b609f46d26 | |
Tyler Goodlet | 5d26609693 | |
Tyler Goodlet | 09e988ec3e | |
Tyler Goodlet | 5e602214be | |
Tyler Goodlet | cfc4198837 | |
Tyler Goodlet | c455df7fa8 | |
Tyler Goodlet | 47cf4aa4f7 | |
Tyler Goodlet | 4f36743f64 | |
Tyler Goodlet | 1fcb9233b4 | |
Tyler Goodlet | fb38265199 | |
Tyler Goodlet | e163a7e336 | |
Tyler Goodlet | 36a10155bc | |
Tyler Goodlet | 7a3437348d | |
Tyler Goodlet | 0744dd0415 | |
Tyler Goodlet | 0770a39125 | |
Tyler Goodlet | 2b6041465c | |
Tyler Goodlet | 859eaffa29 | |
Tyler Goodlet | b12921678b | |
Tyler Goodlet | 186658ab09 | |
Tyler Goodlet | 12d60e6d9c | |
Tyler Goodlet | c5beecf8a1 | |
Tyler Goodlet | 629ea8ba9d | |
Tyler Goodlet | ba0ba346ec | |
Tyler Goodlet | 82b2d2ee3a | |
Tyler Goodlet | b2b31b8f84 | |
Tyler Goodlet | b97ec38baf | |
Tyler Goodlet | 64c6287cd1 | |
Tyler Goodlet | 69282a9924 | |
Tyler Goodlet | aee44fed46 | |
Tyler Goodlet | db727910be | |
Tyler Goodlet | 64206543cd | |
Tyler Goodlet | af6aad4e9c | |
Tyler Goodlet | c94c53286b | |
Tyler Goodlet | 2af4050e5e | |
Tyler Goodlet | df78e9ba96 | |
Tyler Goodlet | 7e1ec7b5a7 | |
Tyler Goodlet | 3dbce6f891 | |
Tyler Goodlet | 239c9d701a | |
Tyler Goodlet | 427a33654b | |
Tyler Goodlet | f4dc0fbab8 | |
Tyler Goodlet | e0a72a2174 | |
Tyler Goodlet | 5a9bab0b69 | |
Tyler Goodlet | d0af280a59 | |
Tyler Goodlet | 599c77ff84 | |
Tyler Goodlet | c097016fd2 | |
goodboy | be7c4e70f0 | |
Tyler Goodlet | 051680e259 | |
Tyler Goodlet | 55a453a710 | |
Tyler Goodlet | 88eccc1e15 | |
Tyler Goodlet | 488506d8b8 | |
Tyler Goodlet | 78b9333bcd | |
Tyler Goodlet | 7229a39f47 | |
Tyler Goodlet | d870a09a4b | |
Tyler Goodlet | 5d53ecb433 | |
Tyler Goodlet | 06832b94d4 | |
Tyler Goodlet | 8d6c5b214e | |
Tyler Goodlet | a5389beccd | |
Tyler Goodlet | 26f47227d2 | |
Tyler Goodlet | b357a120b9 | |
Tyler Goodlet | aba8b05a33 | |
Tyler Goodlet | c3142aec81 | |
Tyler Goodlet | bff625725e | |
Tyler Goodlet | 6f172479eb | |
Tyler Goodlet | a96f1dec3a | |
goodboy | 86caf5f6a3 | |
Tyler Goodlet | 72b4273ddc | |
Tyler Goodlet | 4281936ff4 | |
Guillermo Rodriguez | 4ddf04f68b | |
dinkus | 339fcda727 | |
Tyler Goodlet | 4b7d7d688e | |
Tyler Goodlet | 7ae7b2f864 | |
Tyler Goodlet | fa9f8c78c3 | |
Tyler Goodlet | 3bbbc21d2b | |
Tyler Goodlet | b03603a6b4 | |
Tyler Goodlet | 81b77df544 | |
Tyler Goodlet | a79a99fc71 | |
goodboy | 9f47515f59 | |
Tyler Goodlet | 09f2f32d5b | |
Tyler Goodlet | e718120cc7 | |
Tyler Goodlet | fb5df5ab5e | |
Tyler Goodlet | 6e2e2fc03f | |
Tyler Goodlet | a3b2ba9ae9 | |
goodboy | 7083c5a0bd | |
Tyler Goodlet | de55565f60 | |
Tyler Goodlet | d0530c4e26 | |
goodboy | 21b16b4a9e | |
Tyler Goodlet | ed85079d0f | |
goodboy | 5b540a53e9 | |
Tyler Goodlet | fb91e27651 | |
goodboy | 482fc1da10 | |
Tyler Goodlet | b3f9c4f93d | |
Tyler Goodlet | 09431aad85 | |
Tyler Goodlet | 8219307bf5 | |
Tyler Goodlet | b910eceb3b | |
Tyler Goodlet | 1657f51edc | |
Tyler Goodlet | b1246446c2 | |
Tyler Goodlet | 083a3296e7 | |
Tyler Goodlet | 769e803695 | |
Tyler Goodlet | e196e9d1a0 | |
Tyler Goodlet | 9ddfae44d2 | |
Tyler Goodlet | 277ca29018 | |
Tyler Goodlet | 26fddae3c0 | |
Tyler Goodlet | 4b6ecbfc79 | |
Tyler Goodlet | 30ddf63ec0 | |
Tyler Goodlet | 8e08fb7b23 | |
Tyler Goodlet | fb9b6990ae | |
Tyler Goodlet | 1676bceee1 | |
Tyler Goodlet | c9a621fc2a | |
Tyler Goodlet | 0324404b03 | |
Tyler Goodlet | 61e9db3229 | |
Tyler Goodlet | 4a6f01747c | |
Tyler Goodlet | e4a900168d | |
Tyler Goodlet | 40753ae93c | |
Tyler Goodlet | 969530ba19 | |
Tyler Goodlet | 9b5f052597 | |
Tyler Goodlet | b44786e5b7 | |
Tyler Goodlet | 7e951f17ca | |
Tyler Goodlet | fcb85873de | |
Tyler Goodlet | 7b1c0939bd | |
Tyler Goodlet | d77cfa3587 | |
Tyler Goodlet | 49509d55d2 | |
Tyler Goodlet | 6ba3c15c4e | |
Tyler Goodlet | a3db5d1bdc | |
Tyler Goodlet | c672493998 | |
Tyler Goodlet | 423af37389 | |
Tyler Goodlet | 0061fabb56 | |
Tyler Goodlet | 2f04a8c939 | |
Tyler Goodlet | 8bf40ae299 | |
Tyler Goodlet | 0f683205f4 | |
Tyler Goodlet | d244af69c9 | |
Tyler Goodlet | b8b95f1081 | |
Tyler Goodlet | 3056bc3143 | |
Tyler Goodlet | d3824c8c0b | |
Tyler Goodlet | 727d3cc027 | |
Tyler Goodlet | 46c23e90db | |
Tyler Goodlet | bcf3be1fe4 | |
Tyler Goodlet | 7d8cf3eaf8 | |
Tyler Goodlet | d4e0d4463f | |
Tyler Goodlet | ab8629aa11 | |
Tyler Goodlet | 2a07005c97 | |
Tyler Goodlet | 79160619bc | |
Tyler Goodlet | e1a88cb93c | |
Tyler Goodlet | a6c5902437 | |
Tyler Goodlet | a10dc4fe77 | |
Tyler Goodlet | 71416f5752 | |
Tyler Goodlet | 9fe5cd647a | |
Tyler Goodlet | 15630f465d | |
Tyler Goodlet | ce3229df7d | |
Tyler Goodlet | 53ad5e6f65 | |
Tyler Goodlet | 41325ad418 | |
Tyler Goodlet | a971de2b67 | |
Tyler Goodlet | ca48577c60 | |
Tyler Goodlet | 950cb03e07 | |
Tyler Goodlet | 907b7dd5c6 | |
Tyler Goodlet | 6cdd017cd6 | |
Tyler Goodlet | 6dc6d00a9b | |
Tyler Goodlet | ba250c7197 | |
Tyler Goodlet | 565573b609 | |
Tyler Goodlet | 39b4d2684a | |
Tyler Goodlet | 25dfe4115d | |
Tyler Goodlet | 6c6f2abd06 | |
Tyler Goodlet | 9138f376f7 | |
Tyler Goodlet | f582af4c9f | |
Tyler Goodlet | dd2edaeb3c | |
Tyler Goodlet | 3d6d77364b | |
Tyler Goodlet | 8003878248 | |
Tyler Goodlet | 706c8085f2 | |
Tyler Goodlet | cbe74d126e | |
Tyler Goodlet | 3dba456cf8 | |
Tyler Goodlet | 4555a1f279 | |
Tyler Goodlet | a2fe814857 | |
Tyler Goodlet | 8c558d05d6 | |
Tyler Goodlet | e1bbcff8e0 | |
Tyler Goodlet | ba82a18890 | |
Tyler Goodlet | d9773217e9 | |
Tyler Goodlet | 2c51ad2a0d | |
Tyler Goodlet | 56fa759452 | |
Tyler Goodlet | 4bcc301c01 | |
Tyler Goodlet | 445b82283d | |
Tyler Goodlet | 8047714101 | |
Tyler Goodlet | 970393bb85 | |
Tyler Goodlet | ed5bae0e11 | |
Tyler Goodlet | facc86f76e | |
Tyler Goodlet | 7395b56321 | |
Tyler Goodlet | aecc5973fa | |
Tyler Goodlet | faa5a785cb | |
Tyler Goodlet | 7d2e9bff46 | |
Tyler Goodlet | ec413541d3 | |
Tyler Goodlet | 9203ebe044 | |
Tyler Goodlet | fbd3d1e308 | |
Tyler Goodlet | 1cdb94374c | |
Tyler Goodlet | aca3ca8aa6 | |
Guillermo Rodriguez | 943b02573d | |
Guillermo Rodriguez | 897a5cf2f6 | |
Guillermo Rodriguez | 3c09bfba57 | |
goodboy | c849bb9c4c | |
Tyler Goodlet | 34e6db6d98 | |
goodboy | 84399e8131 | |
Tyler Goodlet | 5921d18d66 | |
Tyler Goodlet | cdc882657a | |
Tyler Goodlet | 62d08eaf85 | |
Tyler Goodlet | f2f00dcc52 | |
Tyler Goodlet | ee831baeb3 | |
Tyler Goodlet | 7c615a403b | |
Tyler Goodlet | b8374dbe9a | |
Tyler Goodlet | 454cd7920d | |
Tyler Goodlet | ca283660de | |
Tyler Goodlet | d4eddbdb25 | |
Tyler Goodlet | eec329a221 | |
Tyler Goodlet | a1de097a43 | |
Tyler Goodlet | b5f2558cec | |
Tyler Goodlet | 1a95712680 | |
Tyler Goodlet | b20e9e58ee | |
Tyler Goodlet | 4bc2bbda69 | |
Tyler Goodlet | b524929cb6 | |
Tyler Goodlet | f95d22bfd3 | |
Tyler Goodlet | 91de281b7e | |
Tyler Goodlet | 2284e61eda | |
Tyler Goodlet | 082b02776c | |
Tyler Goodlet | 27e3d0ef80 | |
Tyler Goodlet | eeca9eb4c7 | |
Tyler Goodlet | 9bbfa4be02 | |
Tyler Goodlet | ce85031ef2 | |
Tyler Goodlet | b6f852e0ad | |
Tyler Goodlet | fdd5aa33d2 | |
Tyler Goodlet | 82732e3f17 | |
Tyler Goodlet | 2c1daab990 | |
Tyler Goodlet | a9e1c6c50e | |
Tyler Goodlet | ef03b8e987 | |
Tyler Goodlet | 3b90b1f960 | |
Tyler Goodlet | 1cf6ba789c | |
Tyler Goodlet | 49c25eeef4 | |
Tyler Goodlet | 5bcd6ac494 | |
Tyler Goodlet | 5da9f7fdb4 | |
Tyler Goodlet | 5128e4c304 | |
Tyler Goodlet | 6410c68e2e | |
Tyler Goodlet | 947a514153 | |
Tyler Goodlet | 8627f6f6c5 | |
Tyler Goodlet | 5800c10901 | |
Tyler Goodlet | 28bf8853aa | |
Tyler Goodlet | 86da64c2c2 | |
Tyler Goodlet | d59442e3b1 | |
Tyler Goodlet | 5e161aa251 | |
Tyler Goodlet | 9b2ec871a0 | |
Tyler Goodlet | 2b12742992 | |
Tyler Goodlet | b262532fd4 | |
Tyler Goodlet | 561d7e0349 | |
Tyler Goodlet | 3a6c5a2fbd | |
Tyler Goodlet | 88a7314bd0 | |
Tyler Goodlet | 1abe513ecb | |
Tyler Goodlet | 44f3a08ef1 | |
Tyler Goodlet | 03e0e3e76b | |
Tyler Goodlet | 08f90c275c | |
Tyler Goodlet | 7edfe68d4d | |
Tyler Goodlet | ff00993412 | |
Tyler Goodlet | ed03d77e6e | |
Tyler Goodlet | 1a0e89d07e | |
Tyler Goodlet | 56c163cdd7 | |
Tyler Goodlet | c4242acc21 | |
Tyler Goodlet | 772f871272 | |
Tyler Goodlet | 1ad83e4556 | |
Tyler Goodlet | bedb55b79d | |
Tyler Goodlet | 03a08b5f63 | |
Tyler Goodlet | 8f26335aea | |
Tyler Goodlet | f1f257d4a2 | |
Tyler Goodlet | d02b1a17ad | |
Tyler Goodlet | 4d4f745918 | |
Tyler Goodlet | 39b7c9340d | |
Tyler Goodlet | e7481b1634 | |
Tyler Goodlet | 09d95157dc | |
Tyler Goodlet | ea5b8f1dd0 | |
Tyler Goodlet | 7e49b7c033 | |
Tyler Goodlet | e7dc1a036b | |
Tyler Goodlet | ab8ea41b93 | |
Tyler Goodlet | dbe55ad4d2 | |
Tyler Goodlet | d7a9928293 | |
Tyler Goodlet | 02300efb59 | |
Tyler Goodlet | 7c4e55ed2c | |
Tyler Goodlet | 7811508307 | |
Tyler Goodlet | 7e853fe345 | |
Tyler Goodlet | 11f8c4f350 | |
Tyler Goodlet | 7577443f95 | |
goodboy | 01cc8f347e | |
wattygetlood | 9eefc3a521 | |
goodboy | 67cec4bc54 | |
Zoltan | 34df818ed9 | |
Konstantine Tsafatinos | 773ed5e7ad | |
Konstantine Tsafatinos | 59434b9a8a | |
Konstantine Tsafatinos | 250d9cbc03 | |
goodboy | 3ac9c55535 | |
Tyler Goodlet | bcb4fe8c50 | |
Tyler Goodlet | d8db9233c9 | |
Tyler Goodlet | 82f2fa2d37 | |
Tyler Goodlet | 8195fae289 | |
Tyler Goodlet | 30656eda39 | |
Tyler Goodlet | 2564acea1b | |
Tyler Goodlet | b3efa2874b | |
Tyler Goodlet | ad1bbe74ad | |
Tyler Goodlet | 761b823939 | |
Tyler Goodlet | b75a3310fe | |
Tyler Goodlet | ed8cfcf66d | |
Tyler Goodlet | 72ec34ffd2 | |
Tyler Goodlet | d334e61b1f | |
goodboy | fbabfb78e0 | |
Tyler Goodlet | 4d23f6e4d7 | |
Tyler Goodlet | 8b1c521ae9 | |
Tyler Goodlet | 7586e20ab4 | |
Tyler Goodlet | 80d70216f7 | |
Tyler Goodlet | d1f45b0883 | |
Tyler Goodlet | 00a7f20292 | |
Tyler Goodlet | 0178fcd26f | |
Tyler Goodlet | 24fa1b8ff7 | |
Tyler Goodlet | 66ea74c6d5 | |
Tyler Goodlet | b579d4b1f5 | |
Tyler Goodlet | 874374af06 | |
Tyler Goodlet | 62d073dc18 | |
Tyler Goodlet | 3e125625b1 | |
Tyler Goodlet | 8395a1fcfe | |
Tyler Goodlet | 957686a9fe | |
Tyler Goodlet | 1e433ca4f4 | |
Tyler Goodlet | 937406534c | |
Tyler Goodlet | b26b66cc66 | |
Tyler Goodlet | 7936dcafbf | |
Tyler Goodlet | d32c26c5d7 | |
Tyler Goodlet | d2d3286fb8 | |
goodboy | 310a17e93b | |
Tyler Goodlet | a45156cbb7 | |
Tyler Goodlet | 6324624811 | |
Tyler Goodlet | 3762466a58 | |
Tyler Goodlet | 289a69bf41 | |
goodboy | 253cbf901c | |
Tyler Goodlet | 4b0ca40b17 | |
Tyler Goodlet | ebe2680355 | |
Tyler Goodlet | e92632bd34 | |
Tyler Goodlet | 32e316ebff | |
Tyler Goodlet | f604437897 | |
Tyler Goodlet | c9e6c81459 | |
Tyler Goodlet | ce7d630676 | |
Tyler Goodlet | 6ac60fbe22 | |
Tyler Goodlet | 998a5acd92 | |
Tyler Goodlet | 493e45e70a | |
Tyler Goodlet | c7f3e59105 | |
Tyler Goodlet | d62a636bcc | |
Tyler Goodlet | d0205e726b | |
Tyler Goodlet | 8df614465c | |
Tyler Goodlet | 81cd696ec8 | |
Tyler Goodlet | a6e32e7530 | |
Tyler Goodlet | 7bd5b42f9e | |
Tyler Goodlet | 76f398bd9f | |
Tyler Goodlet | 7f36e85815 | |
Tyler Goodlet | 8462ea8a28 | |
Tyler Goodlet | e9d64ffee8 | |
Tyler Goodlet | b16167b8f3 | |
Tyler Goodlet | 434c340cb8 | |
Tyler Goodlet | 94e2103bf5 | |
Tyler Goodlet | cc026dfb1d | |
Tyler Goodlet | 97c2a2da3e | |
goodboy | 039d06cc48 | |
Tyler Goodlet | 58517295d2 | |
Tyler Goodlet | c39fa825d0 | |
Tyler Goodlet | 88306a6c1e | |
Tyler Goodlet | c034ea742f | |
goodboy | d26fea70c7 | |
Konstantine Tsafatinos | cb970cef46 | |
Konstantine Tsafatinos | c2e654aae2 | |
Konstantine Tsafatinos | 2baa1b4605 | |
Konstantine Tsafatinos | cb8e97a142 | |
Konstantine Tsafatinos | 1525c645ce | |
Konstantine Tsafatinos | fd0acd21fb | |
Konstantine Tsafatinos | 617bf3e0da | |
Konstantine Tsafatinos | a3345dbba2 | |
Konstantine Tsafatinos | ee0be13af1 | |
Konstantine Tsafatinos | b1bff1be85 | |
Konstantine Tsafatinos | 46948e0a8b | |
Konstantine Tsafatinos | d826a66c8c | |
Konstantine Tsafatinos | 6c54c81f01 | |
Tyler Goodlet | 0122669dd4 | |
Konstantine Tsafatinos | 0c905920e2 | |
Konstantine Tsafatinos | 03d2eddce3 | |
Konstantine Tsafatinos | 96dd5c632f | |
Konstantine Tsafatinos | b21bbf5031 | |
Konstantine Tsafatinos | 66da58525d | |
Konstantine Tsafatinos | b55debbe95 | |
Konstantine Tsafatinos | 1fe1f88806 | |
Konstantine Tsafatinos | 3d2be3674e | |
Konstantine Tsafatinos | 48c7b5262c | |
Konstantine Tsafatinos | ef598444c4 | |
Konstantine Tsafatinos | 0285a847d8 | |
Konstantine Tsafatinos | 88061d8799 | |
Konstantine Tsafatinos | e12af8aa4c | |
Konstantine Tsafatinos | 184edb2a90 | |
Konstantine Tsafatinos | b88dd380a3 | |
goodboy | bc59d476b1 | |
Tyler Goodlet | 01f5f2d015 | |
Tyler Goodlet | af3d624281 | |
Tyler Goodlet | 2c9612ebd8 | |
Tyler Goodlet | 16b9e39e11 | |
Tyler Goodlet | 6889a25926 | |
goodboy | 5fb85d9ea0 | |
wattygetlood | e04a7dceb2 | |
wattygetlood | cb69e89218 | |
Tyler Goodlet | f7d03489d8 | |
Tyler Goodlet | 09079b61fc | |
Tyler Goodlet | 9d4e1c885f | |
Tyler Goodlet | adccb687fe | |
Tyler Goodlet | c239faf4e5 | |
Tyler Goodlet | 6f3d78b729 | |
Tyler Goodlet | 3e7d4f8717 | |
Tyler Goodlet | b1cce8f9cf | |
Tyler Goodlet | 89a98c4aa2 | |
Tyler Goodlet | 7a943f0e1e | |
Tyler Goodlet | 786ffde4e6 | |
Tyler Goodlet | 11d4ebd0b5 | |
Tyler Goodlet | 81f8b4e145 | |
Tyler Goodlet | cc55e1f4bb | |
Tyler Goodlet | 412c9ee6cf | |
Tyler Goodlet | bf3b58e861 | |
Tyler Goodlet | 1d3ed6c333 | |
Tyler Goodlet | 832e4c97d2 | |
Tyler Goodlet | 23aa7eb31c | |
Tyler Goodlet | c2a13c474c | |
Tyler Goodlet | 7252094f90 | |
Tyler Goodlet | b1dd24d1f7 | |
Tyler Goodlet | a073039b30 | |
Tyler Goodlet | 5c343aa748 | |
wattygetlood | 9ddaf0f4e7 | |
wattygetlood | 7037d04df4 | |
wattygetlood | 45b5902d77 | |
goodboy | 1440e0b58f | |
Tyler Goodlet | 7b13124dd4 | |
Tyler Goodlet | ca1c1cf415 | |
goodboy | cde090bf24 | |
Tyler Goodlet | 92c63988bc | |
Tyler Goodlet | 9ed153bcb6 | |
Tyler Goodlet | 412c34eba0 | |
Tyler Goodlet | 68e1db27f8 | |
Tyler Goodlet | 86b1316691 | |
Tyler Goodlet | 890ffc76cf | |
Tyler Goodlet | 51d94a301a | |
Tyler Goodlet | c54c9ae3d3 | |
Tyler Goodlet | 5a4c155798 | |
goodboy | 14faf2d245 | |
wattygetlood | a5ad24f770 | |
Tyler Goodlet | a0034e2948 | |
wattygetlood | fc3c0741b8 | |
wattygetlood | cc87508fd9 | |
wattygetlood | d069481f1d | |
wattygetlood | c411a244f6 | |
wattygetlood | 15556e40f0 | |
wattygetlood | c0082e15bc | |
wattygetlood | 2ebdf008da | |
Tyler Goodlet | 71f9b5c000 | |
Tyler Goodlet | 228f21d7b0 | |
Tyler Goodlet | 45464a5465 | |
Tyler Goodlet | 723eef3fd6 | |
Tyler Goodlet | e0462f0a8c | |
Tyler Goodlet | 1c49f7f47f | |
Tyler Goodlet | ef04781a2b | |
Tyler Goodlet | e3a3fd2d39 | |
Tyler Goodlet | 860ed99757 | |
Tyler Goodlet | 326b2c089a | |
Tyler Goodlet | 8f467bf4f0 | |
Tyler Goodlet | 4a7b2d835b | |
Tyler Goodlet | 30cf54480d | |
Tyler Goodlet | ee4ad32d3b | |
Tyler Goodlet | e7516447df | |
Tyler Goodlet | a006b87546 | |
Tyler Goodlet | 9490129a74 | |
Tyler Goodlet | 2f2aef28dd | |
Tyler Goodlet | 0271841412 | |
Tyler Goodlet | e8d7709358 | |
Tyler Goodlet | e3c46a5d4d | |
Tyler Goodlet | 8d432e1988 | |
Tyler Goodlet | 87653ddca2 | |
Tyler Goodlet | 73faafcfc1 | |
Tyler Goodlet | e4244e96a9 | |
Tyler Goodlet | 5274eb538c | |
Tyler Goodlet | b358b8e874 | |
Tyler Goodlet | 2d3c685e19 | |
Tyler Goodlet | 4570b06c26 | |
Tyler Goodlet | 26b0071471 | |
Tyler Goodlet | 1fc6429f75 | |
Tyler Goodlet | ebf3e00438 | |
Tyler Goodlet | df6afe24a4 | |
Tyler Goodlet | d130f0449f | |
Tyler Goodlet | efb743fd85 | |
Tyler Goodlet | 615bf3a55a | |
Tyler Goodlet | d4f79a6245 | |
Tyler Goodlet | 4b7d1fb35b | |
Tyler Goodlet | 0b5250d5e3 | |
Tyler Goodlet | 97c2f86092 | |
Tyler Goodlet | f3289c1977 | |
Tyler Goodlet | 4e96dd09e3 | |
Tyler Goodlet | b81209e78e | |
Tyler Goodlet | dfe4473c9a | |
Tyler Goodlet | 1aae40cdeb | |
Tyler Goodlet | 8118a57b9a | |
Tyler Goodlet | 5952e7f538 | |
goodboy | cef2cdd6b6 | |
Tyler Goodlet | 16c04e11e4 | |
Tyler Goodlet | 9bfad86c29 | |
Tyler Goodlet | a9d42b374f | |
goodboy | 43b39d3b6b | |
Tyler Goodlet | 174590ee88 | |
Tyler Goodlet | 00a90e7390 | |
Tyler Goodlet | 1aaa382036 | |
Tyler Goodlet | 999d3efdd7 | |
Tyler Goodlet | f63a7c497d | |
goodboy | 022f90df09 | |
Guillermo Rodriguez | 82d1b85b09 | |
goodboy | a2698c73b5 | |
Tyler Goodlet | 20a24283a1 | |
Tyler Goodlet | bb8fade16f | |
Tyler Goodlet | 296863348d | |
Tyler Goodlet | 95b31cbc0f | |
Tyler Goodlet | 6a0fba1eb3 | |
Tyler Goodlet | 06934be047 | |
Tyler Goodlet | 28b5be0719 | |
Tyler Goodlet | 67de8f24b9 | |
Tyler Goodlet | b112f24e7e | |
Tyler Goodlet | bd2460846e | |
Tyler Goodlet | be93ded0e5 | |
Tyler Goodlet | 9d9929fb89 | |
Tyler Goodlet | cc5390376c | |
Tyler Goodlet | 72f4474273 | |
Tyler Goodlet | c6a3c66e7e | |
Tyler Goodlet | 13b8807f1f | |
Tyler Goodlet | 55cfe6082b | |
goodboy | 8fe2bd6614 | |
Tyler Goodlet | d351fe14a8 | |
Tyler Goodlet | 4e884aec6c | |
Tyler Goodlet | 7b21ddd27f | |
Tyler Goodlet | fd31b843b9 | |
Tyler Goodlet | 7f4546b71f | |
Tyler Goodlet | f5eb34c4d7 | |
Tyler Goodlet | c7a588cf25 | |
Tyler Goodlet | 5c2d3125b4 | |
Tyler Goodlet | f011234285 | |
Tyler Goodlet | ce7c174059 | |
Tyler Goodlet | 94b6f370a9 | |
Tyler Goodlet | 349040dbf0 | |
Tyler Goodlet | 80079105fc | |
Tyler Goodlet | 8911c3c8ed | |
goodboy | a40e949940 | |
Tyler Goodlet | 9813cf4169 | |
Tyler Goodlet | c3c1e14cf4 | |
Tyler Goodlet | 404f5d6d23 | |
Tyler Goodlet | e22a652852 | |
Tyler Goodlet | 09fc901b0d | |
Tyler Goodlet | e69af9e291 | |
Tyler Goodlet | 06fe2bd1be | |
Tyler Goodlet | 7a41c83f84 | |
Tyler Goodlet | b7f27f201f | |
goodboy | 05b8e3a199 | |
Tyler Goodlet | 0ae79d6418 | |
Tyler Goodlet | d170132eb5 | |
Tyler Goodlet | ced310c194 | |
Tyler Goodlet | fbb765e1d8 | |
Tyler Goodlet | 80d16886cb | |
Tyler Goodlet | e66b3792bb | |
Tyler Goodlet | 4b89f7197a | |
Tyler Goodlet | 637c9c65e9 | |
Tyler Goodlet | 12e04d57f8 | |
Tyler Goodlet | 6f07c5e255 | |
Tyler Goodlet | 65c3cc5f5f | |
Tyler Goodlet | 3225f254f4 | |
Tyler Goodlet | 1ccff37677 | |
Tyler Goodlet | 9e18afe0d7 | |
goodboy | 0131160896 | |
Tyler Goodlet | 8e390278f5 | |
Tyler Goodlet | 47d0c81a2d | |
Tyler Goodlet | 0744eb78a6 | |
Tyler Goodlet | 16dfc75ad0 | |
Tyler Goodlet | 50713030f8 | |
Tyler Goodlet | 54712827ee | |
Tyler Goodlet | 7e9cbd7d9e | |
Tyler Goodlet | 2877d7e4ce | |
Tyler Goodlet | 78e52566c6 | |
Tyler Goodlet | b63ce088f2 | |
Tyler Goodlet | 74fe27eb2c | |
Tyler Goodlet | c52d63c762 | |
Tyler Goodlet | 49bdbf29be | |
Tyler Goodlet | d27214621d | |
goodboy | ff8c33cf7e | |
Tyler Goodlet | 9c57f10e77 | |
Tyler Goodlet | 8722cf4c49 | |
Tyler Goodlet | 644ac6661c | |
Tyler Goodlet | 56b65a1cde | |
Tyler Goodlet | f21c68a672 | |
Tyler Goodlet | c1cf4c7876 | |
Tyler Goodlet | 61331fee67 | |
Tyler Goodlet | e178c18745 | |
Tyler Goodlet | 8b12329479 | |
wattygetlood | cf2d258a27 | |
Tyler Goodlet | 9951e1d4c9 | |
Tyler Goodlet | 51373789fe | |
Tyler Goodlet | 51def5484e | |
Tyler Goodlet | 824c81da41 | |
goodboy | fa2468b175 | |
goodboy | 11544dc64f | |
Tyler Goodlet | ba7ed8b877 | |
Tyler Goodlet | 8e81f8bd81 | |
Tyler Goodlet | 0f200d9596 | |
Tyler Goodlet | 1f64f47ee9 | |
Tyler Goodlet | 4c9e5feace | |
Tyler Goodlet | 147207a0ad | |
Tyler Goodlet | 119ff0ec20 | |
goodboy | 1c85efc63d | |
Tyler Goodlet | 422977d27a | |
Tyler Goodlet | 5b368992f6 | |
Tyler Goodlet | 835ad7794c | |
Tyler Goodlet | 590db2c51b | |
Tyler Goodlet | 94572716e6 | |
Tyler Goodlet | 00d6258a24 | |
Tyler Goodlet | ca467f45b6 | |
Tyler Goodlet | 8f023cd66f | |
Tyler Goodlet | 162c58a8d8 | |
Tyler Goodlet | 224c01e43e | |
Tyler Goodlet | 2eef6c76d0 | |
Tyler Goodlet | 746db60e5b | |
Tyler Goodlet | fc3baf4bd1 | |
Tyler Goodlet | f1d61ac01b | |
Tyler Goodlet | d69d3b319e | |
Tyler Goodlet | 0e4a7e3846 | |
Tyler Goodlet | 0bcaeda784 | |
Tyler Goodlet | c828261740 | |
goodboy | 4680d68824 | |
goodboy | 242d02b1cd | |
Tyler Goodlet | 139eca47f7 | |
Tyler Goodlet | 21386f6c1f | |
Tyler Goodlet | 853e8d4466 | |
Tyler Goodlet | e8cd1a0e83 | |
Tyler Goodlet | 96937829eb | |
Tyler Goodlet | 4ea42a0a7e | |
Tyler Goodlet | 30a5f32ef8 | |
Tyler Goodlet | 37eeb0d74b | |
Tyler Goodlet | dd752927a2 | |
Tyler Goodlet | dbdd7b6497 | |
Tyler Goodlet | 39fb2ee85d | |
Tyler Goodlet | 40c874ce92 | |
Tyler Goodlet | ea5b55945f | |
Tyler Goodlet | 216afec19c | |
Tyler Goodlet | 4f9aa0d965 | |
Tyler Goodlet | 04373fd62a | |
Tyler Goodlet | 2a59ccf1bb | |
Tyler Goodlet | 6ec0fdcabf | |
goodboy | b2ee78b71f | |
Guillermo Rodriguez | 9fc95deab7 | |
Tyler Goodlet | 50e8b3464f | |
Tyler Goodlet | 1ad2cd36c5 | |
goodboy | b8ed7da63c | |
Tyler Goodlet | bcc8d8a0d5 | |
goodboy | c808965a6f | |
Tyler Goodlet | 18859e1b8c | |
Tyler Goodlet | 4f899edcef | |
Tyler Goodlet | a2659d1fde | |
Tyler Goodlet | 739399d5a9 | |
Tyler Goodlet | 95bf522b48 | |
Tyler Goodlet | 43666a1a8e | |
Tyler Goodlet | 5bf8e6a90e | |
Tyler Goodlet | 0876d2f4fe | |
Tyler Goodlet | c378a56b29 | |
Tyler Goodlet | e4e1b4d64a | |
Tyler Goodlet | 4cf51ffb1e | |
Tyler Goodlet | 61f3ce43b3 | |
goodboy | 3e302f8445 | |
goodboy | 837c34e24b | |
Tyler Goodlet | 6825ad4804 | |
Tyler Goodlet | de0cc6d81a | |
Tyler Goodlet | 46e85e2e4b | |
Tyler Goodlet | 75fddb249c | |
Tyler Goodlet | c737de7c74 | |
Tyler Goodlet | 8f70398d88 | |
Tyler Goodlet | d706f35668 | |
Tyler Goodlet | 1f1f0d3909 | |
Tyler Goodlet | d7cfe4dcb3 | |
Tyler Goodlet | 7cbcfc5525 |
CI workflow (GitHub Actions):

@@ -1,5 +1,6 @@
 name: CI
+
 
 on:
   # Triggers the workflow on push or pull request events but only for the master branch
   push:

@@ -10,41 +11,21 @@ on:
   # Allows you to run this workflow manually from the Actions tab
   workflow_dispatch:
 
 
 jobs:
 
-  basic_install:
-    name: 'pip install'
-    runs-on: ubuntu-latest
-    steps:
-
-      - name: Checkout
-        uses: actions/checkout@v2
-        with:
-          ref: master
-
-      - name: Setup python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.9'
-
-      - name: Install dependencies
-        run: pip install -e . --upgrade-strategy eager -r requirements.txt
-
-      - name: Run piker cli
-        run: piker
-
   testing:
-    name: 'test suite'
+    name: 'install + test-suite'
     runs-on: ubuntu-latest
     steps:
 
       - name: Checkout
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
 
       - name: Setup python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v3
         with:
-          python-version: '3.9'
+          python-version: '3.10'
 
       - name: Install dependencies
         run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
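Net of these two hunks the standalone ``basic_install`` job is gone and a single ``testing`` job handles install plus test run; reassembled from the context and ``+`` lines above (YAML indentation restored, since the capture flattened it), the visible portion of that job reads::

    jobs:
      testing:
        name: 'install + test-suite'
        runs-on: ubuntu-latest
        steps:
          - name: Checkout
            uses: actions/checkout@v3

          - name: Setup python
            uses: actions/setup-python@v3
            with:
              python-version: '3.10'

          - name: Install dependencies
            run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager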
.gitignore:

@@ -97,6 +97,9 @@ ENV/
 # mkdocs documentation
 /site
 
+# extra scripts dir
+/snippets
+
 # mypy
 .mypy_cache/
 .vscode/settings.json
README.rst:

@@ -72,11 +72,73 @@ for a development install::
     pip install -r requirements.txt -e .
 
 
+install for tinas
+*****************
+for windows peeps you can start by installing all the prerequisite software:
+
+- install git with all default settings - https://git-scm.com/download/win
+- install anaconda with all default settings - https://www.anaconda.com/products/individual
+- install microsoft build tools (check the box for Desktop development for C++, you might be able to uncheck some optional downloads) - https://visualstudio.microsoft.com/visual-cpp-build-tools/
+- install visual studio code with default settings - https://code.visualstudio.com/download
+
+then, `crack a conda shell`_ and run the following commands::
+
+    mkdir code # create code directory
+    cd code # change directory to code
+    git clone https://github.com/pikers/piker.git # downloads piker installation package from github
+    cd piker # change directory to piker
+
+    conda create -n pikonda # creates conda environment named pikonda
+    conda activate pikonda # activates pikonda
+
+    conda install -c conda-forge python-levenshtein # in case it is not already installed
+    conda install pip # may already be installed
+    pip # will show if pip is installed
+
+    pip install -e . -r requirements.txt # install piker in editable mode
+
+test piker to see if it is working::
+
+    piker -b binance chart btcusdt.binance # formatting for loading a chart
+    piker -b kraken -b binance chart xbtusdt.kraken
+    piker -b kraken -b binance -b ib chart qqq.nasdaq.ib
+    piker -b ib chart tsla.nasdaq.ib
+
+potential error::
+
+    FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\user\\AppData\\Roaming\\piker\\brokers.toml'
+
+solution:
+
+- navigate to the directory above (it may differ on your machine; the location is listed in the error message)
+- copy and paste the file from 'C:\\Users\\user\\code\\data/brokers.toml' or create a blank file using notepad at the location above
+
+Visual Studio Code setup:
+
+- now that piker is installed we can set up vscode as the default terminal for running piker and editing the code
+- open Visual Studio Code
+- file --> Add Folder to Workspace --> C:\Users\user\code\piker (adds the piker directory where all piker files are located)
+- file --> Save Workspace As --> save it wherever you want and call it whatever you want, this is going to be your default workspace for running and editing piker code
+- ctrl + shift + p --> start typing Python: Select Interpreter --> when the option comes up select it --> Select at the workspace level --> select the one that shows ('pikonda')
+- change the default terminal to cmd.exe instead of powershell (the default)
+- now when you create a new terminal VScode should automatically activate your conda env so that piker can be run as the first command after a new terminal is created
+
+also, try out fancyzones as part of powertoyz for a decent tiling windows manager to manage all the cool new software you are going to be running.
+
+.. _conda installed: https://
+.. _C++ build toolz: https://
+.. _crack a conda shell: https://
+.. _vscode: https://
+
+.. link to the tina guide
+.. _setup a coolio tiled wm console: https://
+
 provider support
 ****************
 for live data feeds the in-progress set of supported brokers is:
 
-- IB_ via ``ib_insync``
+- IB_ via ``ib_insync``, also see our `container docs`_
 - binance_ and kraken_ for crypto over their public websocket API
 - questrade_ (ish) which comes with effectively free L1
 

@@ -88,6 +150,7 @@ coming soon...
 if you want your broker supported and they have an API let us know.
 
 .. _IB: https://interactivebrokers.github.io/tws-api/index.html
+.. _container docs: https://github.com/pikers/piker/tree/master/dockering/ib
 .. _questrade: https://www.questrade.com/api/documentation
 .. _kraken: https://www.kraken.com/features/api#public-market-data
 .. _binance: https://github.com/pikers/piker/pull/182
brokers.toml:

@@ -8,20 +8,45 @@ expires_at = 1616095326.355846
 
 [kraken]
 key_descr = "api_0"
-public_key = ""
-private_key = ""
+api_key = ""
+secret = ""
 
 [ib]
-host = "127.0.0.1"
+hosts = [
+    "127.0.0.1",
+]
+# XXX: the order in which ports will be scanned
+# (by the `brokerd` daemon-actor)
+# is determined by the line order here.
+# TODO: when we eventually spawn gateways in our
+# container, we can just dynamically allocate these
+# using IBC.
+ports = [
+    4002,  # gw
+    7497,  # tws
+]
 
-ports.gw = 4002
-ports.tws = 7497
-ports.order = ["gw", "tws",]
+# XXX: for a paper account the flex web query service
+# is not supported so you have to manually download
+# an XML report and put it in a location that can be
+# accessed by the ``brokerd.ib`` backend code for parsing.
+flex_token = '666666666666666666666666'
+flex_trades_query_id = '666666' # live account
 
-accounts.margin = "X0000000"
-accounts.ira = "X0000000"
-accounts.paper = "XX0000000"
+# when clients are being scanned this determines
+# which clients are preferred to be used for data
+# feeds based on the order of account names, if
+# detected as active on an API client.
+prefer_data_account = [
+    'paper',
+    'margin',
+    'ira',
+]
 
-# the order in which accounts will be selected (if found through
-# `brokerd`) when a new symbol is loaded
-accounts_order = ['paper', 'margin', 'ira']
+[ib.accounts]
+# the order in which accounts will be selectable
+# in the order mode UI (if found via clients during
+# API-app scanning) when a new symbol is loaded.
+paper = "XX0000000"
+margin = "X0000000"
+ira = "X0000000"
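Assembled from the ``+`` side of this hunk, the resulting ``ib`` section reads roughly as below (all values are the diff's own placeholders); the port-scan order is simply the line order of ``ports``, and ``prefer_data_account`` ranks which detected-active account's client gets preferred for data feeds::

    [ib]
    hosts = [
        "127.0.0.1",
    ]
    ports = [
        4002,  # gw
        7497,  # tws
    ]

    flex_token = '666666666666666666666666'
    flex_trades_query_id = '666666'  # live account

    prefer_data_account = [
        'paper',
        'margin',
        'ira',
    ]

    [ib.accounts]
    paper = "XX0000000"
    margin = "X0000000"
    ira = "X0000000"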
dockering/ib README (new file):

@@ -0,0 +1,30 @@
running ``ib`` gateway in ``docker``
------------------------------------
We have a config based on the (now defunct)
image from "waytrade":

https://github.com/waytrade/ib-gateway-docker

To startup this image with our custom settings
simply run the command::

    docker compose up

And you should have the following socket-available services:

- ``x11vnc1@127.0.0.1:3003``
- ``ib-gw@127.0.0.1:4002``

You can attach to the container via a VNC client
without password auth.

SECURITY STUFF!?!?!
-------------------
Though "``ib``" claims they host filter connections outside
localhost (aka ``127.0.0.1``) it's probably better if you filter
the socket at the OS level using a stateless firewall rule::

    ip rule add not unicast iif lo to 0.0.0.0/0 dport 4002

We will soon have this baked into our own custom image but for
now you'll have to do it urself dawgy.
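A quick way to smoke-test that the gateway socket is actually accepting API connections is a few lines of ``ib_insync`` (the library piker's ``ib`` backend uses, per the README above); a minimal sketch, assuming the container is up and listening on ``4002``::

    # connect to the dockerized ib-gw and confirm the API handshake works
    from ib_insync import IB

    ib = IB()
    # clientId just needs to be unique among concurrent API connections
    ib.connect('127.0.0.1', 4002, clientId=1)
    print('connected:', ib.isConnected())
    ib.disconnect()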
docker-compose.yml (new file, under ``dockering/ib``):

@@ -0,0 +1,64 @@
# rework from the original @
# https://github.com/waytrade/ib-gateway-docker/blob/master/docker-compose.yml
version: "3.5"

services:
  ib-gateway:
    # other image tags available:
    # https://github.com/waytrade/ib-gateway-docker#supported-tags
    image: waytrade/ib-gateway:981.3j
    restart: always
    network_mode: 'host'

    volumes:
      - type: bind
        source: ./jts.ini
        target: /root/Jts/jts.ini
        # don't let IBC clobber this file for
        # the main reason of not having a stupid
        # timezone set..
        read_only: true

      # force our own IBC config
      - type: bind
        source: ./ibc.ini
        target: /root/ibc/config.ini

      # force our noop script - socat isn't needed in host mode.
      - type: bind
        source: ./fork_ports_delayed.sh
        target: /root/scripts/fork_ports_delayed.sh

      # force our noop script - socat isn't needed in host mode.
      - type: bind
        source: ./run_x11_vnc.sh
        target: /root/scripts/run_x11_vnc.sh
        read_only: true

    # NOTE: to fill these out, define an `.env` file in the same dir as
    # this compose file which looks something like:
    # TWS_USERID='myuser'
    # TWS_PASSWORD='guest'
    # TRADING_MODE=paper (or live)
    # VNC_SERVER_PASSWORD='diggity'

    environment:
      TWS_USERID: ${TWS_USERID}
      TWS_PASSWORD: ${TWS_PASSWORD}
      TRADING_MODE: ${TRADING_MODE:-paper}
      VNC_SERVER_PASSWORD: ${VNC_SERVER_PASSWORD:-}

    # ports:
    #   - target: 4002
    #     host_ip: 127.0.0.1
    #     published: 4002
    #     protocol: tcp

    # original mappings for use in non-host-mode
    # which we won't really need going forward since
    # ideally we just pick the port to have ib-gw listen
    # on **when** we spawn the container - i.e. everything
    # will be driven by a ``brokers.toml`` def.
    # - "127.0.0.1:4001:4001"
    # - "127.0.0.1:4002:4002"
    # - "127.0.0.1:5900:5900"
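Per the NOTE embedded above, the compose file expects a sibling ``.env`` file to supply credentials; using the placeholder values straight from those comments it would look like::

    TWS_USERID='myuser'
    TWS_PASSWORD='guest'
    TRADING_MODE=paper
    VNC_SERVER_PASSWORD='diggity'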
fork_ports_delayed.sh (new file):

@@ -0,0 +1,6 @@
#!/bin/sh

# we now just set this to a noop script
# since we can just run the container in
# `network_mode: 'host'` and get literally
# the exact same behaviour XD
@ -0,0 +1,711 @@
|
||||||
|
# Note that in the comments in this file, TWS refers to both the Trader
|
||||||
|
# Workstation and the IB Gateway, unless explicitly stated otherwise.
|
||||||
|
#
|
||||||
|
# When referred to below, the default value for a setting is the value
|
||||||
|
# assumed if either the setting is included but no value is specified, or
|
||||||
|
# the setting is not included at all.
|
||||||
|
#
|
||||||
|
# IBC may also be used to start the FIX CTCI Gateway. All settings
|
||||||
|
# relating to this have names prefixed with FIX.
|
||||||
|
#
|
||||||
|
# The IB API Gateway and the FIX CTCI Gateway share the same code. Which
|
||||||
|
# gateway actually runs is governed by an option on the initial gateway
|
||||||
|
# login screen. The FIX setting described under IBC Startup
|
||||||
|
# Settings below controls this.
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# 1. IBC Startup Settings
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
|
||||||
|
# IBC may be used to start the IB Gateway for the FIX CTCI. This
|
||||||
|
# setting must be set to 'yes' if you want to run the FIX CTCI gateway. The
|
||||||
|
# default is 'no'.
|
||||||
|
|
||||||
|
FIX=no
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# 2. Authentication Settings
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# TWS and the IB API gateway require a single username and password.
|
||||||
|
# You may specify the username and password using the following settings:
|
||||||
|
#
|
||||||
|
# IbLoginId
|
||||||
|
# IbPassword
|
||||||
|
#
|
||||||
|
# Alternatively, you can specify the username and password in the command
|
||||||
|
# files used to start TWS or the Gateway, but this is not recommended for
|
||||||
|
# security reasons.
|
||||||
|
#
|
||||||
|
# If you don't specify them, you will be prompted for them in the usual
|
||||||
|
# login dialog when TWS starts (but whatever you have specified will be
|
||||||
|
# included in the dialog automatically: for example you may specify the
|
||||||
|
# username but not the password, and then you will be prompted for the
|
||||||
|
# password via the login dialog). Note that if you specify either
|
||||||
|
# the username or the password (or both) in the command file, then
|
||||||
|
# IbLoginId and IbPassword settings defined in this file are ignored.
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# The FIX CTCI gateway requires one username and password for FIX order
|
||||||
|
# routing, and optionally a separate username and password for market
|
||||||
|
# data connections. You may specify the usernames and passwords using
|
||||||
|
# the following settings:
|
||||||
|
#
|
||||||
|
# FIXLoginId
|
||||||
|
# FIXPassword
|
||||||
|
# IbLoginId (optional - for market data connections)
|
||||||
|
# IbPassword (optional - for market data connections)
|
||||||
|
#
|
||||||
|
# Alternatively you can specify the FIX username and password in the
|
||||||
|
# command file used to start the FIX CTCI Gateway, but this is not
|
||||||
|
# recommended for security reasons.
|
||||||
|
#
|
||||||
|
# If you don't specify them, you will be prompted for them in the usual
|
||||||
|
# login dialog when FIX CTCI gateway starts (but whatever you have
|
||||||
|
# specified will be included in the dialog automatically: for example
|
||||||
|
# you may specify the usernames but not the passwords, and then you will
|
||||||
|
# be prompted for the passwords via the login dialog). Note that if you
|
||||||
|
# specify either the FIX username or the FIX password (or both) on the
|
||||||
|
# command line, then FIXLoginId and FIXPassword settings defined in this
|
||||||
|
# file are ignored; he same applies to the market data username and
|
||||||
|
# password.
|
||||||
|
|
||||||
|
# IB API Authentication Settings
|
||||||
|
# ------------------------------
|
||||||
|
|
||||||
|
# Your TWS username:
|
||||||
|
|
||||||
|
IbLoginId=
|
||||||
|
|
||||||
|
|
||||||
|
# Your TWS password:
|
||||||
|
|
||||||
|
IbPassword=
|
||||||
|
|
||||||
|
|
||||||
|
# FIX CTCI Authentication Settings
|
||||||
|
# --------------------------------
|
||||||
|
|
||||||
|
# Your FIX CTCI username:
|
||||||
|
|
||||||
|
FIXLoginId=
|
||||||
|
|
||||||
|
|
||||||
|
# Your FIX CTCI password:
|
||||||
|
|
||||||
|
FIXPassword=
|
||||||
|
|
||||||
|
|
||||||
|
# Second Factor Authentication Settings
|
||||||
|
# -------------------------------------
|
||||||
|
|
||||||
|
# If you have enabled more than one second factor authentication
|
||||||
|
# device, TWS presents a list from which you must select the device
|
||||||
|
# you want to use for this login. You can use this setting to
|
||||||
|
# instruct IBC to select a particular item in the list on your
|
||||||
|
# behalf. Note that you must spell this value exactly as it appears
|
||||||
|
# in the list. If no value is set, you must manually select the
|
||||||
|
# relevant list entry.
|
||||||
|
|
||||||
|
SecondFactorDevice=
|
||||||
|
|
||||||
|
|
||||||
|
# If you use the IBKR Mobile app for second factor authentication,
|
||||||
|
# and you fail to complete the process before the time limit imposed
|
||||||
|
# by IBKR, you can use this setting to tell IBC to exit: arrangements
|
||||||
|
# can then be made to automatically restart IBC in order to initiate
|
||||||
|
# the login sequence afresh. Otherwise, manual intervention at TWS's
|
||||||
|
# Second Factor Authentication dialog is needed to complete the
|
||||||
|
# login.
|
||||||
|
#
|
||||||
|
# Permitted values are 'yes' and 'no'. The default is 'no'.
|
||||||
|
#
|
||||||
|
# Note that the scripts provided with the IBC zips for Windows and
|
||||||
|
# Linux provide options to automatically restart in these
|
||||||
|
# circumstances, but only if this setting is also set to 'yes'.
|
||||||
|
|
||||||
|
ExitAfterSecondFactorAuthenticationTimeout=no
|
||||||
|
|
||||||
|
|
||||||
|
# This setting is only relevant if
|
||||||
|
# ExitAfterSecondFactorAuthenticationTimeout is set to 'yes'.
|
||||||
|
#
|
||||||
|
# It controls how long (in seconds) IBC waits for login to complete
|
||||||
|
# after the user acknowledges the second factor authentication
|
||||||
|
# alert at the IBKR Mobile app. If login has not completed after
|
||||||
|
# this time, IBC terminates.
|
||||||
|
# The default value is 40.
|
||||||
|
|
||||||
|
SecondFactorAuthenticationExitInterval=
|
||||||
|
|
||||||
|
|
||||||
|
# Trading Mode
|
||||||
|
# ------------
|
||||||
|
#
|
||||||
|
# TWS 955 introduced a new Trading Mode combo box on its login
|
||||||
|
# dialog. This indicates whether the live account or the paper
|
||||||
|
# trading account corresponding to the supplied credentials is
|
||||||
|
# to be used. The allowed values are 'live' (the default) and
|
||||||
|
# 'paper'. For earlier versions of TWS this setting has no
|
||||||
|
# effect.
|
||||||
|
|
||||||
|
TradingMode=
|
||||||
|
|
||||||
|
|
||||||
|
# Paper-trading Account Warning
|
||||||
|
# -----------------------------
|
||||||
|
#
|
||||||
|
# Logging in to a paper-trading account results in TWS displaying
|
||||||
|
# a dialog asking the user to confirm that they are aware that this
|
||||||
|
# is not a brokerage account. Until this dialog has been accepted,
|
||||||
|
# TWS will not allow API connections to succeed. Setting this
|
||||||
|
# to 'yes' (the default) will cause IBC to automatically
|
||||||
|
# confirm acceptance. Setting it to 'no' will leave the dialog
|
||||||
|
# on display, and the user will have to deal with it manually.
|
||||||
|
|
||||||
|
AcceptNonBrokerageAccountWarning=yes
|
||||||
|
|
||||||
|
|
||||||
|
# Login Dialog Display Timeout
|
||||||
|
#-----------------------------
|
||||||
|
#
|
||||||
|
# In some circumstances, starting TWS may result in failure to display
|
||||||
|
# the login dialog. Restarting TWS may help to resolve this situation,
|
||||||
|
# and IBC does this automatically.
|
||||||
|
#
|
||||||
|
# This setting controls how long (in seconds) IBC waits for the login
|
||||||
|
# dialog to appear before restarting TWS.
|
||||||
|
#
|
||||||
|
# Note that in normal circumstances with a reasonably specified
|
||||||
|
# computer the time to displaying the login dialog is typically less
|
||||||
|
# than 20 seconds, and frequently much less. However many factors can
|
||||||
|
# influence this, and it is unwise to set this value too low.
|
||||||
|
#
|
||||||
|
# The default value is 60.
|
||||||
|
|
||||||
|
LoginDialogDisplayTimeout = 60
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
|
||||||
|
# 3. TWS Startup Settings
|
||||||
|
# =============================================================================
|
||||||
|
|
||||||
|
# Path to settings store
|
||||||
|
# ----------------------
|
||||||
|
#
|
||||||
|
# Path to the directory where TWS should store its settings. This is
|
||||||
|
# normally the folder in which TWS is installed. However you may set
|
||||||
|
# it to some other location if you wish (for example if you want to
|
||||||
|
# run multiple instances of TWS with different settings).
|
||||||
|
#
|
||||||
|
# It is recommended for clarity that you use an absolute path. The
|
||||||
|
# effect of using a relative path is undefined.
|
||||||
|
#
|
||||||
|
# Linux and macOS users should use the appropriate path syntax.
|
||||||
|
#
|
||||||
|
# Note that, for Windows users, you MUST use double separator
|
||||||
|
# characters to separate the elements of the folder path: for
|
||||||
|
# example, IbDir=C:\\IBLiveSettings is valid, but
|
||||||
|
# IbDir=C:\IBLiveSettings is NOT valid and will give unexpected
|
||||||
|
# results. Linux and macOS users need not use double separators,
|
||||||
|
# but they are acceptable.
|
||||||
|
#
|
||||||
|
# The default is the current working directory when IBC is
|
||||||
|
# started.
|
||||||
|
|
||||||
|
IbDir=/root/Jts
|
||||||
|
|
||||||
|
|
||||||
|
# Store settings on server
|
||||||
|
# ------------------------
|
||||||
|
#
|
||||||
|
# If you wish to store a copy of your TWS settings on IB's
|
||||||
|
# servers as well as locally on your computer, set this to
|
||||||
|
# 'yes': this enables you to run TWS on different computers
|
||||||
|
# with the same configuration, market data lines, etc. If set
|
||||||
|
# to 'no', running TWS on different computers will not share the
|
||||||
|
# same settings. If no value is specified, TWS will obtain its
|
||||||
|
# settings from the same place as the last time this user logged
|
||||||
|
# in (whether manually or using IBC).
|
||||||
|
|
||||||
|
StoreSettingsOnServer=
|
||||||
|
|
||||||
|
|
||||||
|
# Minimize TWS on startup
|
||||||
|
# -----------------------
|
||||||
|
#
|
||||||
|
# Set to 'yes' to minimize TWS when it starts:
|
||||||
|
|
||||||
|
MinimizeMainWindow=no
|
||||||
|
|
||||||
|
|
||||||
|
# Existing Session Detected Action
|
||||||
|
# --------------------------------
|
||||||
|
#
|
||||||
|
# When a user logs on to an IBKR account for trading purposes by any means, the
|
||||||
|
# IBKR account server checks to see whether the account is already logged in
|
||||||
|
# elsewhere. If so, a dialog is displayed to both the users that enables them
|
||||||
|
# to determine what happens next. The 'ExistingSessionDetectedAction' setting
|
||||||
|
# instructs TWS how to proceed when it displays this dialog:
|
||||||
|
#
|
||||||
|
# * If the new TWS session is set to 'secondary', the existing session continues
|
||||||
|
# and the new session terminates. Thus a secondary TWS session can never
|
||||||
|
# override any other session.
|
||||||
|
#
|
||||||
|
# * If the existing TWS session is set to 'primary', the existing session
|
||||||
|
# continues and the new session terminates (even if the new session is also
|
||||||
|
# set to primary). Thus a primary TWS session can never be overridden by
|
||||||
|
# any new session).
|
||||||
|
#
|
||||||
|
# * If both the existing and the new TWS sessions are set to 'primaryoverride',
|
||||||
|
# the existing session terminates and the new session proceeds.
|
||||||
|
#
|
||||||
|
# * If the existing TWS session is set to 'manual', the user must handle the
|
||||||
|
# dialog.
|
||||||
|
#
|
||||||
|
# The difference between 'primary' and 'primaryoverride' is that a
|
||||||
|
# 'primaryoverride' session can be overriden over by a new 'primary' session,
|
||||||
|
# but a 'primary' session cannot be overriden by any other session.
|
||||||
|
#
|
||||||
|
# When set to 'primary', if another TWS session is started and manually told to
|
||||||
|
# end the 'primary' session, the 'primary' session is automatically reconnected.
|
||||||
|
#
|
||||||
|
# The default is 'manual'.
|
||||||
|
|
||||||
|
ExistingSessionDetectedAction=primary
|
||||||
|
|
||||||
|
|
||||||
|
# Override TWS API Port Number
|
||||||
|
# ----------------------------
|
||||||
|
#
|
||||||
|
# If OverrideTwsApiPort is set to an integer, IBC changes the
|
||||||
|
# 'Socket port' in TWS's API configuration to that number shortly
|
||||||
|
# after startup. Leaving the setting blank will make no change to
|
||||||
|
# the current setting. This setting is only intended for use in
|
||||||
|
# certain specialized situations where the port number needs to
|
||||||
|
# be set dynamically at run-time: most users will never need it,
|
||||||
|
# so don't use it unless you know you need it.
|
||||||
|
|
||||||
|
OverrideTwsApiPort=4002
|
||||||
|
|
||||||
|
|
||||||
|
# Read-only Login
|
||||||
|
# ---------------
|
||||||
|
#
|
||||||
|
# If ReadOnlyLogin is set to 'yes', and the user is enrolled in IB's
|
||||||
|
# account security programme, the user will not be asked to perform
|
||||||
|
# the second factor authentication action, and login to TWS will
|
||||||
|
# occur automatically in read-only mode: in this mode, placing or
|
||||||
|
# managing orders is not allowed. If set to 'no', and the user is
|
||||||
|
# enrolled in IB's account security programme, the user must perform
|
||||||
|
# the relevant second factor authentication action to complete the
|
||||||
|
# login.
|
||||||
|
|
||||||
|
# If the user is not enrolled in IB's account security programme,
|
||||||
|
# this setting is ignored. The default is 'no'.
|
||||||
|
|
||||||
|
ReadOnlyLogin=no
|
||||||
|
|
||||||
|
|
||||||
|
# Read-only API
|
||||||
|
# -------------
|
||||||
|
#
|
||||||
|
# If ReadOnlyApi is set to 'yes', API programs cannot submit, modify
|
||||||
|
# or cancel orders. If set to 'no', API programs can do these things.
|
||||||
|
# If not set, the existing TWS/Gateway configuration is unchanged.
|
||||||
|
# NB: this setting is really only supplied for the benefit of new TWS
|
||||||
|
# or Gateway instances that are being automatically installed and
|
||||||
|
# started without user intervention (eg Docker containers). Where
|
||||||
|
# a user is involved, they should use the Global Configuration to
|
||||||
|
# set the relevant checkbox (this only needs to be done once) and
|
||||||
|
# not provide a value for this setting.
|
||||||
|
|
||||||
|
ReadOnlyApi=no
|
||||||
|
|
||||||
|
|
||||||
|
# Market data size for US stocks - lots or shares
|
||||||
|
# -----------------------------------------------
|
||||||
|
#
|
||||||
|
# Since IB introduced the option of market data for US stocks showing
|
||||||
|
# bid, ask and last sizes in shares rather than lots, TWS and Gateway
|
||||||
|
# display a dialog immediately after login notifying the user about
|
||||||
|
# this and requiring user input before allowing market data to be
|
||||||
|
# accessed. The user can request that the dialog not be shown again.
|
||||||
|
#
|
||||||
|
# It is recommended that the user should handle this dialog manually
|
||||||
|
# rather than using these settings, which are provided for situations
|
||||||
|
# where the user interface is not easily accessible, or where user
|
||||||
|
# settings are not preserved between sessions (eg some Docker images).
|
||||||
|
#
|
||||||
|
# - If this setting is set to 'accept', the dialog will be handled
|
||||||
|
# automatically and the option to not show it again will be
|
||||||
|
# selected.
|
||||||
|
#
|
||||||
|
# Note that in this case, the only way to allow the dialog to be
|
||||||
|
# displayed again is to manually enable the 'Bid, Ask and Last
|
||||||
|
# Size Display Update' message in the 'Messages' section of the TWS
|
||||||
|
# configuration dialog. So you should only use 'Accept' if you are
|
||||||
|
# sure you really don't want the dialog to be displayed again, or
|
||||||
|
# you have easy access to the user interface.
|
||||||
|
#
|
||||||
|
# - If set to 'defer', the dialog will be handled automatically (so
|
||||||
|
# that market data will start), but the option to not show it again
|
||||||
|
# will not be selected, and it will be shown again after the next
|
||||||
|
# login.
|
||||||
|
#
|
||||||
|
# - If set to 'ignore', the user has to deal with the dialog manually.
|
||||||
|
#
|
||||||
|
# The default value is 'ignore'.
|
||||||
|
#
|
||||||
|
# Note if set to 'accept' or 'defer', TWS also automatically sets
|
||||||
|
# the API settings checkbox labelled 'Send market data in lots for
|
||||||
|
# US stocks for dual-mode API clients'. IBC cannot prevent this.
|
||||||
|
# However you can change this immmediately by setting
|
||||||
|
# SendMarketDataInLotsForUSstocks (see below) to 'no' .
|
||||||
|
|
||||||
|
AcceptBidAskLastSizeDisplayUpdateNotification=accept
|
||||||
|
|
||||||
|
|
||||||
|
# This setting determines whether the API settings checkbox labelled
|
||||||
|
# 'Send market data in lots for US stocks for dual-mode API clients'
|
||||||
|
# is set or cleared. If set to 'yes', the checkbox is set. If set to
|
||||||
|
# 'no' the checkbox is cleared. If defaulted, the checkbox is
|
||||||
|
# unchanged.
|
||||||
|
|
||||||
|
SendMarketDataInLotsForUSstocks=
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
# =============================================================================
# 4. TWS Auto-Closedown
# =============================================================================
#
# IMPORTANT NOTE: Starting with TWS 974, this setting no longer
# works properly, because IB have changed the way TWS handles its
# autologoff mechanism.
#
# You should now configure the TWS autologoff time to something
# convenient for you, and restart IBC each day.
#
# Alternatively, discontinue use of IBC and use the auto-relogin
# mechanism within TWS 974 and later versions (note that the
# auto-relogin mechanism provided by IB is not available if you
# use IBC).

# Set to yes or no (lower case).
#
# yes   means allow TWS to shut down automatically at its
#       specified shutdown time, which is set via the TWS
#       configuration menu.
#
# no    means TWS never shuts down automatically.
#
# NB: IB recommends that you do not keep TWS running
# continuously. If you set this setting to 'no', you may
# experience incorrect TWS operation.
#
# NB: the default for this setting is 'no'. Since this will
# only work properly with TWS versions earlier than 974, you
# should explicitly set this to 'yes' for version 974 and later.

IbAutoClosedown=yes

# =============================================================================
# 5. TWS Tidy Closedown Time
# =============================================================================
#
# NB: starting with TWS 974 this is no longer a useful option
# because both TWS and Gateway now have the same auto-logoff
# mechanism, and IBC can no longer avoid this.
#
# Note that giving this setting a value does not change TWS's
# auto-logoff in any way: any setting will be additional to the
# TWS auto-logoff.
#
# To tell IBC to tidily close TWS at a specified time every
# day, set this value to <hh:mm>, for example:
# ClosedownAt=22:00
#
# To tell IBC to tidily close TWS at a specified day and time
# each week, set this value to <dayOfWeek hh:mm>, for example:
# ClosedownAt=Friday 22:00
#
# Note that the day of the week must be specified using your
# default locale. Also note that Java will only accept
# characters encoded to ISO 8859-1 (Latin-1). This means that
# if the day name in your default locale uses any non-Latin-1
# characters you need to encode them using Unicode escapes
# (see http://java.sun.com/docs/books/jls/third_edition/html/lexical.html#3.3
# for details). For example, to tidily close TWS at 12:00 on
# Saturday where the default locale is Simplified Chinese,
# use the following:
# #ClosedownAt=\u661F\u671F\u516D 12:00

ClosedownAt=

# =============================================================================
# 6. Other TWS Settings
# =============================================================================

# Accept Incoming Connection
# --------------------------
#
# If set to 'accept', IBC automatically accepts incoming
# API connection dialogs. If set to 'reject', IBC
# automatically rejects incoming API connection dialogs. If
# set to 'manual', the user must decide whether to accept or reject
# incoming API connection dialogs. The default is 'manual'.
# NB: it is recommended to set this to 'reject', and to explicitly
# configure which IP addresses can connect to the API in TWS's API
# configuration page, as this is much more secure (in this case, no
# incoming API connection dialogs will occur for those IP addresses).

AcceptIncomingConnectionAction=reject


# Allow Blind Trading
# -------------------
#
# If you attempt to place an order for a contract for which
# you have no market data subscription, TWS displays a dialog
# to warn you against such blind trading.
#
# yes   means the dialog is dismissed as though the user had
#       clicked the 'Ok' button: this means that you accept
#       the risk and want the order to be submitted.
#
# no    means the dialog remains on display and must be
#       handled by the user.

AllowBlindTrading=yes


# Save Settings on a Schedule
# ---------------------------
#
# You can tell TWS to automatically save its settings on a schedule
# of your choosing. You can specify one or more specific times,
# like this:
#
# SaveTwsSettingsAt=HH:MM [ HH:MM]...
#
# for example:
# SaveTwsSettingsAt=08:00 12:30 17:30
#
# Or you can specify an interval at which settings are to be saved,
# optionally starting at a specific time and continuing until another
# time, like this:
#
#SaveTwsSettingsAt=Every n [{mins | hours}] [hh:mm] [hh:mm]
#
# where the first hh:mm is the start time and the second is the end
# time. If you don't specify the end time, settings are saved regularly
# from the start time till midnight. If you don't specify the start time,
# settings are saved regularly all day, beginning at 00:00. Note that
# settings will always be saved at the end time, even if that is not
# exactly one interval later than the previous time. If neither 'mins'
# nor 'hours' is specified, 'mins' is assumed. Examples:
#
# To save every 30 minutes all day starting at 00:00:
#SaveTwsSettingsAt=Every 30
#SaveTwsSettingsAt=Every 30 mins
#
# To save every hour starting at 08:00 and ending at midnight:
#SaveTwsSettingsAt=Every 1 hours 08:00
#SaveTwsSettingsAt=Every 1 hours 08:00 00:00
#
# To save every 90 minutes starting at 08:00 up to and including 17:43:
#SaveTwsSettingsAt=Every 90 08:00 17:43

SaveTwsSettingsAt=

# =============================================================================
# 7. Settings Specific to Indian Versions of TWS
# =============================================================================

# Indian versions of TWS may display a password expiry
# notification dialog and a NSE Compliance dialog. These can be
# dismissed by setting the following to yes. By default the
# password expiry notice is not dismissed, but the NSE Compliance
# notice is dismissed.

# Warning: setting DismissPasswordExpiryWarning=yes will mean
# you will not be notified when your password is about to expire.
# You must then take other measures to ensure that your password
# is changed within the expiry period, otherwise IBC will
# not be able to login successfully.

DismissPasswordExpiryWarning=no
DismissNSEComplianceNotice=yes

# =============================================================================
# 8. IBC Command Server Settings
# =============================================================================

# Do NOT CHANGE THE FOLLOWING SETTINGS unless you
# intend to issue commands to IBC (for example
# using telnet). Note that these settings have nothing to
# do with running programs that use the TWS API.

# Command Server Port Number
# --------------------------
#
# The port number that IBC listens on for commands
# such as "STOP". DO NOT set this to the port number
# used for TWS API connections. There is no good reason
# to change this setting unless the port is used by
# some other application (typically another instance of
# IBC). The default value is 0, which tells IBC not to
# start the command server.

#CommandServerPort=7462


# Permitted Command Sources
# -------------------------
#
# A comma separated list of IP addresses, or host names,
# which are allowed addresses for sending commands to
# IBC. Commands can always be sent from the
# same host as IBC is running on.

ControlFrom=127.0.0.1


# Address for Receiving Commands
# ------------------------------
#
# Specifies the IP address on which the Command Server
# is to listen. For a multi-homed host, this can be used
# to specify that connection requests are only to be
# accepted on the specified address. The default is to
# accept connection requests on all local addresses.

BindAddress=127.0.0.1


# Command Prompt
# --------------
#
# The specified string is output by the server when
# the connection is first opened and after the completion
# of each command. This can be useful if sending commands
# using an interactive program such as telnet. The default
# is that no prompt is output.
# For example:
#
# CommandPrompt=>

CommandPrompt=


# Suppress Command Server Info Messages
# -------------------------------------
#
# Some commands can return intermediate information about
# their progress. This setting controls whether such
# information is sent. The default is that such information
# is not sent.

SuppressInfoMessages=no
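
# For illustration only (this is not an IBC setting): with the command
# server enabled on, say, CommandServerPort=7462 and commands permitted
# from 127.0.0.1 as configured above, the "STOP" command mentioned
# earlier could be sent from Python over a plain TCP socket. This is a
# hedged sketch which assumes newline-terminated commands:
#
#   import socket
#
#   with socket.create_connection(('127.0.0.1', 7462)) as s:
#       s.sendall(b'STOP\n')  # ask IBC to tidily close TWS
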
# =============================================================================
# 9. Diagnostic Settings
# =============================================================================
#
# IBC can log information about the structure of windows
# displayed by TWS. This information is useful when adding
# new features to IBC or when behaviour is not as expected.
#
# The logged information shows the hierarchical organisation
# of all the components of the window, and includes the
# current values of text boxes and labels.
#
# Note that this structure logging has a small performance
# impact, and depending on the settings can cause the logfile
# size to be significantly increased. It is therefore
# recommended that the LogStructureWhen setting be set to
# 'never' (the default) unless there is a specific reason
# that this information is needed.


# Scope of Structure Logging
# --------------------------
#
# The LogStructureScope setting indicates which windows are
# eligible for structure logging:
#
# - if set to 'known', only windows that IBC recognizes
#   are eligible - these are windows that IBC has some
#   interest in monitoring, usually to take some action
#   on the user's behalf;
#
# - if set to 'unknown', only windows that IBC does not
#   recognize are eligible. Most windows displayed by
#   TWS fall into this category;
#
# - if set to 'untitled', only windows that IBC does not
#   recognize and that have no title are eligible. These
#   are usually message boxes or similar small windows;
#
# - if set to 'all', then every window displayed by TWS
#   is eligible.
#
# The default value is 'known'.

LogStructureScope=all


# When to Log Window Structure
# ----------------------------
#
# The LogStructureWhen setting specifies the circumstances
# when eligible TWS windows have their structure logged:
#
# - if set to 'open' or 'yes' or 'true', IBC logs the
#   structure of an eligible window the first time it
#   is encountered;
#
# - if set to 'activate', the structure is logged every
#   time an eligible window is made active;
#
# - if set to 'never' or 'no' or 'false', structure
#   information is never logged.
#
# The default value is 'never'.

LogStructureWhen=never


# DEPRECATED SETTING
# ------------------
#
# LogComponents - THIS SETTING WILL BE REMOVED IN A FUTURE
# RELEASE
#
# If LogComponents is set to any value, this is equivalent
# to setting LogStructureWhen to that same value and
# LogStructureScope to 'all': the actual values of those
# settings are ignored. The default is that the values
# of LogStructureScope and LogStructureWhen are honoured.

#LogComponents=

@@ -0,0 +1,33 @@
[IBGateway]
ApiOnly=true
LocalServerPort=4002
# NOTE: must be set if using IBC's "reject" mode
TrustedIPs=127.0.0.1
; RemoteHostOrderRouting=ndc1.ibllc.com
; WriteDebug=true
; RemotePortOrderRouting=4001
; useRemoteSettings=false
; tradingMode=p
; Steps=8
; colorPalletName=dark

# window geo, this may be useful for sending `xdotool` commands?
; MainWindow.Width=1986
; screenHeight=3960


[Logon]
Locale=en
# most markets are oriented around this zone
# so might as well hard code it.
TimeZone=America/New_York
UseSSL=true
displayedproxymsg=1
os_titlebar=true
s3store=true
useRemoteSettings=false

[Communication]
ctciAutoEncrypt=true
Region=usr
; Peer=cdc1.ibllc.com:4001
@@ -0,0 +1,16 @@
#!/bin/sh

# start VNC server
x11vnc \
    -ncache_cr \
    -listen localhost \
    -display :1 \
    -forever \
    -shared \
    -logappend /var/log/x11vnc.log \
    -bg \
    -noipv6 \
    -autoport 3003
    # can't use this because of ``asyncvnc`` issue:
    # https://github.com/barneygale/asyncvnc/issues/1
    # -passwd 'ibcansmbz'
@@ -0,0 +1,28 @@
Notes to self
=============
chicken scratch we shan't forget, consider this staging
for actual feature issues on wtv git wrapper-provider we're
using (no we shan't stick with GH long term likely).


cool chart features
-------------------
- allow right-click to spawn shell with current in view
  data passed to the new process via ``msgpack-numpy``.
- expand OHLC datum to lower time frame.
- auto-highlight current time range on tick feed


features from IB charting
-------------------------
- vlm diffing from ticks and compare when bar arrives from historical
- should help isolate dark vlm / trades


chart ux ideas
--------------
- hotkey to zoom to order intersection (horizontal line) with previous
  price levels (+ some margin obvs).
- L1 "lines" (queue size repr) should normalize to some fixed x width
  such that when levels with more vlm appear other smaller levels are
  scaled down giving an immediate indication of the liquidity diff.
@@ -18,10 +18,3 @@
 piker: trading gear for hackers.

 """
-import msgpack  # noqa
-
-# TODO: remove this now right?
-import msgpack_numpy
-
-# patch msgpack for numpy arrays
-msgpack_numpy.patch()
@@ -18,30 +18,18 @@
 Cacheing apis and toolz.

 """
-# further examples of interest:
-# https://gist.github.com/njsmith/cf6fc0a97f53865f2c671659c88c1798#file-cache-py-L8

 from collections import OrderedDict
-from typing import (
-    Any,
-    Hashable,
-    Optional,
-    TypeVar,
-    AsyncContextManager,
-)
 from contextlib import (
     asynccontextmanager,
 )

-import trio
-from trio_typing import TaskStatus
-import tractor
+from tractor.trionics import maybe_open_context

 from .brokers import get_brokermod
 from .log import get_logger


-T = TypeVar('T')
 log = get_logger(__name__)
@@ -74,124 +62,18 @@ def async_lifo_cache(maxsize=128):
     return decorator


-_cache: dict[str, 'Client'] = {}  # noqa
-
-
-class cache:
-    '''Globally (processs wide) cached, task access to a
-    kept-alive-while-in-use async resource.
-
-    '''
-    lock = trio.Lock()
-    users: int = 0
-    values: dict[Any, Any] = {}
-    resources: dict[
-        int,
-        Optional[tuple[trio.Nursery, trio.Event]]
-    ] = {}
-    no_more_users: Optional[trio.Event] = None
-
-    @classmethod
-    async def run_ctx(
-        cls,
-        mng,
-        key,
-        task_status: TaskStatus[T] = trio.TASK_STATUS_IGNORED,
-
-    ) -> None:
-        async with mng as value:
-
-            _, no_more_users = cls.resources[id(mng)]
-            cls.values[key] = value
-            task_status.started(value)
-            try:
-                await no_more_users.wait()
-            finally:
-                value = cls.values.pop(key)
-                # discard nursery ref so it won't be re-used (an error)
-                cls.resources.pop(id(mng))
-
-
-@asynccontextmanager
-async def maybe_open_ctx(
-
-    key: Hashable,
-    mngr: AsyncContextManager[T],
-
-) -> (bool, T):
-    '''Maybe open a context manager if there is not already a cached
-    version for the provided ``key``. Return the cached instance on
-    a cache hit.
-
-    '''
-
-    await cache.lock.acquire()
-
-    ctx_key = id(mngr)
-
-    value = None
-    try:
-        # lock feed acquisition around task racing / ``trio``'s
-        # scheduler protocol
-        value = cache.values[key]
-        log.info(f'Reusing cached resource for {key}')
-        cache.users += 1
-        cache.lock.release()
-        yield True, value
-
-    except KeyError:
-        log.info(f'Allocating new resource for {key}')
-
-        # **critical section** that should prevent other tasks from
-        # checking the cache until complete otherwise the scheduler
-        # may switch and by accident we create more then one feed.
-
-        # TODO: avoid pulling from ``tractor`` internals and
-        # instead offer a "root nursery" in piker actors?
-        service_n = tractor.current_actor()._service_n
-
-        # TODO: does this need to be a tractor "root nursery"?
-        ln = cache.resources.get(ctx_key)
-        assert not ln
-
-        ln, _ = cache.resources[ctx_key] = (service_n, trio.Event())
-
-        value = await ln.start(cache.run_ctx, mngr, key)
-        cache.users += 1
-        cache.lock.release()
-
-        yield False, value
-
-    finally:
-        cache.users -= 1
-
-        if cache.lock.locked():
-            cache.lock.release()
-
-        if value is not None:
-            # if no more consumers, teardown the client
-            if cache.users <= 0:
-                log.warning(f'De-allocating resource for {key}')
-
-                # terminate mngr nursery
-                entry = cache.resources.get(ctx_key)
-                if entry:
-                    _, no_more_users = entry
-                    no_more_users.set()
-
-
 @asynccontextmanager
 async def open_cached_client(
     brokername: str,
 ) -> 'Client':  # noqa
-    '''Get a cached broker client from the current actor's local vars.
+    '''
+    Get a cached broker client from the current actor's local vars.

     If one has not been setup do it and cache it.

     '''
     brokermod = get_brokermod(brokername)
-    async with maybe_open_ctx(
-        key=brokername,
-        mngr=brokermod.get_client(),
+    async with maybe_open_context(
+        acm_func=brokermod.get_client,
     ) as (cache_hit, client):
         yield client
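
For reference, a minimal usage sketch of the refactored caching helper
(this driver script is hypothetical; it assumes a configured `binance`
backend and mirrors the call pattern in the backend code further below):

    import trio
    from piker._cacheables import open_cached_client

    async def main():
        # concurrent tasks share one kept-alive client per broker
        async with open_cached_client('binance') as client:
            bars = await client.bars('btcusdt')
            print(f'got {len(bars)} bars')

    trio.run(main)
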
piker/_daemon.py (168 changes)
@@ -19,7 +19,7 @@ Structured, daemon tree service management.

 """
 from typing import Optional, Union, Callable, Any
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from collections import defaultdict

 from pydantic import BaseModel

@@ -34,9 +34,11 @@ from .brokers import get_brokermod
 log = get_logger(__name__)

 _root_dname = 'pikerd'

+_registry_addr = ('127.0.0.1', 6116)
 _tractor_kwargs: dict[str, Any] = {
     # use a different registry addr then tractor's default
-    'arbiter_addr': ('127.0.0.1', 6116),
+    'arbiter_addr': _registry_addr
 }
 _root_modules = [
     __name__,

@@ -78,7 +80,6 @@ class Services(BaseModel):
     ) -> Any:

         with trio.CancelScope() as cs:
-
             async with portal.open_context(
                 target,
                 **kwargs,

@@ -87,19 +88,21 @@ class Services(BaseModel):

                 # unblock once the remote context has started
                 task_status.started((cs, first))
-
-                # wait on any context's return value
-                ctx_res = await ctx.result()
-                log.info(
-                    f'`pikerd` service {name} started with value {ctx_res}'
-                )
-
-                # wait on any error from the sub-actor
-                # NOTE: this will block indefinitely until cancelled
-                # either by error from the target context function or
-                # by being cancelled here by the surrounding cancel
-                # scope
-                return await (portal.result(), ctx_res)
+                log.info(
+                    f'`pikerd` service {name} started with value {first}'
+                )
+                try:
+                    # wait on any context's return value
+                    ctx_res = await ctx.result()
+                except tractor.ContextCancelled:
+                    return await self.cancel_service(name)
+                else:
+                    # wait on any error from the sub-actor
+                    # NOTE: this will block indefinitely until
+                    # cancelled either by error from the target
+                    # context function or by being cancelled here by
+                    # the surrounding cancel scope
+                    return (await portal.result(), ctx_res)

         cs, first = await self.service_n.start(open_context_in_task)

@@ -109,14 +112,17 @@ class Services(BaseModel):

         return cs, first

+    # TODO: per service cancellation by scope, we aren't using this
+    # anywhere right?
     async def cancel_service(
         self,
         name: str,

     ) -> Any:

         log.info(f'Cancelling `pikerd` service {name}')
         cs, portal = self.service_tasks[name]
+        # XXX: not entirely sure why this is required,
+        # and should probably be better fine tuned in
+        # ``tractor``?
         cs.cancel()
         return await portal.cancel_actor()

@@ -124,7 +130,7 @@ class Services(BaseModel):
 _services: Optional[Services] = None


-@asynccontextmanager
+@acm
 async def open_pikerd(
     start_method: str = 'trio',
     loglevel: Optional[str] = None,

@@ -150,7 +156,7 @@ async def open_pikerd(
     tractor.open_root_actor(

         # passed through to ``open_root_actor``
-        arbiter_addr=_tractor_kwargs['arbiter_addr'],
+        arbiter_addr=_registry_addr,
         name=_root_dname,
         loglevel=loglevel,
         debug_mode=debug_mode,

@@ -179,7 +185,48 @@ async def open_pikerd(
     yield _services


-@asynccontextmanager
+@acm
+async def open_piker_runtime(
+    name: str,
+    enable_modules: list[str] = [],
+    start_method: str = 'trio',
+    loglevel: Optional[str] = None,
+
+    # XXX: you should pretty much never want debug mode
+    # for data daemons when running in production.
+    debug_mode: bool = False,
+
+) -> Optional[tractor._portal.Portal]:
+    '''
+    Start a piker actor who's runtime will automatically
+    sync with existing piker actors in local network
+    based on configuration.
+
+    '''
+    global _services
+    assert _services is None
+
+    # XXX: this may open a root actor as well
+    async with (
+        tractor.open_root_actor(
+
+            # passed through to ``open_root_actor``
+            arbiter_addr=_registry_addr,
+            name=name,
+            loglevel=loglevel,
+            debug_mode=debug_mode,
+            start_method=start_method,
+
+            # TODO: eventually we should be able to avoid
+            # having the root have more then permissions to
+            # spawn other specialized daemons I think?
+            enable_modules=_root_modules,
+        ) as _,
+    ):
+        yield tractor.current_actor()
+
+
+@acm
 async def maybe_open_runtime(
     loglevel: Optional[str] = None,
     **kwargs,

@@ -202,7 +249,7 @@ async def maybe_open_runtime(
     yield


-@asynccontextmanager
+@acm
 async def maybe_open_pikerd(
     loglevel: Optional[str] = None,
     **kwargs,

@@ -253,7 +300,36 @@ class Brokerd:
     locks = defaultdict(trio.Lock)


-@asynccontextmanager
+@acm
+async def find_service(
+    service_name: str,
+) -> Optional[tractor.Portal]:
+
+    log.info(f'Scanning for service `{service_name}`')
+    # attach to existing daemon by name if possible
+    async with tractor.find_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as maybe_portal:
+        yield maybe_portal
+
+
+async def check_for_service(
+    service_name: str,
+
+) -> bool:
+    '''
+    Service daemon "liveness" predicate.
+
+    '''
+    async with tractor.query_actor(
+        service_name,
+        arbiter_sockaddr=_registry_addr,
+    ) as sockaddr:
+        return sockaddr
+
+
+@acm
 async def maybe_spawn_daemon(

     service_name: str,

@@ -263,7 +339,7 @@ async def maybe_spawn_daemon(
     **kwargs,

 ) -> tractor.Portal:
-    """
+    '''
     If no ``service_name`` daemon-actor can be found,
     spawn one in a local subactor and return a portal to it.

@@ -274,7 +350,7 @@ async def maybe_spawn_daemon(
     This can be seen as a service starting api for remote-actor
     clients.

-    """
+    '''
     if loglevel:
         get_console_log(loglevel)

@@ -283,13 +359,14 @@ async def maybe_spawn_daemon(
     lock = Brokerd.locks[service_name]
     await lock.acquire()

-    # attach to existing daemon by name if possible
-    async with tractor.find_actor(service_name) as portal:
+    async with find_service(service_name) as portal:
         if portal is not None:
             lock.release()
             yield portal
             return

+    log.warning(f"Couldn't find any existing {service_name}")
+
     # ask root ``pikerd`` daemon to spawn the daemon we need if
     # pikerd is not live we now become the root of the
     # process tree

@@ -325,6 +402,7 @@ async def maybe_spawn_daemon(
     async with tractor.wait_for_actor(service_name) as portal:
         lock.release()
         yield portal
+        await portal.cancel_actor()


 async def spawn_brokerd(

@@ -348,9 +426,19 @@ async def spawn_brokerd(

     # ask `pikerd` to spawn a new sub-actor and manage it under its
     # actor nursery
+    modpath = brokermod.__name__
+    broker_enable = [modpath]
+    for submodname in getattr(
+        brokermod,
+        '__enable_modules__',
+        [],
+    ):
+        subpath = f'{modpath}.{submodname}'
+        broker_enable.append(subpath)
+
     portal = await _services.actor_n.start_actor(
         dname,
-        enable_modules=_data_mods + [brokermod.__name__],
+        enable_modules=_data_mods + broker_enable,
         loglevel=loglevel,
         debug_mode=_services.debug_mode,
         **tractor_kwargs

@@ -368,7 +456,7 @@ async def spawn_brokerd(
     return True


-@asynccontextmanager
+@acm
 async def maybe_spawn_brokerd(

     brokername: str,

@@ -376,7 +464,9 @@ async def maybe_spawn_brokerd(
     **kwargs,

 ) -> tractor.Portal:
-    '''Helper to spawn a brokerd service.
+    '''
+    Helper to spawn a brokerd service *from* a client
+    who wishes to use the sub-actor-daemon.

     '''
     async with maybe_spawn_daemon(

@@ -428,7 +518,7 @@ async def spawn_emsd(
     return True


-@asynccontextmanager
+@acm
 async def maybe_open_emsd(

     brokername: str,

@@ -447,3 +537,25 @@ async def maybe_open_emsd(

     ) as portal:
         yield portal
+
+
+# TODO: ideally we can start the tsdb "on demand" but it's
+# probably going to require "rootless" docker, at least if we don't
+# want to expect the user to start ``pikerd`` with root perms all the
+# time.
+# async def maybe_open_marketstored(
+#     loglevel: Optional[str] = None,
+#     **kwargs,
+
+# ) -> tractor._portal.Portal:  # noqa
+
+#     async with maybe_spawn_daemon(
+
+#         'marketstored',
+#         service_task_target=spawn_emsd,
+#         spawn_args={'loglevel': loglevel},
+#         loglevel=loglevel,
+#         **kwargs,
+
+#     ) as portal:
+#         yield portal
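
A rough sketch of driving the new registry helpers added above (the
registry address comes from `_registry_addr`; the script itself is
hypothetical):

    import trio
    from piker._daemon import check_for_service

    async def main():
        # returns the registered sockaddr if the daemon is alive
        sockaddr = await check_for_service('pikerd')
        if sockaddr:
            print(f'pikerd is up at {sockaddr}')
        else:
            print('no pikerd found, start one first')

    trio.run(main)
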
@@ -21,7 +21,10 @@ Profiling wrappers for internal libs.
 import time
 from functools import wraps

+# NOTE: you can pass a flag to enable this:
+# ``piker chart <args> --profile``.
 _pg_profile: bool = False
+ms_slower_then: float = 0


 def pg_profile_enabled() -> bool:
@@ -33,13 +33,49 @@ class SymbolNotFound(BrokerError):


 class NoData(BrokerError):
-    "Symbol data not permitted"
+    '''
+    Symbol data not permitted or no data
+    for time range found.
+
+    '''
+    def __init__(
+        self,
+        *args,
+        frame_size: int = 1000,
+
+    ) -> None:
+        super().__init__(*args)
+
+        # when raised, machinery can check if the backend
+        # set a "frame size" for doing datetime calcs.
+        self.frame_size: int = frame_size
+
+
+class DataUnavailable(BrokerError):
+    '''
+    Signal storage requests to terminate.
+
+    '''
+    # TODO: add in a reason that can be displayed in the
+    # UI (for eg. `kraken` is bs and you should complain
+    # to them that you can't pull more OHLC data..)
+
+
+class DataThrottle(BrokerError):
+    '''
+    Broker throttled request rate for data.
+
+    '''
+    # TODO: add in throttle metrics/feedback
+
+
 def resproc(
     resp: asks.response_objects.Response,
     log: logging.Logger,
-    return_json: bool = True
+    return_json: bool = True,
+    log_resp: bool = False,

 ) -> asks.response_objects.Response:
     """Process response and return its json content.

@@ -48,11 +84,12 @@ def resproc(
     if not resp.status_code == 200:
         raise BrokerError(resp.body)
     try:
-        json = resp.json()
+        msg = resp.json()
     except json.decoder.JSONDecodeError:
         log.exception(f"Failed to process {resp}:\n{resp.text}")
         raise BrokerError(resp.text)
-    else:
-        log.debug(f"Received json contents:\n{colorize_json(json)}")

-    return json if return_json else resp
+    if log_resp:
+        log.debug(f"Received json contents:\n{colorize_json(msg)}")
+
+    return msg if return_json else resp
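
A quick sketch of how data-feed machinery might consume the new
exception types (the call sites here are hypothetical; only the class
names and the `frame_size` attribute come from the diff above):

    from piker.brokers._util import NoData, DataUnavailable

    async def fetch_with_backoff(get_bars, end_dt):
        try:
            return await get_bars(end_dt=end_dt)
        except NoData as err:
            # backends can advertise a frame size so the caller knows
            # how far back to shift the next request window (units are
            # backend-defined; minutes assumed here)
            return await get_bars(
                end_dt=end_dt.subtract(minutes=err.frame_size)
            )
        except DataUnavailable:
            # no further history: signal the storage request to stop
            return None
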
@@ -18,13 +18,17 @@
 Binance backend

 """
-from contextlib import asynccontextmanager
-from typing import List, Dict, Any, Tuple, Union, Optional
+from contextlib import asynccontextmanager as acm
+from datetime import datetime
+from typing import (
+    Any, Union, Optional,
+    AsyncGenerator, Callable,
+)
 import time

 import trio
 from trio_typing import TaskStatus
-import arrow
+import pendulum
 import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np

@@ -37,7 +41,7 @@ from .._cacheables import open_cached_client
 from ._util import resproc, SymbolNotFound
 from ..log import get_logger, get_console_log
 from ..data import ShmArray
-from ..data._web_bs import open_autorecon_ws
+from ..data._web_bs import open_autorecon_ws, NoBsWs

 log = get_logger(__name__)

@@ -88,7 +92,7 @@ class Pair(BaseModel):
     baseCommissionPrecision: int
     quoteCommissionPrecision: int

-    orderTypes: List[str]
+    orderTypes: list[str]

     icebergAllowed: bool
     ocoAllowed: bool

@@ -96,8 +100,8 @@ class Pair(BaseModel):
     isSpotTradingAllowed: bool
     isMarginTradingAllowed: bool

-    filters: List[Dict[str, Union[str, int, float]]]
-    permissions: List[str]
+    filters: list[dict[str, Union[str, int, float]]]
+    permissions: list[str]


 @dataclass

@@ -129,7 +133,7 @@ class OHLC:
     bar_wap: float = 0.0


-# convert arrow timestamp to unixtime in miliseconds
+# convert datetime obj timestamp to unixtime in milliseconds
 def binance_timestamp(when):
     return int((when.timestamp() * 1000) + (when.microsecond / 1000))

@@ -145,7 +149,7 @@ class Client:
         self,
         method: str,
         params: dict,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = await self._sesh.get(
             path=f'/api/v3/{method}',
             params=params,

@@ -200,7 +204,7 @@ class Client:
         self,
         pattern: str,
         limit: int = None,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         if self._pairs is not None:
             data = self._pairs
         else:

@@ -218,20 +222,22 @@ class Client:
     async def bars(
         self,
         symbol: str,
-        start_time: int = None,
-        end_time: int = None,
+        start_dt: Optional[datetime] = None,
+        end_dt: Optional[datetime] = None,
         limit: int = 1000,  # <- max allowed per query
         as_np: bool = True,

     ) -> dict:

-        if start_time is None:
-            start_time = binance_timestamp(
-                arrow.utcnow().floor('minute').shift(minutes=-limit)
-            )
+        if end_dt is None:
+            end_dt = pendulum.now('UTC')

-        if end_time is None:
-            end_time = binance_timestamp(arrow.utcnow())
+        if start_dt is None:
+            start_dt = end_dt.start_of(
+                'minute').subtract(minutes=limit)
+
+        start_time = binance_timestamp(start_dt)
+        end_time = binance_timestamp(end_dt)

         # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
         bars = await self._api(

@@ -273,7 +279,7 @@ class Client:
         return array


-@asynccontextmanager
+@acm
 async def get_client() -> Client:
     client = Client()
     await client.cache_symbols()

@@ -295,7 +301,7 @@ class AggTrade(BaseModel):
     M: bool  # Ignore


-async def stream_messages(ws):
+async def stream_messages(ws: NoBsWs) -> AsyncGenerator[NoBsWs, dict]:

     timeouts = 0
     while True:

@@ -353,7 +359,7 @@ async def stream_messages(ws):
     }


-def make_sub(pairs: List[str], sub_name: str, uid: int) -> Dict[str, str]:
+def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:
     """Create a request subscription packet dict.

     https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams

@@ -368,6 +374,37 @@ def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:
     }


+@acm
+async def open_history_client(
+    symbol: str,
+
+) -> tuple[Callable, int]:
+
+    # TODO implement history getter for the new storage layer.
+    async with open_cached_client('binance') as client:
+
+        async def get_ohlc(
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,
+
+        ) -> tuple[
+            np.ndarray,
+            datetime,  # start
+            datetime,  # end
+        ]:
+
+            array = await client.bars(
+                symbol,
+                start_dt=start_dt,
+                end_dt=end_dt,
+            )
+            start_dt = pendulum.from_timestamp(array[0]['time'])
+            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            return array, start_dt, end_dt
+
+        yield get_ohlc, {'erlangs': 3, 'rate': 3}
+
+
 async def backfill_bars(
     sym: str,
     shm: ShmArray,  # type: ignore # noqa

@@ -385,13 +422,12 @@ async def backfill_bars(
 async def stream_quotes(

     send_chan: trio.abc.SendChannel,
-    symbols: List[str],
-    shm: ShmArray,
+    symbols: list[str],
     feed_is_live: trio.Event,
     loglevel: str = None,

     # startup sync
-    task_status: TaskStatus[Tuple[Dict, Dict]] = trio.TASK_STATUS_IGNORED,
+    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

 ) -> None:
     # XXX: required to propagate ``tractor`` loglevel to piker logging

@@ -416,8 +452,8 @@ async def stream_quotes(

         # XXX: after manually inspecting the response format we
         # just directly pick out the info we need
-        si['price_tick_size'] = syminfo.filters[0]['tickSize']
-        si['lot_tick_size'] = syminfo.filters[2]['stepSize']
+        si['price_tick_size'] = float(syminfo.filters[0]['tickSize'])
+        si['lot_tick_size'] = float(syminfo.filters[2]['stepSize'])
         si['asset_type'] = 'crypto'

     symbol = symbols[0]

@@ -428,10 +464,11 @@ async def stream_quotes(
         symbol: {
             'symbol_info': sym_infos[sym],
             'shm_write_opts': {'sum_tick_vml': False},
+            'fqsn': sym,
         },
     }

-    @asynccontextmanager
+    @acm
     async def subscribe(ws: wsproto.WSConnection):
         # setup subs

@@ -481,17 +518,25 @@ async def stream_quotes(
         # TODO: use ``anext()`` when it lands in 3.10!
         typ, quote = await msg_gen.__anext__()

-        first_quote = {quote['symbol'].lower(): quote}
-        task_status.started((init_msgs, first_quote))
+        task_status.started((init_msgs, quote))

         # signal to caller feed is ready for consumption
         feed_is_live.set()

+        # import time
+        # last = time.time()
+
         # start streaming
         async for typ, msg in msg_gen:

+            # period = time.time() - last
+            # hz = 1/period if period else float('inf')
+            # if hz > 60:
+            #     log.info(f'Binance quotez : {hz}')
+
             topic = msg['symbol'].lower()
             await send_chan.send({topic: msg})
+            # last = time.time()


 @tractor.context
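
A consumer-side sketch for the new `open_history_client` endpoint added
above (the tuple shape and the `erlangs`/`rate` config dict are exactly
as yielded by the backend; the driver script and the module path
`piker.brokers.binance` are assumptions):

    import trio
    from piker.brokers.binance import open_history_client

    async def main():
        async with open_history_client('btcusdt') as (get_ohlc, config):
            # no dt args: fetch the most recent frame
            array, start_dt, end_dt = await get_ohlc()
            print(f'{len(array)} bars spanning {start_dt} to {end_dt}')

    trio.run(main)
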
@@ -23,7 +23,6 @@ from operator import attrgetter
 from operator import itemgetter

 import click
-import pandas as pd
 import trio
 import tractor

@@ -47,8 +46,10 @@ _watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
 @click.argument('kwargs', nargs=-1)
 @click.pass_obj
 def api(config, meth, kwargs, keys):
-    """Make a broker-client API method call
-    """
+    '''
+    Make a broker-client API method call
+
+    '''
     # global opts
     broker = config['brokers'][0]

@@ -79,13 +80,13 @@ def api(config, meth, kwargs, keys):


 @cli.command()
-@click.option('--df-output', '-df', flag_value=True,
-              help='Output in `pandas.DataFrame` format')
 @click.argument('tickers', nargs=-1, required=True)
 @click.pass_obj
-def quote(config, tickers, df_output):
-    """Print symbol quotes to the console
-    """
+def quote(config, tickers):
+    '''
+    Print symbol quotes to the console
+
+    '''
     # global opts
     brokermod = config['brokermods'][0]

@@ -100,28 +101,19 @@ def quote(config, tickers, df_output):
         if ticker not in syms:
             brokermod.log.warn(f"Could not find symbol {ticker}?")

-    if df_output:
-        cols = next(filter(bool, quotes)).copy()
-        cols.pop('symbol')
-        df = pd.DataFrame(
-            (quote or {} for quote in quotes),
-            columns=cols,
-        )
-        click.echo(df)
-    else:
-        click.echo(colorize_json(quotes))
+    click.echo(colorize_json(quotes))


 @cli.command()
-@click.option('--df-output', '-df', flag_value=True,
-              help='Output in `pandas.DataFrame` format')
 @click.option('--count', '-c', default=1000,
               help='Number of bars to retrieve')
 @click.argument('symbol', required=True)
 @click.pass_obj
-def bars(config, symbol, count, df_output):
-    """Retreive 1m bars for symbol and print on the console
-    """
+def bars(config, symbol, count):
+    '''
+    Retreive 1m bars for symbol and print on the console
+
+    '''
     # global opts
     brokermod = config['brokermods'][0]

@@ -133,7 +125,7 @@ def bars(config, symbol, count, df_output):
             brokermod,
             symbol,
             count=count,
-            as_np=df_output
+            as_np=False,
         )
     )

@@ -141,9 +133,6 @@ def bars(config, symbol, count, df_output):
         log.error(f"No quotes could be found for {symbol}?")
         return

-    if df_output:
-        click.echo(pd.DataFrame(bars))
-    else:
-        click.echo(colorize_json(bars))
+    click.echo(colorize_json(bars))

@@ -156,8 +145,10 @@ def bars(config, symbol, count, df_output):
 @click.argument('name', nargs=1, required=True)
 @click.pass_obj
 def record(config, rate, name, dhost, filename):
-    """Record client side quotes to a file on disk
-    """
+    '''
+    Record client side quotes to a file on disk
+
+    '''
     # global opts
     brokermod = config['brokermods'][0]
     loglevel = config['loglevel']

@@ -195,8 +186,10 @@ def record(config, rate, name, dhost, filename):
 @click.argument('symbol', required=True)
 @click.pass_context
 def contracts(ctx, loglevel, broker, symbol, ids):
-    """Get list of all option contracts for symbol
-    """
+    '''
+    Get list of all option contracts for symbol
+
+    '''
     brokermod = get_brokermod(broker)
     get_console_log(loglevel)

@@ -213,14 +206,14 @@ def contracts(ctx, loglevel, broker, symbol, ids):


 @cli.command()
-@click.option('--df-output', '-df', flag_value=True,
-              help='Output in `pandas.DataFrame` format')
 @click.option('--date', '-d', help='Contracts expiry date')
 @click.argument('symbol', required=True)
 @click.pass_obj
-def optsquote(config, symbol, df_output, date):
-    """Retreive symbol option quotes on the console
-    """
+def optsquote(config, symbol, date):
+    '''
+    Retreive symbol option quotes on the console
+
+    '''
     # global opts
     brokermod = config['brokermods'][0]

@@ -233,13 +226,6 @@ def optsquote(config, symbol, df_output, date):
         log.error(f"No option quotes could be found for {symbol}?")
         return

-    if df_output:
-        df = pd.DataFrame(
-            (quote.values() for quote in quotes),
-            columns=quotes[0].keys(),
-        )
-        click.echo(df)
-    else:
-        click.echo(colorize_json(quotes))
+    click.echo(colorize_json(quotes))

@@ -247,8 +233,10 @@ def optsquote(config, symbol, df_output, date):
 @click.argument('tickers', nargs=-1, required=True)
 @click.pass_obj
 def symbol_info(config, tickers):
-    """Print symbol quotes to the console
-    """
+    '''
+    Print symbol quotes to the console
+
+    '''
     # global opts
     brokermod = config['brokermods'][0]

@@ -270,8 +258,10 @@ def symbol_info(config, tickers):
 @click.argument('pattern', required=True)
 @click.pass_obj
 def search(config, pattern):
-    """Search for symbols from broker backend(s).
-    """
+    '''
+    Search for symbols from broker backend(s).
+
+    '''
     # global opts
     brokermods = config['brokermods']
@@ -142,15 +142,23 @@ async def symbol_search(
     brokermods: list[ModuleType],
     pattern: str,
     **kwargs,

 ) -> Dict[str, Dict[str, Dict[str, Any]]]:
-    """Return symbol info from broker.
-    """
+    '''
+    Return symbol info from broker.
+
+    '''
     results = []

-    async def search_backend(brokername: str) -> None:
+    async def search_backend(
+        brokermod: ModuleType
+    ) -> None:
+
+        brokername: str = mod.name
+
         async with maybe_spawn_brokerd(
-            brokername,
+            mod.name,
+            infect_asyncio=getattr(mod, '_infect_asyncio', False),
         ) as portal:

             results.append((
@@ -38,6 +38,7 @@ import contextlib

 import trio
 import tractor
+from tractor.experimental import msgpub
 from async_generator import asynccontextmanager

 from ..log import get_logger, get_console_log

@@ -98,7 +99,7 @@ class BrokerFeed:
     )


-@tractor.msg.pub(tasks=['stock', 'option'])
+@msgpub(tasks=['stock', 'option'])
 async def stream_poll_requests(
     get_topics: Callable,
     get_quotes: Coroutine,

@@ -293,7 +294,7 @@ async def start_quote_stream(

     await stream_poll_requests(

-        # ``msg.pub`` required kwargs
+        # ``trionics.msgpub`` required kwargs
         task_name=feed_type,
         ctx=ctx,
         topics=symbols,
piker/brokers/ib.py: 1912 lines changed (file diff suppressed because it is too large)
@@ -0,0 +1,67 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Interactive Brokers API backend.

Sub-modules within break into the core functionalities:

- ``broker.py`` part for orders / trading endpoints
- ``data.py`` for real-time data feed endpoints

- ``client.py`` for the core API machinery which is ``trio``-ized
  wrapping around ``ib_insync``.

- ``report.py`` for the hackery to build manual pp calcs
  to avoid ib's absolute bullshit FIFO style position
  tracking..

"""
from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import trades_dialogue

__all__ = [
    'get_client',
    'trades_dialogue',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {
    'infect_asyncio': True,
}

# annotation to let backend agnostic code
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True
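As an aside, here is a sketch of how backend-agnostic spawning code might consume the module-level flags declared above; ``spawn_flags`` and its return shape are hypothetical, only the attribute names come from this file:

import importlib
from types import ModuleType


def spawn_flags(brokername: str) -> dict:
    # import the backend package, eg. ``piker.brokers.ib``
    brokermod: ModuleType = importlib.import_module(
        f'piker.brokers.{brokername}'
    )
    return {
        # RPC sub-modules the daemon actor should serve
        'enable_modules': [
            f'{brokermod.__name__}.{sub}'
            for sub in getattr(brokermod, '__enable_modules__', [])
        ],
        # spawn in tractor's asyncio-infected mode only if the
        # backend opted in via ``_infect_asyncio``
        'infect_asyncio': getattr(brokermod, '_infect_asyncio', False),
        **getattr(brokermod, '_spawn_kwargs', {}),
    }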
(file diff suppressed because it is too large)
@@ -0,0 +1,590 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Order and trades endpoints for use with ``piker``'s EMS.

"""
from __future__ import annotations
from dataclasses import asdict
from functools import partial
from pprint import pformat
import time
from typing import (
    Any,
    Optional,
    AsyncIterator,
)

import trio
from trio_typing import TaskStatus
import tractor
from ib_insync.contract import (
    Contract,
    Option,
)
from ib_insync.order import (
    Trade,
    OrderStatus,
)
from ib_insync.objects import (
    Fill,
    Execution,
)
from ib_insync.objects import Position

from piker import config
from piker.log import get_console_log
from piker.clearing._messages import (
    BrokerdOrder,
    BrokerdOrderAck,
    BrokerdStatus,
    BrokerdPosition,
    BrokerdCancel,
    BrokerdFill,
    BrokerdError,
)
from .api import (
    _accounts2clients,
    _adhoc_futes_set,
    log,
    get_config,
    open_client_proxies,
    Client,
)


def pack_position(
    pos: Position

) -> dict[str, Any]:
    con = pos.contract

    if isinstance(con, Option):
        # TODO: option symbol parsing and sane display:
        symbol = con.localSymbol.replace(' ', '')

    else:
        # TODO: lookup fqsn even for derivs.
        symbol = con.symbol.lower()

    exch = (con.primaryExchange or con.exchange).lower()
    symkey = '.'.join((symbol, exch))
    if not exch:
        # attempt to lookup the symbol from our
        # hacked set..
        for sym in _adhoc_futes_set:
            if symbol in sym:
                symkey = sym
                break

    expiry = con.lastTradeDateOrContractMonth
    if expiry:
        symkey += f'.{expiry}'

    # TODO: options contracts into a sane format..

    return BrokerdPosition(
        broker='ib',
        account=pos.account,
        symbol=symkey,
        currency=con.currency,
        size=float(pos.position),
        avg_price=float(pos.avgCost) / float(con.multiplier or 1.0),
    )


async def handle_order_requests(

    ems_order_stream: tractor.MsgStream,
    accounts_def: dict[str, str],

) -> None:

    request_msg: dict
    async for request_msg in ems_order_stream:
        log.info(f'Received order request {request_msg}')

        action = request_msg['action']
        account = request_msg['account']

        acct_number = accounts_def.get(account)
        if not acct_number:
            log.error(
                f'An IB account number for name {account} is not found?\n'
                'Make sure you have all TWS and GW instances running.'
            )
            await ems_order_stream.send(BrokerdError(
                oid=request_msg['oid'],
                symbol=request_msg['symbol'],
                reason=f'No account found: `{account}` ?',
            ).dict())
            continue

        client = _accounts2clients.get(account)
        if not client:
            log.error(
                f'An IB client for account name {account} is not found.\n'
                'Make sure you have all TWS and GW instances running.'
            )
            await ems_order_stream.send(BrokerdError(
                oid=request_msg['oid'],
                symbol=request_msg['symbol'],
                reason=f'No api client loaded for account: `{account}` ?',
            ).dict())
            continue

        if action in {'buy', 'sell'}:
            # validate
            order = BrokerdOrder(**request_msg)

            # call our client api to submit the order
            reqid = client.submit_limit(
                oid=order.oid,
                symbol=order.symbol,
                price=order.price,
                action=order.action,
                size=order.size,
                account=acct_number,

                # XXX: by default 0 tells ``ib_insync`` methods that
                # there is no existing order so ask the client to create
                # a new one (which it seems to do by allocating an int
                # counter - collision prone..)
                reqid=order.reqid,
            )
            if reqid is None:
                await ems_order_stream.send(BrokerdError(
                    oid=request_msg['oid'],
                    symbol=request_msg['symbol'],
                    reason='Order already active?',
                ).dict())

            # deliver ack that order has been submitted to broker routing
            await ems_order_stream.send(
                BrokerdOrderAck(
                    # ems order request id
                    oid=order.oid,
                    # broker specific request id
                    reqid=reqid,
                    time_ns=time.time_ns(),
                    account=account,
                ).dict()
            )

        elif action == 'cancel':
            msg = BrokerdCancel(**request_msg)
            client.submit_cancel(reqid=msg.reqid)

        else:
            log.error(f'Unknown order command: {request_msg}')


async def recv_trade_updates(

    client: Client,
    to_trio: trio.abc.SendChannel,

) -> None:
    """Stream a ticker using the std L1 api.
    """
    client.inline_errors(to_trio)

    # sync with trio task
    to_trio.send_nowait(None)

    def push_tradesies(eventkit_obj, obj, fill=None):
        """Push events to trio task.

        """
        if fill is not None:
            # execution details event
            item = ('fill', (obj, fill))

        elif eventkit_obj.name() == 'positionEvent':
            item = ('position', obj)

        else:
            item = ('status', obj)

        log.info(f'eventkit event ->\n{pformat(item)}')

        try:
            to_trio.send_nowait(item)
        except trio.BrokenResourceError:
            log.exception(f'Disconnected from {eventkit_obj} updates')
            eventkit_obj.disconnect(push_tradesies)

    # hook up to the weird eventkit object - event stream api
    for ev_name in [
        'orderStatusEvent',  # all order updates
        'execDetailsEvent',  # all "fill" updates
        'positionEvent',  # avg price updates per symbol per account

        # 'commissionReportEvent',
        # XXX: ugh, it is a separate event from IB and it's
        # emitted as follows:
        # self.ib.commissionReportEvent.emit(trade, fill, report)

        # XXX: not sure yet if we need these
        # 'updatePortfolioEvent',

        # XXX: these all seem to be weird ib_insync internal
        # events that we probably don't care that much about
        # given the internal design is wonky af..
        # 'newOrderEvent',
        # 'orderModifyEvent',
        # 'cancelOrderEvent',
        # 'openOrderEvent',
    ]:
        eventkit_obj = getattr(client.ib, ev_name)
        handler = partial(push_tradesies, eventkit_obj)
        eventkit_obj.connect(handler)

    # let the engine run and stream
    await client.ib.disconnectedEvent


@tractor.context
async def trades_dialogue(

    ctx: tractor.Context,
    loglevel: str = None,

) -> AsyncIterator[dict[str, Any]]:

    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    accounts_def = config.load_accounts(['ib'])

    global _client_cache

    # deliver positions to subscriber before anything else
    all_positions = []
    accounts = set()
    clients: list[tuple[Client, trio.MemoryReceiveChannel]] = []

    async with (
        trio.open_nursery() as nurse,
        open_client_proxies() as (proxies, aioclients),
    ):
        for account, proxy in proxies.items():

            client = aioclients[account]

            async def open_stream(
                task_status: TaskStatus[
                    trio.abc.ReceiveChannel
                ] = trio.TASK_STATUS_IGNORED,
            ):
                # each api client has a unique event stream
                async with tractor.to_asyncio.open_channel_from(
                    recv_trade_updates,
                    client=client,
                ) as (first, trade_event_stream):

                    task_status.started(trade_event_stream)
                    await trio.sleep_forever()

            trade_event_stream = await nurse.start(open_stream)

            clients.append((client, trade_event_stream))

            assert account in accounts_def
            accounts.add(account)

        for client in aioclients.values():
            for pos in client.positions():

                msg = pack_position(pos)
                msg.account = accounts_def.inverse[msg.account]

                assert msg.account in accounts, (
                    f'Position for unknown account: {msg.account}')

                all_positions.append(msg.dict())

        trades: list[dict] = []
        for proxy in proxies.values():
            trades.append(await proxy.trades())

        log.info(f'Loaded {len(trades)} from this session')
        # TODO: write trades to local ``trades.toml``
        # - use above per-session trades data and write to local file
        # - get the "flex reports" working and pull historical data and
        #   also save locally.

        await ctx.started((
            all_positions,
            tuple(name for name in accounts_def if name in accounts),
        ))

        async with (
            ctx.open_stream() as ems_stream,
            trio.open_nursery() as n,
        ):
            # start order request handler **before** local trades event loop
            n.start_soon(handle_order_requests, ems_stream, accounts_def)

            # allocate event relay tasks for each client connection
            for client, stream in clients:
                n.start_soon(
                    deliver_trade_events,
                    stream,
                    ems_stream,
                    accounts_def
                )

            # block until cancelled
            await trio.sleep_forever()


async def deliver_trade_events(

    trade_event_stream: trio.MemoryReceiveChannel,
    ems_stream: tractor.MsgStream,
    accounts_def: dict[str, str],

) -> None:
    '''Format and relay all trade events for a given client to the EMS.

    '''
    action_map = {'BOT': 'buy', 'SLD': 'sell'}

    # TODO: for some reason we can receive a ``None`` here when the
    # ib-gw goes down? Not sure exactly how that's happening looking
    # at the eventkit code above but we should probably handle it...
    async for event_name, item in trade_event_stream:

        log.info(f'ib sending {event_name}:\n{pformat(item)}')

        # TODO: templating the ib statuses in comparison with other
        # brokers is likely the way to go:
        # https://interactivebrokers.github.io/tws-api/interfaceIBApi_1_1EWrapper.html#a17f2a02d6449710b6394d0266a353313
        # short list:
        # - PendingSubmit
        # - PendingCancel
        # - PreSubmitted (simulated orders)
        # - ApiCancelled (cancelled by client before submission
        #   to routing)
        # - Cancelled
        # - Filled
        # - Inactive (reject or cancelled but not by trader)

        # XXX: here's some other sucky cases from the api
        # - short-sale but securities haven't been located, in this
        #   case we should probably keep the order in some kind of
        #   weird state or cancel it outright?

        # status='PendingSubmit', message=''),
        # status='Cancelled', message='Error 404,
        #   reqId 1550: Order held while securities are located.'),
        # status='PreSubmitted', message='')],

        if event_name == 'status':

            # XXX: begin normalization of nonsense ib_insync internal
            # object-state tracking representations...

            # unwrap needed data from ib_insync internal types
            trade: Trade = item
            status: OrderStatus = trade.orderStatus

            # skip duplicate filled updates - we get the deats
            # from the execution details event
            msg = BrokerdStatus(

                reqid=trade.order.orderId,
                time_ns=time.time_ns(),  # cuz why not
                account=accounts_def.inverse[trade.order.account],

                # everyone doin camel case..
                status=status.status.lower(),  # force lower case

                filled=status.filled,
                reason=status.whyHeld,

                # this seems to not be necessarily up to date in the
                # execDetails event.. so we have to send it here I guess?
                remaining=status.remaining,

                broker_details={'name': 'ib'},
            )

        elif event_name == 'fill':

            # for wtv reason this is a separate event type
            # from IB, not sure why it's needed other than for extra
            # complexity and over-engineering :eyeroll:.
            # we may just end up dropping these events (or
            # translating them to ``Status`` msgs) if we can
            # show the equivalent status events are no more latent.

            # unpack ib_insync types
            # pep-0526 style:
            # https://www.python.org/dev/peps/pep-0526/#global-and-local-variable-annotations
            trade: Trade
            fill: Fill
            trade, fill = item
            execu: Execution = fill.execution

            # TODO: normalize out commissions details?
            details = {
                'contract': asdict(fill.contract),
                'execution': asdict(fill.execution),
                'commissions': asdict(fill.commissionReport),
                'broker_time': execu.time,  # supposedly server fill time
                'name': 'ib',
            }

            msg = BrokerdFill(
                # should match the value returned from `.submit_limit()`
                reqid=execu.orderId,
                time_ns=time.time_ns(),  # cuz why not

                action=action_map[execu.side],
                size=execu.shares,
                price=execu.price,

                broker_details=details,
                # XXX: required by order mode currently
                broker_time=details['broker_time'],

            )

        elif event_name == 'error':

            err: dict = item

            # f$#$% gawd dammit insync..
            con = err['contract']
            if isinstance(con, Contract):
                err['contract'] = asdict(con)

            if err['reqid'] == -1:
                log.error(f'TWS external order error:\n{pformat(err)}')

            # TODO: what schema for this msg if we're going to make it
            # portable across all backends?
            # msg = BrokerdError(**err)
            continue

        elif event_name == 'position':
            msg = pack_position(item)
            msg.account = accounts_def.inverse[msg.account]

        elif event_name == 'event':

            # it's either a general system status event or an external
            # trade event?
            log.info(f"TWS system status: \n{pformat(item)}")

            # TODO: support this again but needs parsing at the callback
            # level...
            # reqid = item.get('reqid', 0)
            # if getattr(msg, 'reqid', 0) < -1:
            #     log.info(f"TWS triggered trade\n{pformat(msg.dict())}")

            continue

            # msg.reqid = 'tws-' + str(-1 * reqid)

            # mark msg as from "external system"
            # TODO: probably something better than this.. and start
            # considering multiplayer/group trades tracking
            # msg.broker_details['external_src'] = 'tws'

        # XXX: we always serialize to a dict for msgpack
        # translations, ideally we can move to an msgspec (or other)
        # encoder that can be enabled in ``tractor`` ahead of
        # time so we can pass through the message types directly.
        await ems_stream.send(msg.dict())


def load_flex_trades(
    path: Optional[str] = None,

) -> dict[str, str]:

    from pprint import pprint
    from ib_insync import flexreport, util

    conf = get_config()

    if not path:
        # load ``brokers.toml`` and try to get the flex
        # token and query id that must be previously defined
        # by the user.
        token = conf.get('flex_token')
        if not token:
            raise ValueError(
                'You must specify a ``flex_token`` field in your'
                '`brokers.toml` in order to load your trade log, see our'
                'instructions for how to set this up here:\n'
                'PUT LINK HERE!'
            )

        qid = conf['flex_trades_query_id']

        # TODO: hack this into our logging
        # system like we do with the API client..
        util.logToConsole()

        # TODO: rewrite the query part of this with async..httpx?
        report = flexreport.FlexReport(
            token=token,
            queryId=qid,
        )

    else:
        # XXX: another project we could potentially look at,
        # https://pypi.org/project/ibflex/
        report = flexreport.FlexReport(path=path)

    trade_entries = report.extract('Trade')
    trades = {
        # XXX: LOL apparently ``toml`` has a bug
        # where a section key error will show up in the write
        # if you leave this as an ``int``?
        str(t.__dict__['tradeID']): t.__dict__
        for t in trade_entries
    }

    ln = len(trades)
    log.info(f'Loaded {ln} trades from flex query')

    trades_by_account = {}
    for tid, trade in trades.items():
        trades_by_account.setdefault(
            # oddly for some so-called "BookTrade" entries
            # this field seems to be blank, no cuckin clue.
            # trade['ibExecID']
            str(trade['accountId']), {}
        )[tid] = trade

    section = {'ib': trades_by_account}
    pprint(section)

    # TODO: load the config first and append in
    # the new trades loaded here..
    try:
        config.write(section, 'trades')
    except KeyError:
        import pdbpp; pdbpp.set_trace()  # noqa


if __name__ == '__main__':
    load_flex_trades()
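The symbol-key composition inside ``pack_position()`` is worth calling out; pulled out as a standalone helper (hypothetical, for illustration only) it reads:

def mk_ib_symkey(
    symbol: str,
    exchange: str,
    expiry: str = '',
) -> str:
    # eg. ('MNQ', 'GLOBEX', '20220318') -> 'mnq.globex.20220318'
    key = '.'.join((symbol.lower(), exchange.lower()))
    if expiry:
        # derivatives get the expiry appended as a final suffix
        key += f'.{expiry}'
    return key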
@@ -0,0 +1,938 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Data feed endpoints pre-wrapped and ready for use with ``tractor``/``trio``.

"""
from __future__ import annotations
import asyncio
from contextlib import asynccontextmanager as acm
from dataclasses import asdict
from datetime import datetime
from math import isnan
import time
from typing import (
    Callable,
    Optional,
    Awaitable,
)

from async_generator import aclosing
from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
import tractor
import trio
from trio_typing import TaskStatus

from piker.data._sharedmem import ShmArray
from .._util import SymbolNotFound, NoData
from .api import (
    _adhoc_futes_set,
    log,
    load_aio_clients,
    ibis,
    MethodProxy,
    open_client_proxies,
    get_preferred_data_client,
    Ticker,
    RequestError,
    Contract,
)


# https://interactivebrokers.github.io/tws-api/tick_types.html
tick_types = {
    77: 'trade',

    # a "utrade" aka an off exchange "unreportable" (dark) vlm:
    # https://interactivebrokers.github.io/tws-api/tick_types.html#rt_volume
    48: 'dark_trade',

    # standard L1 ticks
    0: 'bsize',
    1: 'bid',
    2: 'ask',
    3: 'asize',
    4: 'last',
    5: 'size',
    8: 'volume',

    # ``ib_insync`` already packs these into
    # quotes under the following fields.
    # 55: 'trades_per_min',  # `'tradeRate'`
    # 56: 'vlm_per_min',  # `'volumeRate'`
    # 89: 'shortable',  # `'shortableShares'`
}


@acm
async def open_data_client() -> MethodProxy:
    '''
    Open the first found preferred "data client" as defined in the
    user's ``brokers.toml`` in the ``ib.prefer_data_account`` variable
    and deliver that client wrapped in a ``MethodProxy``.

    '''
    async with (
        open_client_proxies() as (proxies, clients),
    ):
        account_name, client = get_preferred_data_client(clients)
        proxy = proxies.get(f'ib.{account_name}')
        if not proxy:
            raise ValueError(
                f'No preferred data client could be found for {account_name}!'
            )

        yield proxy


@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:
    '''
    History retrieval endpoint - delivers a historical frame callable
    that takes in ``pendulum.datetime`` and returns ``numpy`` arrays.

    '''
    async with open_data_client() as proxy:

        async def get_hist(
            end_dt: Optional[datetime] = None,
            start_dt: Optional[datetime] = None,

        ) -> tuple[np.ndarray, str]:

            out, fails = await get_bars(proxy, symbol, end_dt=end_dt)

            # TODO: add logic here to handle tradable hours and only grab
            # valid bars in the range
            if out is None:
                # could be trying to retrieve bars over weekend
                log.error(f"Can't grab bars starting at {end_dt}!?!?")
                raise NoData(
                    f'{end_dt}',
                    frame_size=2000,
                )

            bars, bars_array, first_dt, last_dt = out

            # volume cleaning since there's -ve entries,
            # wood luv to know what crookery that is..
            vlm = bars_array['volume']
            vlm[vlm < 0] = 0

            return bars_array, first_dt, last_dt

        # TODO: it seems like we can do async queries for ohlc
        # but getting the order right still isn't working and I'm not
        # quite sure why.. needs some tinkering and probably
        # a lookthrough of the ``ib_insync`` machinery, for eg. maybe
        # we have to do the batch queries on the `asyncio` side?
        yield get_hist, {'erlangs': 1, 'rate': 6}


_pacing: str = (
    'Historical Market Data Service error '
    'message:Historical data request pacing violation'
)


async def get_bars(

    proxy: MethodProxy,
    fqsn: str,

    # blank to start which tells ib to look up the latest datum
    end_dt: str = '',

) -> (dict, np.ndarray):
    '''
    Retrieve historical data from a ``trio``-side task using
    a ``MethodProxy``.

    '''
    fails = 0
    bars: Optional[list] = None
    first_dt: datetime = None
    last_dt: datetime = None

    if end_dt:
        last_dt = pendulum.from_timestamp(end_dt.timestamp())

    for _ in range(10):
        try:
            out = await proxy.bars(
                fqsn=fqsn,
                end_dt=end_dt,
            )
            if out:
                bars, bars_array = out

            else:
                await tractor.breakpoint()

            if bars_array is None:
                raise SymbolNotFound(fqsn)

            first_dt = pendulum.from_timestamp(
                bars[0].date.timestamp())

            last_dt = pendulum.from_timestamp(
                bars[-1].date.timestamp())

            time = bars_array['time']
            assert time[-1] == last_dt.timestamp()
            assert time[0] == first_dt.timestamp()
            log.info(
                f'{len(bars)} bars retrieved for {first_dt} -> {last_dt}'
            )

            return (bars, bars_array, first_dt, last_dt), fails

        except RequestError as err:
            msg = err.message
            # why do we always need to rebind this?
            # _err = err

            if 'No market data permissions for' in msg:
                # TODO: signalling for no permissions searches
                raise NoData(
                    f'Symbol: {fqsn}',
                )

            elif (
                err.code == 162
                and 'HMDS query returned no data' in err.message
            ):
                # XXX: this is now done in the storage mgmt layer
                # and we shouldn't implicitly decrement the frame dt
                # index since the upper layer may be doing so
                # concurrently and we don't want to be delivering frames
                # that weren't asked for.
                log.warning(
                    f'NO DATA found ending @ {end_dt}\n'
                )

                # try to decrement start point and look further back
                # end_dt = last_dt = last_dt.subtract(seconds=2000)

                raise NoData(
                    f'Symbol: {fqsn}',
                    frame_size=2000,
                )

            elif _pacing in msg:

                log.warning(
                    'History throttle rate reached!\n'
                    'Resetting farms with `ctrl-alt-f` hack\n'
                )
                # TODO: we might have to put a task lock around this
                # method..
                hist_ev = proxy.status_event(
                    'HMDS data farm connection is OK:ushmds'
                )

                # XXX: other event messages we might want to try and
                # wait for but i wasn't able to get any of this
                # reliable..
                # reconnect_start = proxy.status_event(
                #     'Market data farm is connecting:usfuture'
                # )
                # live_ev = proxy.status_event(
                #     'Market data farm connection is OK:usfuture'
                # )

                # try to wait on the reset event(s) to arrive, a timeout
                # will trigger a retry up to 6 times (for now).
                tries: int = 2
                timeout: float = 10

                # try 3 time with a data reset then fail over to
                # a connection reset.
                for i in range(1, tries):

                    log.warning('Sending DATA RESET request')
                    await data_reset_hack(reset_type='data')

                    with trio.move_on_after(timeout) as cs:
                        for name, ev in [
                            # TODO: not sure if waiting on other events
                            # is all that useful here or not. in theory
                            # you could wait on one of the ones above
                            # first to verify the reset request was
                            # sent?
                            ('history', hist_ev),
                        ]:
                            await ev.wait()
                            log.info(f"{name} DATA RESET")
                            break

                    if cs.cancelled_caught:
                        fails += 1
                        log.warning(
                            f'Data reset {name} timeout, retrying {i}.'
                        )

                        continue
                else:

                    log.warning('Sending CONNECTION RESET')
                    await data_reset_hack(reset_type='connection')

                    with trio.move_on_after(timeout) as cs:
                        for name, ev in [
                            # TODO: not sure if waiting on other events
                            # is all that useful here or not. in theory
                            # you could wait on one of the ones above
                            # first to verify the reset request was
                            # sent?
                            ('history', hist_ev),
                        ]:
                            await ev.wait()
                            log.info(f"{name} DATA RESET")

                    if cs.cancelled_caught:
                        fails += 1
                        log.warning('Data CONNECTION RESET timeout!?')

            else:
                raise

    return None, None
    # else:  # throttle wasn't fixed so error out immediately
    #     raise _err


async def backfill_bars(

    fqsn: str,
    shm: ShmArray,  # type: ignore # noqa

    # TODO: we want to avoid overrunning the underlying shm array buffer
    # and we should probably calc the number of calls to make depending
    # on that until we have the `marketstore` daemon in place in which
    # case the shm size will be driven by user config and available sys
    # memory.
    count: int = 16,

    task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Fill historical bars into shared mem / storage afap.

    TODO: avoid pacing constraints:
    https://github.com/pikers/piker/issues/128

    '''
    # last_dt1 = None
    last_dt = None

    with trio.CancelScope() as cs:

        async with open_data_client() as proxy:

            out, fails = await get_bars(proxy, fqsn)

            if out is None:
                raise RuntimeError("Could not pull current history?!")

            (first_bars, bars_array, first_dt, last_dt) = out
            vlm = bars_array['volume']
            vlm[vlm < 0] = 0
            last_dt = first_dt

            # write historical data to buffer
            shm.push(bars_array)

            task_status.started(cs)

            i = 0
            while i < count:

                out, fails = await get_bars(proxy, fqsn, end_dt=first_dt)

                if out is None:
                    # could be trying to retrieve bars over weekend
                    # TODO: add logic here to handle tradable hours and
                    # only grab valid bars in the range
                    log.error(f"Can't grab bars starting at {first_dt}!?!?")

                    # XXX: get_bars() should internally decrement dt by
                    # 2k seconds and try again.
                    continue

                (first_bars, bars_array, first_dt, last_dt) = out
                # last_dt1 = last_dt
                # last_dt = first_dt

                # volume cleaning since there's -ve entries,
                # wood luv to know what crookery that is..
                vlm = bars_array['volume']
                vlm[vlm < 0] = 0

                # TODO we should probably dig into forums to see what peeps
                # think this data "means" and then use it as an indicator of
                # sorts? dinkus has mentioned that $vlms for the day dont'
                # match other platforms nor the summary stat tws shows in
                # the monitor - it's probably worth investigating.

                shm.push(bars_array, prepend=True)
                i += 1


asset_type_map = {
    'STK': 'stock',
    'OPT': 'option',
    'FUT': 'future',
    'CONTFUT': 'continuous_future',
    'CASH': 'forex',
    'IND': 'index',
    'CFD': 'cfd',
    'BOND': 'bond',
    'CMDTY': 'commodity',
    'FOP': 'futures_option',
    'FUND': 'mutual_fund',
    'WAR': 'warrant',
    'IOPT': 'warran',
    'BAG': 'bag',
    # 'NEWS': 'news',
}


_quote_streams: dict[str, trio.abc.ReceiveStream] = {}


async def _setup_quote_stream(

    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,

    symbol: str,
    opts: tuple[int] = (
        '375',  # RT trade volume (excludes utrades)
        '233',  # RT trade volume (includes utrades)
        '236',  # Shortable shares

        # these all appear to only be updated every 25s thus
        # making them mostly useless and explains why the scanner
        # is always slow XD
        # '293',  # Trade count for day
        '294',  # Trade rate / minute
        '295',  # Vlm rate / minute
    ),
    contract: Optional[Contract] = None,

) -> trio.abc.ReceiveChannel:
    '''
    Stream a ticker using the std L1 api.

    This task is ``asyncio``-side and must be called from
    ``tractor.to_asyncio.open_channel_from()``.

    '''
    global _quote_streams

    to_trio.send_nowait(None)

    async with load_aio_clients() as accts2clients:
        caccount_name, client = get_preferred_data_client(accts2clients)
        contract = contract or (await client.find_contract(symbol))
        ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts))

        # NOTE: it's batch-wise and slow af but I guess could
        # be good for backchecking? Seems to be every 5s maybe?
        # ticker: Ticker = client.ib.reqTickByTickData(
        #     contract, 'Last',
        # )

        # # define a simple queue push routine that streams quote packets
        # # to trio over the ``to_trio`` memory channel.
        # to_trio, from_aio = trio.open_memory_channel(2**8)  # type: ignore
        def teardown():
            ticker.updateEvent.disconnect(push)
            log.error(f"Disconnected stream for `{symbol}`")
            client.ib.cancelMktData(contract)

            # decouple broadcast mem chan
            _quote_streams.pop(symbol, None)

        def push(t: Ticker) -> None:
            """
            Push quotes to trio task.

            """
            # log.debug(t)
            try:
                to_trio.send_nowait(t)

            except (
                trio.BrokenResourceError,

                # XXX: HACK, not sure why this gets left stale (probably
                # due to our terrible ``tractor.to_asyncio``
                # implementation for streams.. but if the mem chan
                # gets left here and starts blocking just kill the feed?
                # trio.WouldBlock,
            ):
                # XXX: eventkit's ``Event.emit()`` for whatever redic
                # reason will catch and ignore regular exceptions
                # resulting in tracebacks spammed to console..
                # Manually do the dereg ourselves.
                teardown()
            except trio.WouldBlock:
                log.warning(
                    f'channel is blocking symbol feed for {symbol}?'
                    f'\n{to_trio.statistics}'
                )

            # except trio.WouldBlock:
            #     # for slow debugging purposes to avoid clobbering prompt
            #     # with log msgs
            #     pass

        ticker.updateEvent.connect(push)
        try:
            await asyncio.sleep(float('inf'))
        finally:
            teardown()

        # return from_aio


@acm
async def open_aio_quote_stream(

    symbol: str,
    contract: Optional[Contract] = None,

) -> trio.abc.ReceiveStream:

    from tractor.trionics import broadcast_receiver
    global _quote_streams

    from_aio = _quote_streams.get(symbol)
    if from_aio:

        # if we already have a cached feed deliver a rx side clone to consumer
        async with broadcast_receiver(
            from_aio,
            2**6,
        ) as from_aio:
            yield from_aio
            return

    async with tractor.to_asyncio.open_channel_from(
        _setup_quote_stream,
        symbol=symbol,
        contract=contract,

    ) as (first, from_aio):

        # cache feed for later consumers
        _quote_streams[symbol] = from_aio

        yield from_aio


# TODO: cython/mypyc/numba this!
def normalize(
    ticker: Ticker,
    calc_price: bool = False

) -> dict:

    # should be real volume for this contract by default
    calc_price = False

    # check for special contract types
    con = ticker.contract
    if type(con) in (
        ibis.Commodity,
        ibis.Forex,
    ):
        # commodities and forex don't have an exchange name and
        # no real volume so we have to calculate the price
        suffix = con.secType
        # no real volume on this tract
        calc_price = True

    else:
        suffix = con.primaryExchange
        if not suffix:
            suffix = con.exchange

        # append a `.<suffix>` to the returned symbol
        # key for derivatives that normally is the expiry
        # date key.
        expiry = con.lastTradeDateOrContractMonth
        if expiry:
            suffix += f'.{expiry}'

    # convert named tuples to dicts so we send usable keys
    new_ticks = []
    for tick in ticker.ticks:
        if tick and not isinstance(tick, dict):
            td = tick._asdict()
            td['type'] = tick_types.get(
                td['tickType'],
                'n/a',
            )

            new_ticks.append(td)

            tbt = ticker.tickByTicks
            if tbt:
                print(f'tickbyticks:\n {ticker.tickByTicks}')

    ticker.ticks = new_ticks

    # some contracts don't have volume so we may want to calculate
    # a midpoint price based on data we can acquire (such as bid / ask)
    if calc_price:
        ticker.ticks.append(
            {'type': 'trade', 'price': ticker.marketPrice()}
        )

    # serialize for transport
    data = asdict(ticker)

    # generate fqsn with possible specialized suffix
    # for derivatives, note the lowercase.
    data['symbol'] = data['fqsn'] = '.'.join(
        (con.symbol, suffix)
    ).lower()

    # convert named tuples to dicts for transport
    tbts = data.get('tickByTicks')
    if tbts:
        data['tickByTicks'] = [tbt._asdict() for tbt in tbts]

    # add time stamps for downstream latency measurements
    data['brokerd_ts'] = time.time()

    # stupid stupid shit...don't even care any more..
    # leave it until we do a proper latency study
    # if ticker.rtTime is not None:
    #     data['broker_ts'] = data['rtTime_s'] = float(
    #         ticker.rtTime.timestamp) / 1000.
    data.pop('rtTime')

    return data


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    '''
    Stream symbol quotes.

    This is a ``trio`` callable routine meant to be invoked
    once the brokerd is up.

    '''
    # TODO: support multiple subscriptions
    sym = symbols[0]
    log.info(f'request for real-time quotes: {sym}')

    async with open_data_client() as proxy:

        con, first_ticker, details = await proxy.get_sym_details(symbol=sym)
        first_quote = normalize(first_ticker)
        # print(f'first quote: {first_quote}')

        def mk_init_msgs() -> dict[str, dict]:
            '''
            Collect a bunch of meta-data useful for feed startup and
            pack in a `dict`-msg.

            '''
            # pass back some symbol info like min_tick, trading_hours, etc.
            syminfo = asdict(details)
            syminfo.update(syminfo['contract'])

            # nested dataclass we probably don't need and that won't IPC
            # serialize
            syminfo.pop('secIdList')

            # TODO: more consistent field translation
            atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]

            # for stocks it seems TWS reports too small a tick size
            # such that you can't submit orders with that granularity?
            min_tick = 0.01 if atype == 'stock' else 0

            syminfo['price_tick_size'] = max(syminfo['minTick'], min_tick)

            # for "traditional" assets, volume is normally discrete, not
            # a float
            syminfo['lot_tick_size'] = 0.0

            ibclient = proxy._aio_ns.ib.client
            host, port = ibclient.host, ibclient.port

            # TODO: for loop through all symbols passed in
            init_msgs = {
                # pass back token, and bool, signalling if we're the writer
                # and that history has been written
                sym: {
                    'symbol_info': syminfo,
                    'fqsn': first_quote['fqsn'],
                },
                'status': {
                    'data_ep': f'{host}:{port}',
                },

            }
            return init_msgs

        init_msgs = mk_init_msgs()

        # TODO: we should instead spawn a task that waits on a feed to start
        # and let it wait indefinitely..instead of this hard coded stuff.
        with trio.move_on_after(1):
            contract, first_ticker, details = await proxy.get_quote(symbol=sym)

        # it might be outside regular trading hours so see if we can at
        # least grab history.
        if isnan(first_ticker.last):
            task_status.started((init_msgs, first_quote))

            # it's not really live but this will unblock
            # the brokerd feed task to tell the ui to update?
            feed_is_live.set()

            # block and let data history backfill code run.
            await trio.sleep_forever()
            return  # we never expect feed to come up?

        async with open_aio_quote_stream(
            symbol=sym,
            contract=con,
        ) as stream:

            # ugh, clear ticks since we've consumed them
            # (ahem, ib_insync is stateful trash)
            first_ticker.ticks = []

            task_status.started((init_msgs, first_quote))

            async with aclosing(stream):
                if type(first_ticker.contract) not in (
                    ibis.Commodity,
                    ibis.Forex
                ):
                    # wait for real volume on feed (trading might be closed)
                    while True:
                        ticker = await stream.receive()

                        # for a real volume contract we wait for the first
                        # "real" trade to take place
                        if (
                            # not calc_price
                            # and not ticker.rtTime
                            not ticker.rtTime
                        ):
                            # spin consuming tickers until we get a real
                            # market datum
                            log.debug(f"New unsent ticker: {ticker}")
                            continue
                        else:
                            log.debug("Received first real volume tick")
                            # ugh, clear ticks since we've consumed them
                            # (ahem, ib_insync is truly stateful trash)
                            ticker.ticks = []

                            # XXX: this works because we don't use
                            # ``aclosing()`` above?
                            break

                quote = normalize(ticker)
                log.debug(f"First ticker received {quote}")

                # tell caller quotes are now coming in live
                feed_is_live.set()

                # last = time.time()
                async for ticker in stream:
                    quote = normalize(ticker)
                    await send_chan.send({quote['fqsn']: quote})

                    # ugh, clear ticks since we've consumed them
                    ticker.ticks = []
                    # last = time.time()


async def data_reset_hack(
    reset_type: str = 'data',

) -> None:
    '''
    Run key combos for resetting data feeds and yield back to caller
    when complete.

    This is a linux-only hack around:

    https://interactivebrokers.github.io/tws-api/historical_limitations.html#pacing_violations

    TODOs:
        - a return type that hopefully determines if the hack was
          successful.
        - other OS support?
        - integration with ``ib-gw`` run in docker + Xorg?

    '''

    async def vnc_click_hack(
        reset_type: str = 'data'
    ) -> None:
        '''
        Reset the data or network connection for the VNC attached
        ib gateway using magic combos.

        '''
        key = {'data': 'f', 'connection': 'r'}[reset_type]

        import asyncvnc

        async with asyncvnc.connect(
            'localhost',
            port=3003,
            # password='ibcansmbz',
        ) as client:

            # move to middle of screen
            # 640x1800
            client.mouse.move(
                x=500,
                y=500,
            )
            client.mouse.click()
            client.keyboard.press('Ctrl', 'Alt', key)  # keys are stacked

    await tractor.to_asyncio.run_task(vnc_click_hack)

    # we don't really need the ``xdotool`` approach any more B)
    return True


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> None:

    # TODO: load user defined symbol set locally for fast search?
    await ctx.started({})

    async with open_data_client() as proxy:
        async with ctx.open_stream() as stream:

            last = time.time()

            async for pattern in stream:
                log.debug(f'received {pattern}')
                now = time.time()

                assert pattern, 'IB can not accept blank search pattern'

                # throttle search requests to no faster than 1Hz
                diff = now - last
                if diff < 1.0:
                    log.debug('throttle sleeping')
                    await trio.sleep(diff)
                    try:
                        pattern = stream.receive_nowait()
                    except trio.WouldBlock:
                        pass

                if not pattern or pattern.isspace():
                    log.warning('empty pattern received, skipping..')

                    # TODO: *BUG* if nothing is returned here the client
                    # side will cache a null set result and not show
                    # anything to the user on re-searches when this query
                    # timed out. We probably need a special "timeout" msg
                    # or something...

                    # XXX: this unblocks the far end search task which may
                    # hold up a multi-search nursery block
                    await stream.send({})

                    continue

                log.debug(f'searching for {pattern}')

                last = time.time()

                # async batch search using api stocks endpoint and module
                # defined adhoc symbol set.
                stock_results = []

                async def stash_results(target: Awaitable[list]):
                    stock_results.extend(await target)

                async with trio.open_nursery() as sn:
                    sn.start_soon(
                        stash_results,
                        proxy.search_symbols(
                            pattern=pattern,
                            upto=5,
                        ),
                    )

                    # trigger async request
                    await trio.sleep(0)

                    # match against our ad-hoc set immediately
                    adhoc_matches = fuzzy.extractBests(
                        pattern,
                        list(_adhoc_futes_set),
                        score_cutoff=90,
                    )
                    log.info(f'fuzzy matched adhocs: {adhoc_matches}')
                    adhoc_match_results = {}
                    if adhoc_matches:
                        # TODO: do we need to pull contract details?
                        adhoc_match_results = {i[0]: {} for i in adhoc_matches}

                log.debug(f'fuzzy matching stocks {stock_results}')
                stock_matches = fuzzy.extractBests(
                    pattern,
                    stock_results,
                    score_cutoff=50,
                )

                matches = adhoc_match_results | {
                    item[0]: {} for item in stock_matches
                }
                # TODO: we used to deliver contract details
                # {item[2]: item[0] for item in stock_matches}

                log.debug(f"sending matches: {matches.keys()}")
                await stream.send(matches)
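To make the ``tick_types`` table above concrete, here is how ``normalize()`` labels raw IB ticks; the input dicts and the trimmed-down mapping are fabricated for illustration:

# minimal copy of the relevant entries from the table above
tick_types = {1: 'bid', 48: 'dark_trade'}

raw_ticks = [
    {'tickType': 1, 'price': 4100.25, 'size': 3},   # an L1 bid
    {'tickType': 48, 'price': 4100.50, 'size': 1},  # a dark pool trade
    {'tickType': 999, 'price': 0.0, 'size': 0},     # unknown type
]
labeled = [
    {**td, 'type': tick_types.get(td['tickType'], 'n/a')}
    for td in raw_ticks
]
# -> labels: 'bid', 'dark_trade', 'n/a'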
@@ -14,18 +14,20 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-"""
+'''
 Kraken backend.
 
-"""
+'''
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from dataclasses import asdict, field
-from typing import List, Dict, Any, Tuple, Optional
+from datetime import datetime
+from pprint import pformat
+from typing import Any, Optional, AsyncIterator, Callable, Union
 import time
 
 from trio_typing import TaskStatus
 import trio
-import arrow
+import pendulum
 import asks
 from fuzzywuzzy import process as fuzzy
 import numpy as np
@@ -33,12 +35,30 @@ import tractor
 from pydantic.dataclasses import dataclass
 from pydantic import BaseModel
 import wsproto
+import urllib.parse
+import hashlib
+import hmac
+import base64
+
+from .. import config
 from .._cacheables import open_cached_client
-from ._util import resproc, SymbolNotFound, BrokerError
+from ._util import (
+    resproc,
+    SymbolNotFound,
+    BrokerError,
+    DataThrottle,
+    DataUnavailable,
+)
 from ..log import get_logger, get_console_log
 from ..data import ShmArray
-from ..data._web_bs import open_autorecon_ws
+from ..data._web_bs import open_autorecon_ws, NoBsWs
+from ..clearing._paper_engine import PaperBoi
+from ..clearing._messages import (
+    BrokerdPosition, BrokerdOrder, BrokerdStatus,
+    BrokerdOrderAck, BrokerdError, BrokerdCancel,
+    BrokerdFill,
+)
 
 
 log = get_logger(__name__)
 
@@ -67,7 +87,7 @@ ohlc_dtype = np.dtype(_ohlc_dtype)
 _show_wap_in_history = True
 
 
-_symbol_info_translation: Dict[str, str] = {
+_symbol_info_translation: dict[str, str] = {
     'tick_decimals': 'pair_decimals',
 }
 
@@ -89,16 +109,16 @@ class Pair(BaseModel):
     lot_multiplier: float
 
     # array of leverage amounts available when buying
-    leverage_buy: List[int]
+    leverage_buy: list[int]
     # array of leverage amounts available when selling
-    leverage_sell: List[int]
+    leverage_sell: list[int]
 
     # fee schedule array in [volume, percent fee] tuples
-    fees: List[Tuple[int, float]]
+    fees: list[tuple[int, float]]
 
     # maker fee schedule array in [volume, percent fee] tuples (if on
     # maker/taker)
-    fees_maker: List[Tuple[int, float]]
+    fees_maker: list[tuple[int, float]]
 
     fee_volume_currency: str  # volume discount currency
     margin_call: str  # margin call level
@@ -106,13 +126,27 @@ class Pair(BaseModel):
     ordermin: float  # minimum order volume for pair
 
 
+class Trade(BaseModel):
+    '''
+    Trade class that helps parse and validate the ownTrades stream.
+
+    '''
+    reqid: str  # kraken order transaction id
+    action: str  # buy or sell
+    price: str  # price of asset
+    size: str  # vol of asset
+    broker_time: str  # broker fill timestamp
+
+
 @dataclass
 class OHLC:
-    """Description of the flattened OHLC quote format.
+    '''
+    Description of the flattened OHLC quote format.
 
     For schema details see:
     https://docs.kraken.com/websockets/#message-ohlc
-    """
+
+    '''
     chan_id: int  # internal kraken id
     chan_name: str  # eg. ohlc-1  (name-interval)
     pair: str  # fx pair
@@ -126,12 +160,54 @@ class OHLC:
     volume: float  # Accumulated volume **within interval**
     count: int  # Number of trades within interval
     # (sampled) generated tick data
-    ticks: List[Any] = field(default_factory=list)
+    ticks: list[Any] = field(default_factory=list)
 
 
+def get_config() -> dict[str, Any]:
+
+    conf, path = config.load()
+    section = conf.get('kraken')
+
+    if section is None:
+        log.warning(f'No config section found for kraken in {path}')
+        return {}
+
+    return section
+
+
+def get_kraken_signature(
+    urlpath: str,
+    data: dict[str, Any],
+    secret: str
+) -> str:
+    postdata = urllib.parse.urlencode(data)
+    encoded = (str(data['nonce']) + postdata).encode()
+    message = urlpath.encode() + hashlib.sha256(encoded).digest()
+
+    mac = hmac.new(base64.b64decode(secret), message, hashlib.sha512)
+    sigdigest = base64.b64encode(mac.digest())
+    return sigdigest.decode()
+
+
+class InvalidKey(ValueError):
+    '''
+    EAPI:Invalid key
+    This error is returned when the API key used for the call is
+    either expired or disabled, please review the API key in your
+    Settings -> API tab of account management or generate a new one
+    and update your application.
+
+    '''
+
+
 class Client:
 
-    def __init__(self) -> None:
+    def __init__(
+        self,
+        name: str = '',
+        api_key: str = '',
+        secret: str = ''
+    ) -> None:
         self._sesh = asks.Session(connections=4)
         self._sesh.base_location = _url
         self._sesh.headers.update({
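The signature routine added above follows Kraken's documented REST auth scheme: base64-encode HMAC-SHA512 of `uri_path + SHA256(nonce + urlencoded POST data)`, keyed with the base64-decoded API secret. A quick header-assembly sketch under that assumption (key and secret values are placeholders, not real credentials):

    import base64
    import hashlib
    import hmac
    import time
    import urllib.parse

    def kraken_sig(urlpath: str, data: dict, secret: str) -> str:
        # SHA256 over nonce + form body, then HMAC-SHA512 over path + digest
        postdata = urllib.parse.urlencode(data)
        digest = hashlib.sha256((str(data['nonce']) + postdata).encode()).digest()
        mac = hmac.new(base64.b64decode(secret), urlpath.encode() + digest, hashlib.sha512)
        return base64.b64encode(mac.digest()).decode()

    # placeholder credentials, illustration only
    secret = base64.b64encode(b'not-a-real-secret').decode()
    data = {'nonce': str(int(1000 * time.time()))}
    headers = {
        'API-Key': 'not-a-real-key',
        'API-Sign': kraken_sig('/0/private/Balance', data, secret),
    }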
@@ -139,9 +215,12 @@ class Client:
             'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
         })
         self._pairs: list[str] = []
+        self._name = name
+        self._api_key = api_key
+        self._secret = secret
 
     @property
-    def pairs(self) -> Dict[str, Any]:
+    def pairs(self) -> dict[str, Any]:
         if self._pairs is None:
             raise RuntimeError(
                 "Make sure to run `cache_symbols()` on startup!"
@@ -154,7 +233,7 @@ class Client:
         self,
         method: str,
         data: dict,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         resp = await self._sesh.post(
             path=f'/public/{method}',
             json=data,
@@ -162,6 +241,112 @@ class Client:
         )
         return resproc(resp, log)
 
+    async def _private(
+        self,
+        method: str,
+        data: dict,
+        uri_path: str
+    ) -> dict[str, Any]:
+        headers = {
+            'Content-Type':
+                'application/x-www-form-urlencoded',
+            'API-Key':
+                self._api_key,
+            'API-Sign':
+                get_kraken_signature(uri_path, data, self._secret)
+        }
+        resp = await self._sesh.post(
+            path=f'/private/{method}',
+            data=data,
+            headers=headers,
+            timeout=float('inf')
+        )
+        return resproc(resp, log)
+
+    async def endpoint(
+        self,
+        method: str,
+        data: dict[str, Any]
+    ) -> dict[str, Any]:
+        uri_path = f'/0/private/{method}'
+        data['nonce'] = str(int(1000*time.time()))
+        return await self._private(method, data, uri_path)
+
+    async def get_trades(
+        self,
+        data: dict[str, Any] = {}
+    ) -> dict[str, Any]:
+        data['ofs'] = 0
+        # Grab all trade history
+        # https://docs.kraken.com/rest/#operation/getTradeHistory
+        # Kraken uses 'ofs' to refer to the offset
+        while True:
+            resp = await self.endpoint('TradesHistory', data)
+            # grab the first 50 trades
+            if data['ofs'] == 0:
+                trades = resp['result']['trades']
+            # load the next 50 trades using dict constructor
+            # for speed
+            elif data['ofs'] == 50:
+                trades = dict(trades, **resp['result']['trades'])
+            # catch the end of the trades
+            elif resp['result']['trades'] == {}:
+                count = resp['result']['count']
+                break
+            # update existing dict if num trades exceeds 100
+            else:
+                trades.update(resp['result']['trades'])
+            # increment the offset counter
+            data['ofs'] += 50
+            # To avoid exceeding API rate limit in case of a lot of trades
+            await trio.sleep(1)
+
+        # make sure you grabbed all the trades
+        assert count == len(trades.values())
+
+        return trades
+
+    async def submit_limit(
+        self,
+        symbol: str,
+        price: float,
+        action: str,
+        size: float,
+        reqid: str = None,
+        validate: bool = False  # set True to test the call without a real submission
+    ) -> dict:
+        '''
+        Place an order and return integer request id provided by client.
+
+        '''
+        # Build common data dict for common keys from both endpoints
+        data = {
+            "pair": symbol,
+            "price": str(price),
+            "validate": validate
+        }
+        if reqid is None:
+            # Build order data for kraken api
+            data |= {
+                "ordertype": "limit", "type": action, "volume": str(size)
+            }
+            return await self.endpoint('AddOrder', data)
+        else:
+            # Edit order data for kraken api
+            data["txid"] = reqid
+            return await self.endpoint('EditOrder', data)
+
+    async def submit_cancel(
+        self,
+        reqid: str,
+    ) -> dict:
+        '''
+        Send cancel request for order id ``reqid``.
+
+        '''
+        # txid is a transaction id given by kraken
+        return await self.endpoint('CancelOrder', {"txid": reqid})
+
     async def symbol_info(
         self,
         pair: Optional[str] = None,
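`get_trades` above is offset pagination in 50-trade pages, terminating on the first empty page and cross-checking the reported total. The same pattern, extracted into a runnable sketch (the `fetch` callable and its response shape are assumptions standing in for the kraken endpoint):

    import trio

    async def paginate(fetch, page_size: int = 50) -> dict:
        # generic offset-paged accumulation: 'fetch' is any async callable
        # taking an offset and returning {'trades': {...}, 'count': N};
        # stop when a page comes back empty, then verify the total.
        results: dict = {}
        ofs = 0
        while True:
            page = await fetch(ofs)
            if not page['trades']:
                assert page['count'] == len(results)
                return results
            results.update(page['trades'])
            ofs += page_size
            await trio.sleep(1)  # stay under the API rate limit

    async def main():
        pages = [{'t1': {}, 't2': {}}, {}]
        async def fetch(ofs): return {'trades': pages[ofs // 50], 'count': 2}
        assert await paginate(fetch) == {'t1': {}, 't2': {}}

    trio.run(main)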
@@ -197,7 +382,7 @@ class Client:
         self,
         pattern: str,
         limit: int = None,
-    ) -> Dict[str, Any]:
+    ) -> dict[str, Any]:
         if self._pairs is not None:
             data = self._pairs
         else:
|
||||||
async def bars(
|
async def bars(
|
||||||
self,
|
self,
|
||||||
symbol: str = 'XBTUSD',
|
symbol: str = 'XBTUSD',
|
||||||
|
|
||||||
# UTC 2017-07-02 12:53:20
|
# UTC 2017-07-02 12:53:20
|
||||||
since: int = None,
|
since: Optional[Union[int, datetime]] = None,
|
||||||
count: int = 720, # <- max allowed per query
|
count: int = 720, # <- max allowed per query
|
||||||
as_np: bool = True,
|
as_np: bool = True,
|
||||||
|
|
||||||
) -> dict:
|
) -> dict:
|
||||||
|
|
||||||
if since is None:
|
if since is None:
|
||||||
since = arrow.utcnow().floor('minute').shift(
|
since = pendulum.now('UTC').start_of('minute').subtract(
|
||||||
minutes=-count).timestamp()
|
minutes=count).timestamp()
|
||||||
|
|
||||||
|
elif isinstance(since, int):
|
||||||
|
since = pendulum.from_timestamp(since).timestamp()
|
||||||
|
|
||||||
|
else: # presumably a pendulum datetime
|
||||||
|
since = since.timestamp()
|
||||||
|
|
||||||
# UTC 2017-07-02 12:53:20 is oldest seconds value
|
# UTC 2017-07-02 12:53:20 is oldest seconds value
|
||||||
since = str(max(1499000000, since))
|
since = str(max(1499000000, int(since)))
|
||||||
json = await self._public(
|
json = await self._public(
|
||||||
'OHLC',
|
'OHLC',
|
||||||
data={
|
data={
|
||||||
|
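The branching above is just input normalization: `since` may be absent, epoch seconds, or a (pendulum) datetime, and must come out as clamped integer epoch seconds. A compact sketch of the same idea:

    from datetime import datetime
    from typing import Optional, Union

    import pendulum

    def normalize_since(
        since: Optional[Union[int, datetime]],
        count: int = 720,
    ) -> int:
        # default: look back 'count' whole minutes from now (UTC)
        if since is None:
            dt = pendulum.now('UTC').start_of('minute').subtract(minutes=count)
            ts = dt.timestamp()
        elif isinstance(since, int):
            ts = since
        else:
            ts = since.timestamp()
        # clamp to the oldest value kraken accepts
        return max(1499000000, int(ts))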
@@ -268,11 +462,29 @@ class Client:
             array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
             return array
         except KeyError:
-            raise SymbolNotFound(json['error'][0] + f': {symbol}')
+            errmsg = json['error'][0]
+
+            if 'not found' in errmsg:
+                raise SymbolNotFound(errmsg + f': {symbol}')
+
+            elif 'Too many requests' in errmsg:
+                raise DataThrottle(f'{symbol}')
+
+            else:
+                raise BrokerError(errmsg)
 
 
-@asynccontextmanager
+@acm
 async def get_client() -> Client:
 
-    client = Client()
+    section = get_config()
+    if section:
+        client = Client(
+            name=section['key_descr'],
+            api_key=section['api_key'],
+            secret=section['secret']
+        )
+    else:
+        client = Client()
 
     # at startup, load all symbols locally for fast search
@@ -281,8 +493,382 @@ async def get_client() -> Client:
     yield client
 
 
-async def stream_messages(ws):
+def pack_positions(
+    acc: str,
+    trades: dict
+) -> list[Any]:
+    positions: dict[str, float] = {}
+    vols: dict[str, float] = {}
+    costs: dict[str, float] = {}
+    position_msgs: list[Any] = []
+
+    for trade in trades.values():
+        sign = -1 if trade['type'] == 'sell' else 1
+        pair = trade['pair']
+        vol = float(trade['vol'])
+        vols[pair] = vols.get(pair, 0) + sign * vol
+        costs[pair] = costs.get(pair, 0) + sign * float(trade['cost'])
+        positions[pair] = costs[pair] / vols[pair] if vols[pair] else 0
+
+    for ticker, pos in positions.items():
+        vol = float(vols[ticker])
+        if not vol:
+            continue
+        norm_sym = normalize_symbol(ticker)
+        msg = BrokerdPosition(
+            broker='kraken',
+            account=acc,
+            symbol=norm_sym,
+            currency=norm_sym[-3:],
+            size=vol,
+            avg_price=float(pos),
+        )
+        position_msgs.append(msg.dict())
+
+    return position_msgs
+
+
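The running tallies in `pack_positions()` amount to a signed volume-weighted average entry price per pair. The same arithmetic isolated into a checkable sketch (the trade records are simplified assumptions):

    def avg_entry_price(trades: list[dict]) -> dict[str, float]:
        # net volume and signed cost per pair; avg price = cost / volume
        vols: dict[str, float] = {}
        costs: dict[str, float] = {}
        for t in trades:
            sign = -1 if t['type'] == 'sell' else 1
            vols[t['pair']] = vols.get(t['pair'], 0) + sign * t['vol']
            costs[t['pair']] = costs.get(t['pair'], 0) + sign * t['cost']
        return {
            pair: costs[pair] / vol
            for pair, vol in vols.items()
            if vol
        }

    trades = [
        {'pair': 'XBTUSD', 'type': 'buy', 'vol': 1.0, 'cost': 40_000.0},
        {'pair': 'XBTUSD', 'type': 'buy', 'vol': 1.0, 'cost': 42_000.0},
    ]
    assert avg_entry_price(trades) == {'XBTUSD': 41_000.0}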
+def normalize_symbol(
+    ticker: str
+) -> str:
+    # This is to convert symbol names from what kraken
+    # uses to the traditional 3x3 pair symbol syntax
+    symlen = len(ticker)
+    if symlen == 6:
+        return ticker.lower()
+    else:
+        for sym in ['XXBT', 'XXMR', 'ZEUR']:
+            if sym in ticker:
+                ticker = ticker.replace(sym, sym[1:])
+        return ticker.lower()
+
+
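For reference, the renames this implies (the longer pair names here are assumed examples of kraken's legacy X/Z asset-class prefixes):

    # 6-char pairs pass straight through; longer legacy names get
    # the kraken asset-class prefix stripped, then lowercased:
    # 'XBTUSD'   -> 'xbtusd'
    # 'XXBTZEUR' -> 'xbteur'   (XXBT -> XBT, ZEUR -> EUR)
    # 'XXMRZEUR' -> 'xmreur'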
+def make_auth_sub(data: dict[str, Any]) -> dict[str, str]:
+    '''
+    Create a request subscription packet dict.
+
+    ## TODO: point to the auth urls
+    https://docs.kraken.com/websockets/#message-subscribe
+
+    '''
+    # eg. specific logic for this in kraken's sync client:
+    # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
+    return {
+        'event': 'subscribe',
+        'subscription': data,
+    }
+
+
+async def handle_order_requests(
+
+    client: Client,
+    ems_order_stream: tractor.MsgStream,
+
+) -> None:
+
+    request_msg: dict
+    order: BrokerdOrder
+
+    async for request_msg in ems_order_stream:
+        log.info(
+            'Received order request:\n'
+            f'{pformat(request_msg)}'
+        )
+
+        action = request_msg['action']
+
+        if action in {'buy', 'sell'}:
+
+            account = request_msg['account']
+            if account != 'kraken.spot':
+                log.error(
+                    'This is a kraken account, '
+                    'only a `kraken.spot` selection is valid'
+                )
+                await ems_order_stream.send(BrokerdError(
+                    oid=request_msg['oid'],
+                    symbol=request_msg['symbol'],
+
+                    # reason=f'Kraken only, no account found: `{account}`?',
+                    reason=(
+                        'Kraken only, order mode disabled due to '
+                        'https://github.com/pikers/piker/issues/299'
+                    ),
+
+                ).dict())
+                continue
+
+            # validate
+            order = BrokerdOrder(**request_msg)
+            # call our client api to submit the order
+            resp = await client.submit_limit(
+                symbol=order.symbol,
+                price=order.price,
+                action=order.action,
+                size=order.size,
+                reqid=order.reqid,
+            )
+
+            err = resp['error']
+            if err:
+                oid = order.oid
+                log.error(f'Failed to submit order: {oid}')
+
+                await ems_order_stream.send(
+                    BrokerdError(
+                        oid=order.oid,
+                        reqid=order.reqid,
+                        symbol=order.symbol,
+                        reason="Failed order submission",
+                        broker_details=resp
+                    ).dict()
+                )
+            else:
+                # TODO: handle multiple orders (cancels?)
+                # txid is an array of strings
+                if order.reqid is None:
+                    reqid = resp['result']['txid'][0]
+                else:
+                    # update the internal pairing of oid to kraken's
+                    # txid with the new txid that is returned on edit
+                    reqid = resp['result']['txid']
+
+                # deliver ack that order has been submitted to broker routing
+                await ems_order_stream.send(
+                    BrokerdOrderAck(
+
+                        # ems order request id
+                        oid=order.oid,
+
+                        # broker specific request id
+                        reqid=reqid,
+
+                        # account that made the order
+                        account=order.account
+
+                    ).dict()
+                )
+
+        elif action == 'cancel':
+            msg = BrokerdCancel(**request_msg)
+
+            # Send order cancellation to kraken
+            resp = await client.submit_cancel(
+                reqid=msg.reqid
+            )
+
+            # Check to make sure there was no error returned by
+            # the kraken endpoint. Assert one order was cancelled.
+            try:
+                result = resp['result']
+                count = result['count']
+
+            # check for 'error' key if we received no 'result'
+            except KeyError:
+                error = resp.get('error')
+
+                await ems_order_stream.send(
+                    BrokerdError(
+                        oid=msg.oid,
+                        reqid=msg.reqid,
+                        symbol=msg.symbol,
+                        reason="Failed order cancel",
+                        broker_details=resp
+                    ).dict()
+                )
+
+                if not error:
+                    raise BrokerError(f'Unknown order cancel response: {resp}')
+
+            else:
+                if not count:  # no orders were cancelled?
+
+                    # XXX: what exactly is this from and why would we care?
+                    # there doesn't seem to be any docs here?
+                    # https://docs.kraken.com/rest/#operation/cancelOrder
+
+                    # Check to make sure the cancellation is NOT pending,
+                    # then send the confirmation to the ems order stream
+                    pending = result.get('pending')
+                    if pending:
+                        log.error(f'Order {oid} cancel was not yet successful')
+
+                        await ems_order_stream.send(
+                            BrokerdError(
+                                oid=msg.oid,
+                                reqid=msg.reqid,
+                                symbol=msg.symbol,
+                                # TODO: maybe figure out if pending
+                                # cancels will eventually get cancelled
+                                reason="Order cancel is still pending?",
+                                broker_details=resp
+                            ).dict()
+                        )
+
+                else:  # order cancel success case.
+
+                    await ems_order_stream.send(
+                        BrokerdStatus(
+                            reqid=msg.reqid,
+                            account=msg.account,
+                            time_ns=time.time_ns(),
+                            status='cancelled',
+                            reason='Order cancelled',
+                            broker_details={'name': 'kraken'}
+                        ).dict()
+                    )
+        else:
+            log.error(f'Unknown order command: {request_msg}')
+
+
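Structurally the relay above is a plain action dispatch over an inbound message stream, with unknown actions logged rather than crashing the loop. The bare shape, as a sketch (message fields and the print stand-ins are assumptions):

    async def dispatch(stream) -> None:
        # route each request dict on its 'action' field
        async for msg in stream:
            action = msg['action']
            if action in {'buy', 'sell'}:
                print(f"submit {action} {msg['symbol']}")
            elif action == 'cancel':
                print(f"cancel {msg['reqid']}")
            else:
                print(f'unknown order command: {msg}')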
+@tractor.context
+async def trades_dialogue(
+    ctx: tractor.Context,
+    loglevel: str = None,
+) -> AsyncIterator[dict[str, Any]]:
+
+    # XXX: required to propagate ``tractor`` loglevel to piker logging
+    get_console_log(loglevel or tractor.current_actor().loglevel)
+
+    @acm
+    async def subscribe(ws: wsproto.WSConnection, token: str):
+        # XXX: setup subs
+        # https://docs.kraken.com/websockets/#message-subscribe
+        # specific logic for this in kraken's sync client:
+        # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
+        trades_sub = make_auth_sub(
+            {'name': 'ownTrades', 'token': token}
+        )
+
+        # TODO: we want to eventually allow unsubs which should
+        # be completely fine to request from a separate task
+        # since internally the ws methods appear to be FIFO
+        # locked.
+        await ws.send_msg(trades_sub)
+
+        yield
+
+        # unsub from all pairs on teardown
+        await ws.send_msg({
+            'event': 'unsubscribe',
+            'subscription': ['ownTrades'],
+        })
+
+        # XXX: do we need to ack the unsub?
+        # await ws.recv_msg()
+
+    # Authenticated block
+    async with get_client() as client:
+        if not client._api_key:
+            log.error('Missing Kraken API key: Trades WS connection failed')
+            await ctx.started(({}, ['paper']))
+
+            async with (
+                ctx.open_stream() as ems_stream,
+                trio.open_nursery() as n,
+            ):
+
+                client = PaperBoi(
+                    'kraken',
+                    ems_stream,
+                    _buys={},
+                    _sells={},
+
+                    _reqids={},
+
+                    # TODO: load paper positions from ``positions.toml``
+                    _positions={},
+                )
+
+                # TODO: maybe add multiple accounts
+                n.start_soon(handle_order_requests, client, ems_stream)
+
+        acc_name = 'kraken.' + client._name
+        trades = await client.get_trades()
+
+        position_msgs = pack_positions(acc_name, trades)
+
+        await ctx.started((position_msgs, (acc_name,)))
+
+        # Get websocket token for authenticated data stream
+        # Assert that a token was actually received.
+        resp = await client.endpoint('GetWebSocketsToken', {})
+
+        # lol wtf is this..
+        assert resp['error'] == []
+
+        token = resp['result']['token']
+
+        async with (
+            ctx.open_stream() as ems_stream,
+            trio.open_nursery() as n,
+        ):
+            # TODO: maybe add multiple accounts
+            n.start_soon(handle_order_requests, client, ems_stream)
+
+            # Process trades msg stream of ws
+            async with open_autorecon_ws(
+                'wss://ws-auth.kraken.com/',
+                fixture=subscribe,
+                token=token,
+            ) as ws:
+                async for msg in process_trade_msgs(ws):
+                    for trade in msg:
+                        # check the type of packaged message
+                        assert type(trade) == Trade
+
+                        # prepare and send a filled status update
+                        filled_msg = BrokerdStatus(
+                            reqid=trade.reqid,
+                            time_ns=time.time_ns(),
+
+                            account='kraken.spot',
+                            status='filled',
+                            filled=float(trade.size),
+                            reason='Order filled by kraken',
+                            broker_details={
+                                'name': 'kraken',
+                                'broker_time': trade.broker_time
+                            },
+
+                            # TODO: figure out if kraken gives a count
+                            # of how many units of underlying were
+                            # filled. Alternatively we can decrement
+                            # this value ourselves by associating and
+                            # calcing from the diff with the original
+                            # client-side request, see:
+                            # https://github.com/pikers/piker/issues/296
+                            remaining=0,
+                        )
+
+                        await ems_stream.send(filled_msg.dict())
+
+                        # send a fill msg for gui update
+                        fill_msg = BrokerdFill(
+                            reqid=trade.reqid,
+                            time_ns=time.time_ns(),
+
+                            action=trade.action,
+                            size=float(trade.size),
+                            price=float(trade.price),
+                            # TODO: maybe capture more msg data i.e fees?
+                            broker_details={'name': 'kraken'},
+                            broker_time=float(trade.broker_time)
+                        )
+
+                        await ems_stream.send(fill_msg.dict())
+
+
+async def stream_messages(
+    ws: NoBsWs,
+):
+    '''
+    Message stream parser and heartbeat handler.
+
+    Deliver ws subscription messages as well as handle heartbeat logic
+    through a single async generator.
+
+    '''
     too_slow_count = last_hb = 0
 
     while True:
@@ -320,6 +906,18 @@ async def stream_messages(ws):
             if err:
                 raise BrokerError(err)
         else:
+            yield msg
+
+
+async def process_data_feed_msgs(
+    ws: NoBsWs,
+):
+    '''
+    Parse and pack data feed messages.
+
+    '''
+    async for msg in stream_messages(ws):
+
         chan_id, *payload_array, chan_name, pair = msg
 
         if 'ohlc' in chan_name:
@@ -349,10 +947,54 @@ async def stream_messages(ws):
 
         else:
             print(f'UNHANDLED MSG: {msg}')
+            yield msg
+
+
+async def process_trade_msgs(
+    ws: NoBsWs,
+):
+    '''
+    Parse and pack trade subscription messages.
+
+    '''
+    sequence_counter = 0
+    async for msg in stream_messages(ws):
+
+        try:
+            # check that we are on the ownTrades stream and that msgs
+            # are arriving in sequence with kraken. For clarification
+            # see the kraken ws api docs for this stream:
+            # https://docs.kraken.com/websockets/#message-ownTrades
+            assert msg[1] == 'ownTrades'
+            assert msg[2]['sequence'] > sequence_counter
+            sequence_counter += 1
+            raw_msgs = msg[0]
+            trade_msgs = []
+
+            # Check that we are only processing new trades
+            if msg[2]['sequence'] != 1:
+                # check if its a new order or an update msg
+                for trade_msg in raw_msgs:
+                    trade = list(trade_msg.values())[0]
+                    order_msg = Trade(
+                        reqid=trade['ordertxid'],
+                        action=trade['type'],
+                        price=trade['price'],
+                        size=trade['vol'],
+                        broker_time=trade['time']
+                    )
+                    trade_msgs.append(order_msg)
+
+            yield trade_msgs
+
+        except AssertionError:
+            print(f'UNHANDLED MSG: {msg}')
+            yield msg
+
+
 def normalize(
     ohlc: OHLC,
 
 ) -> dict:
     quote = asdict(ohlc)
     quote['broker_ts'] = quote['time']
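The assertions above enforce the ownTrades contract: the third element carries a monotonically increasing `sequence`, so any gap or replay trips the check. Isolated into a small sketch (the message shape matches kraken's docs; the test values are assumptions):

    def check_sequence(msg: list, last_seq: int) -> int:
        # ownTrades msgs carry a monotonically increasing 'sequence';
        # a gap or out-of-order delivery shows up as a failed check
        assert msg[1] == 'ownTrades'
        seq = msg[2]['sequence']
        assert seq > last_seq, f'out of order: {seq} <= {last_seq}'
        return seq

    seq = check_sequence([[], 'ownTrades', {'sequence': 1}], 0)
    seq = check_sequence([[], 'ownTrades', {'sequence': 2}], seq)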
@@ -370,12 +1012,13 @@ def normalize(
     return topic, quote
 
 
-def make_sub(pairs: List[str], data: Dict[str, Any]) -> Dict[str, str]:
+def make_sub(pairs: list[str], data: dict[str, Any]) -> dict[str, str]:
-    """Create a request subscription packet dict.
+    '''
+    Create a request subscription packet dict.
 
     https://docs.kraken.com/websockets/#message-subscribe
 
-    """
+    '''
     # eg. specific logic for this in kraken's sync client:
     # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
     return {
@@ -385,6 +1028,55 @@ def make_sub(pairs: list[str], data: dict[str, Any]) -> dict[str, str]:
     }
 
 
+@acm
+async def open_history_client(
+    symbol: str,
+
+) -> tuple[Callable, int]:
+
+    # TODO implement history getter for the new storage layer.
+    async with open_cached_client('kraken') as client:
+
+        # lol, kraken won't send any more than the "last"
+        # 720 1m bars.. so we have to just ignore further
+        # requests of this type..
+        queries: int = 0
+
+        async def get_ohlc(
+            end_dt: Optional[datetime] = None,
+            start_dt: Optional[datetime] = None,
+
+        ) -> tuple[
+            np.ndarray,
+            datetime,  # start
+            datetime,  # end
+        ]:
+
+            nonlocal queries
+            if queries > 0:
+                raise DataUnavailable
+
+            count = 0
+            while count <= 3:
+                try:
+                    array = await client.bars(
+                        symbol,
+                        since=end_dt,
+                    )
+                    count += 1
+                    queries += 1
+                    break
+                except DataThrottle:
+                    log.warning(f'kraken OHLC throttle for {symbol}')
+                    await trio.sleep(1)
+
+            start_dt = pendulum.from_timestamp(array[0]['time'])
+            end_dt = pendulum.from_timestamp(array[-1]['time'])
+            return array, start_dt, end_dt
+
+        yield get_ohlc, {'erlangs': 1, 'rate': 1}
+
+
 async def backfill_bars(
 
     sym: str,
@@ -393,8 +1085,9 @@ async def backfill_bars(
     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
 
 ) -> None:
-    """Fill historical bars into shared mem / storage afap.
-    """
+    '''
+    Fill historical bars into shared mem / storage afap.
+
+    '''
     with trio.CancelScope() as cs:
         async with open_cached_client('kraken') as client:
             bars = await client.bars(symbol=sym)
|
||||||
async def stream_quotes(
|
async def stream_quotes(
|
||||||
|
|
||||||
send_chan: trio.abc.SendChannel,
|
send_chan: trio.abc.SendChannel,
|
||||||
symbols: List[str],
|
symbols: list[str],
|
||||||
shm: ShmArray,
|
|
||||||
feed_is_live: trio.Event,
|
feed_is_live: trio.Event,
|
||||||
loglevel: str = None,
|
loglevel: str = None,
|
||||||
|
|
||||||
|
@@ -414,13 +1106,15 @@ async def stream_quotes(
     sub_type: str = 'ohlc',
 
     # startup sync
-    task_status: TaskStatus[Tuple[Dict, Dict]] = trio.TASK_STATUS_IGNORED,
+    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,
 
 ) -> None:
-    """Subscribe for ohlc stream of quotes for ``pairs``.
+    '''
+    Subscribe for ohlc stream of quotes for ``pairs``.
 
     ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.
-    """
+
+    '''
     # XXX: required to propagate ``tractor`` loglevel to piker logging
     get_console_log(loglevel or tractor.current_actor().loglevel)
 
@@ -451,10 +1145,11 @@ async def stream_quotes(
         symbol: {
             'symbol_info': sym_infos[sym],
             'shm_write_opts': {'sum_tick_vml': False},
+            'fqsn': sym,
         },
     }
 
-    @asynccontextmanager
+    @acm
     async def subscribe(ws: wsproto.WSConnection):
         # XXX: setup subs
         # https://docs.kraken.com/websockets/#message-subscribe
@@ -492,23 +1187,23 @@ async def stream_quotes(
     # XXX: do we need to ack the unsub?
     # await ws.recv_msg()
 
-    # see the tips on reonnection logic:
+    # see the tips on reconnection logic:
     # https://support.kraken.com/hc/en-us/articles/360044504011-WebSocket-API-unexpected-disconnections-from-market-data-feeds
+    ws: NoBsWs
     async with open_autorecon_ws(
         'wss://ws.kraken.com/',
         fixture=subscribe,
     ) as ws:
 
         # pull a first quote and deliver
-        msg_gen = stream_messages(ws)
+        msg_gen = process_data_feed_msgs(ws)
 
         # TODO: use ``anext()`` when it lands in 3.10!
         typ, ohlc_last = await msg_gen.__anext__()
 
         topic, quote = normalize(ohlc_last)
 
-        first_quote = {topic: quote}
-        task_status.started((init_msgs, first_quote))
+        task_status.started((init_msgs, quote))
 
         # lol, only "closes" when they're margin squeezing clients ;P
         feed_is_live.set()
@@ -553,14 +1248,13 @@ async def stream_quotes(
                 quote = ohlc
                 topic = quote['symbol'].lower()
 
-                # XXX: format required by ``tractor.msg.pub``
-                # requires a ``Dict[topic: str, quote: dict]``
                 await send_chan.send({topic: quote})
 
 
 @tractor.context
 async def open_symbol_search(
     ctx: tractor.Context,
 
 ) -> Client:
     async with open_cached_client('kraken') as client:
@@ -19,7 +19,6 @@ Questrade API backend.
 """
 from __future__ import annotations
 import inspect
-import contextlib
 import time
 from datetime import datetime
 from functools import partial
 
@@ -32,11 +31,10 @@ from typing import (
     Callable,
 )
 
-import arrow
+import pendulum
 import trio
 import tractor
 from async_generator import asynccontextmanager
-import pandas as pd
 import numpy as np
 import wrapt
 import asks
 
@@ -46,7 +44,6 @@ from .._cacheables import open_cached_client, async_lifo_cache
 from .. import config
 from ._util import resproc, BrokerError, SymbolNotFound
 from ..log import get_logger, colorize_json, get_console_log
-from . import get_brokermod
 
 
 log = get_logger(__name__)
 
@@ -601,12 +598,16 @@ class Client:
         sid = sids[symbol]
 
         # get last market open end time
-        est_end = now = arrow.utcnow().to('US/Eastern').floor('minute')
+        est_end = now = pendulum.now('UTC').in_timezone(
+            'America/New_York').start_of('minute')
 
         # on non-paid feeds we can't retrieve the first 15 mins
         wd = now.isoweekday()
         if wd > 5:
             quotes = await self.quote([symbol])
-            est_end = arrow.get(quotes[0]['lastTradeTime'])
+            est_end = pendulum.parse(
+                quotes[0]['lastTradeTime']
+            )
             if est_end.hour == 0:
                 # XXX don't bother figuring out extended hours for now
                 est_end = est_end.replace(hour=17)
 
@@ -667,7 +668,7 @@ def get_OHLCV(
     """
     del bar['end']
     del bar['VWAP']
-    bar['start'] = pd.Timestamp(bar['start']).value/10**9
+    bar['start'] = pendulum.parse(bar['start']).timestamp()
     return tuple(bar.values())
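The arrow-to-pendulum migration above maps one-to-one; the equivalences assumed throughout these hunks are:

    import pendulum

    # arrow.utcnow().floor('minute')  ->
    now = pendulum.now('UTC').start_of('minute')
    # arrow's .to('US/Eastern')       ->
    est = now.in_timezone('America/New_York')
    # arrow.get(iso_string)           ->
    dt = pendulum.parse('2017-07-02T12:53:20')
    # epoch seconds, both directions
    ts = dt.timestamp()
    back = pendulum.from_timestamp(ts)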
@@ -27,27 +27,32 @@ _mag2suffix = bidict({3: 'k', 6: 'M', 9: 'B'})
 
 
 def humanize(
 
     number: float,
     digits: int = 1
 
 ) -> str:
-    '''Convert large numbers to something with at most ``digits`` and
+    '''
+    Convert large numbers to something with at most ``digits`` and
     a letter suffix (eg. k: thousand, M: million, B: billion).
 
     '''
     try:
         float(number)
     except ValueError:
-        return 0
+        return '0'
 
     if not number or number <= 0:
-        return round(number, ndigits=digits)
+        return str(round(number, ndigits=digits))
 
-    mag = math.floor(math.log(number, 10))
+    mag = round(math.log(number, 10))
     if mag < 3:
-        return round(number, ndigits=digits)
+        return str(round(number, ndigits=digits))
 
-    maxmag = max(itertools.takewhile(lambda key: mag >= key, _mag2suffix))
+    maxmag = max(
+        itertools.takewhile(
+            lambda key: mag >= key, _mag2suffix
+        )
+    )
 
     return "{value}{suffix}".format(
         value=round(number/10**maxmag, ndigits=digits),
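Expected outputs under the new `round()`-based magnitude, with inputs chosen away from the rounding boundary (worked values, not test vectors from the repo):

    # humanize(120)            -> '120'   (mag < 3: no suffix)
    # humanize(1_500)          -> '1.5k'
    # humanize(2_300_000)      -> '2.3M'
    # humanize(7_100_000_000)  -> '7.1B'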
@@ -29,7 +29,8 @@ from ._messages import BrokerdPosition, Status
 
 
 class Position(BaseModel):
-    '''Basic pp (personal position) model with attached fills history.
+    '''
+    Basic pp (personal position) model with attached fills history.
 
     This type should be IPC wire ready?
 
@@ -61,6 +62,15 @@ class Position(BaseModel):
         self.avg_price = avg_price
         self.size = size
 
+    @property
+    def dsize(self) -> float:
+        '''
+        The "dollar" size of the pp, normally in trading (fiat) unit
+        terms.
+
+        '''
+        return self.avg_price * self.size
+
 
 _size_units = bidict({
     'currency': '$ size',
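`dsize` is just the fiat notional of the position. A tiny self-contained check of the arithmetic (the dataclass and values are illustrative, not the pydantic model from the diff):

    from dataclasses import dataclass

    @dataclass
    class PP:
        avg_price: float
        size: float

        @property
        def dsize(self) -> float:
            # fiat ("dollar") notional of the position
            return self.avg_price * self.size

    assert PP(avg_price=105.0, size=2).dsize == 210.0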
@@ -87,13 +97,21 @@ class Allocator(BaseModel):
 
     symbol: Symbol
     account: Optional[str] = 'paper'
-    size_unit: SizeUnit = 'currency'
+    # TODO: for enums this clearly doesn't fucking work, you can't set
+    # a default at startup by passing in a `dict` but yet you can set
+    # that value through assignment..for wtv cucked reason.. honestly, pure
+    # unintuitive garbage.
+    size_unit: str = 'currency'
     _size_units: dict[str, Optional[str]] = _size_units
 
-    @validator('size_unit')
-    def lookup_key(cls, v):
+    @validator('size_unit', pre=True)
+    def maybe_lookup_key(cls, v):
         # apply the corresponding enum key for the text "description" value
-        return v.name
+        if v not in _size_units:
+            return _size_units.inverse[v]
+
+        assert v in _size_units
+        return v
 
     # TODO: if we ever want to support non-uniform entry-slot-proportion
     # "sizes"
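The validator leans on `bidict`'s reverse mapping to accept either the canonical key or its display label. The lookup in isolation (the `'units'` entry is an assumed second mapping for illustration):

    from bidict import bidict

    _size_units = bidict({
        'currency': '$ size',
        'units': '# units',
    })

    def to_key(v: str) -> str:
        # accept either the key ('currency') or its display value
        # ('$ size') and always return the canonical key
        if v not in _size_units:
            return _size_units.inverse[v]
        return v

    assert to_key('$ size') == 'currency'
    assert to_key('units') == 'units'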
@@ -106,7 +124,8 @@ class Allocator(BaseModel):
     def step_sizes(
         self,
     ) -> (float, float):
-        '''Return the units size for each unit type as a tuple.
+        '''
+        Return the units size for each unit type as a tuple.
 
         '''
         slots = self.slots
 
@@ -134,7 +153,8 @@ class Allocator(BaseModel):
         action: str,
 
     ) -> dict:
-        '''Generate order request info for the "next" submittable order
+        '''
+        Generate order request info for the "next" submittable order
         depending on position / order entry config.
 
         '''
 
@@ -157,6 +177,11 @@ class Allocator(BaseModel):
             slot_size = currency_per_slot / price
             l_sub_pp = (self.currency_limit - live_cost_basis) / price
 
+        else:
+            raise ValueError(
+                f"Not a valid size unit '{size_unit}'"
+            )
+
         # an entry (adding-to or starting a pp)
         if (
             action == 'buy' and live_size > 0 or
 
@@ -204,7 +229,14 @@ class Allocator(BaseModel):
             # **without** going past a net-zero pp. if the pp is
             # > 1.5x a slot size, then front load: exit a slot's worth and
             # expect net-zero to be acquired on the final exit.
-            slot_size < pp_size < round((1.5*slot_size), ndigits=ld)
+            slot_size < pp_size < round((1.5*slot_size), ndigits=ld) or
+
+            # underlying requires discrete (int) units (eg. stocks)
+            # and thus our slot size (based on our limit) would
+            # exit a fractional unit's worth so, presuming we aren't
+            # supporting a fractional-units-style broker, we need to
+            # exit the final unit.
+            ld == 0 and abs_live_size == 1
         ):
             order_size = abs_live_size
 
@@ -232,7 +264,8 @@ class Allocator(BaseModel):
         pp: Position,
 
     ) -> float:
-        '''Calc and return the number of slots used by this ``Position``.
+        '''
+        Calc and return the number of slots used by this ``Position``.
 
         '''
         abs_pp_size = abs(pp.size)
 
@@ -251,6 +284,14 @@ class Allocator(BaseModel):
         return round(prop * self.slots)
 
 
+_derivs = (
+    'future',
+    'continuous_future',
+    'option',
+    'futures_option',
+)
+
+
 def mk_allocator(
 
     symbol: Symbol,
 
@@ -259,7 +300,7 @@ def mk_allocator(
     # default allocation settings
     defaults: dict[str, float] = {
         'account': None,  # select paper by default
-        'size_unit': _size_units['currency'],
+        'size_unit': 'currency',
         'units_limit': 400,
         'currency_limit': 5e3,
         'slots': 4,
 
@@ -274,8 +315,8 @@ def mk_allocator(
     # load and retrieve user settings for default allocations
     # ``config.toml``
     user_def = {
-        'currency_limit': 5e3,
+        'currency_limit': 6e3,
-        'slots': 4,
+        'slots': 6,
     }
 
     defaults.update(user_def)
 
@@ -289,8 +330,7 @@ def mk_allocator(
 
     # specific configs by asset class / type
 
-    if asset_type in ('future', 'option', 'futures_option'):
+    if asset_type in _derivs:
 
         # since it's harder to know how currency "applies" in this case
         # given leverage properties
         alloc.size_unit = '# units'
 
@@ -308,9 +348,12 @@ def mk_allocator(
         alloc.currency_limit = round(startup_size, ndigits=2)
 
     else:
-        startup_size = startup_pp.size
+        startup_size = abs(startup_pp.size)
 
         if startup_size > alloc.units_limit:
             alloc.units_limit = startup_size
 
+    if asset_type in _derivs:
+        alloc.slots = alloc.units_limit
+
     return alloc
@@ -18,7 +18,7 @@
 Orders and execution client API.
 
 """
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from typing import Dict
 from pprint import pformat
 from dataclasses import dataclass, field
 
@@ -27,7 +27,6 @@ import trio
 import tractor
 from tractor.trionics import broadcast_receiver
 
-from ..data._source import Symbol
 from ..log import get_logger
 from ._ems import _emsd_main
 from .._daemon import maybe_open_emsd
 
@@ -156,16 +155,19 @@ async def relay_order_cmds_from_sync_code(
         await to_ems_stream.send(cmd)
 
 
-@asynccontextmanager
+@acm
 async def open_ems(
-    broker: str,
-    symbol: Symbol,
+    fqsn: str,
 
-) -> (OrderBook, tractor.MsgStream, dict):
+) -> (
+    OrderBook,
+    tractor.MsgStream,
+    dict,
+):
-    """Spawn an EMS daemon and begin sending orders and receiving
+    '''
+    Spawn an EMS daemon and begin sending orders and receiving
     alerts.
 
     This EMS tries to reduce most brokers' terrible order entry apis to
     a very simple protocol built on a few easy to grok and/or
     "rantsy" premises:
 
@@ -194,21 +196,22 @@ async def open_ems(
     - 'dark_executed', 'broker_executed'
     - 'broker_filled'
 
-    """
+    '''
     # wait for service to connect back to us signalling
    # ready for order commands
     book = get_orders()
 
+    from ..data._source import unpack_fqsn
+    broker, symbol, suffix = unpack_fqsn(fqsn)
+
     async with maybe_open_emsd(broker) as portal:
 
         async with (
 
             # connect to emsd
             portal.open_context(
 
                 _emsd_main,
-                broker=broker,
+                fqsn=fqsn,
-                symbol=symbol.key,
 
             ) as (ctx, (positions, accounts)),
 
@@ -218,7 +221,7 @@ async def open_ems(
         async with trio.open_nursery() as n:
             n.start_soon(
                 relay_order_cmds_from_sync_code,
-                symbol.key,
+                fqsn,
                 trades_stream
             )
@@ -47,11 +47,14 @@ log = get_logger(__name__)
 
 # TODO: numba all of this
 def mk_check(
 
     trigger_price: float,
     known_last: float,
     action: str,
 
 ) -> Callable[[float, float], bool]:
-    """Create a predicate for given ``exec_price`` based on last known
+    '''
+    Create a predicate for given ``exec_price`` based on last known
     price, ``known_last``.
 
     This is an automatic alert level thunk generator based on where the
 
@@ -59,7 +62,7 @@ def mk_check(
     interest is; pick an appropriate comparison operator based on
     avoiding the case where a predicate returns true immediately.
 
-    """
+    '''
     # str compares:
     # https://stackoverflow.com/questions/46708708/compare-strings-in-numba-compiled-function
 
@@ -77,8 +80,9 @@ def mk_check(
 
         return check_lt
 
-    else:
-        return None
+    raise ValueError(
+        f'trigger: {trigger_price}, last: {known_last}'
+    )
 
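The point of the thunk generator is that the comparison direction is fixed at creation time so the predicate can never fire immediately. A simplified sketch that ignores the `action`-specific operator choice in the real function:

    from typing import Callable

    def mk_check(
        trigger_price: float,
        known_last: float,
    ) -> Callable[[float], bool]:
        # trigger above last -> wait for price to rise to it;
        # trigger below last -> wait for a fall; either way the
        # predicate is False for the current price.
        if trigger_price > known_last:
            return lambda price: price >= trigger_price
        elif trigger_price < known_last:
            return lambda price: price <= trigger_price
        raise ValueError(f'trigger == last: {trigger_price}')

    pred = mk_check(trigger_price=101.0, known_last=100.0)
    assert not pred(100.5) and pred(101.0)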
||||||
@dataclass
|
@dataclass
|
||||||
|
@ -110,8 +114,8 @@ class _DarkBook:
|
||||||
|
|
||||||
# tracks most recent values per symbol each from data feed
|
# tracks most recent values per symbol each from data feed
|
||||||
lasts: dict[
|
lasts: dict[
|
||||||
tuple[str, str],
|
str,
|
||||||
float
|
float,
|
||||||
] = field(default_factory=dict)
|
] = field(default_factory=dict)
|
||||||
|
|
||||||
# mapping of piker ems order ids to current brokerd order flow message
|
# mapping of piker ems order ids to current brokerd order flow message
|
||||||
@@ -132,40 +136,42 @@ async def clear_dark_triggers(

     ems_client_order_stream: tractor.MsgStream,
     quote_stream: tractor.ReceiveMsgStream,  # noqa
     broker: str,
-    symbol: str,
+    fqsn: str,

     book: _DarkBook,

 ) -> None:
-    """Core dark order trigger loop.
+    '''
+    Core dark order trigger loop.

     Scan the (price) data feed and submit triggered orders
     to broker.

-    """
-    # this stream may eventually contain multiple symbols
+    '''
     # XXX: optimize this for speed!
+    # TODO:
+    # - numba all this!
+    # - this stream may eventually contain multiple symbols
     async for quotes in quote_stream:

-        # TODO: numba all this!

         # start = time.time()
         for sym, quote in quotes.items():
-            execs = book.orders.get(sym, None)
-            if execs is None:
-                continue
+            execs = book.orders.get(sym, {})

             for tick in iterticks(
                 quote,
                 # dark order price filter(s)
-                types=('ask', 'bid', 'trade', 'last')
+                types=(
+                    'ask',
+                    'bid',
+                    'trade',
+                    'last',
+                    # 'dark_trade',  # TODO: should allow via config?
+                )
             ):
                 price = tick.get('price')
                 ttype = tick['type']

                 # update to keep new cmds informed
-                book.lasts[(broker, symbol)] = price
+                book.lasts[sym] = price

                 for oid, (
                     pred,

@@ -176,13 +182,21 @@ async def clear_dark_triggers(

                 ) in (
                     tuple(execs.items())
                 ):
-                    if not pred or (ttype not in tf) or (not pred(price)):
+                    if (
+                        not pred or
+                        ttype not in tf or
+                        not pred(price)
+                    ):
+                        log.debug(
+                            f'skipping quote for {sym} '
+                            f'{pred}, {ttype} not in {tf}?, {pred(price)}'
+                        )
                         # majority of iterations will be non-matches
                         continue

                     action: str = cmd['action']
                     symbol: str = cmd['symbol']
+                    bfqsn: str = symbol.replace(f'.{broker}', '')

                     if action == 'alert':
                         # nothing to do but relay a status

@@ -212,7 +226,7 @@ async def clear_dark_triggers(

                             # order-request and instead create a new one.
                             reqid=None,

-                            symbol=sym,
+                            symbol=bfqsn,
                             price=submit_price,
                             size=cmd['size'],
                         )

@@ -234,26 +248,35 @@ async def clear_dark_triggers(

                         oid=oid,  # ems order id
                         resp=resp,
                         time_ns=time.time_ns(),
-                        symbol=symbol,
+                        symbol=fqsn,
                         trigger_price=price,

                         broker_details={'name': broker},

                         cmd=cmd,  # original request message

                     ).dict()

                     # remove exec-condition from set
                     log.info(f'removing pred for {oid}')
-                    execs.pop(oid)
+                    pred = execs.pop(oid, None)
+                    if not pred:
+                        log.warning(
+                            f'pred for {oid} was already removed!?'
+                        )

+                    try:
                         await ems_client_order_stream.send(msg)
+                    except (
+                        trio.ClosedResourceError,
+                    ):
+                        log.warning(
+                            f'client {ems_client_order_stream} stream is broke'
+                        )
+                        break

                 else:  # condition scan loop complete
                     log.debug(f'execs are {execs}')
                     if execs:
-                        book.orders[symbol] = execs
+                        book.orders[fqsn] = execs

         # print(f'execs scan took: {time.time() - start}')

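The running theme of this patch is re-keying clearing state by "fully qualified symbol name" (FQSN) strings of the form <name>.<venue>.<suffix>.<broker> instead of ``(broker, symbol)`` tuples. A tiny sketch of the string handling the hunks above rely on (sample values invented for illustration):

    broker = 'ib'
    sym = 'mnq.globex.ib'  # an FQSN as keyed off the quote feed

    # strip the broker suffix before relaying to that broker's backend,
    # mirroring the ``bfqsn`` line added above
    bfqsn = sym.replace(f'.{broker}', '')
    assert bfqsn == 'mnq.globex'

    # dark book state is now keyed by the flat FQSN string
    lasts: dict[str, float] = {}
    lasts[sym] = 12001.25  # previously: lasts[(broker, 'mnq.globex')]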
@@ -269,14 +292,15 @@ class TradesRelay:

     positions: dict[str, dict[str, BrokerdPosition]]

     # allowed account names
-    accounts: set[str]
+    accounts: tuple[str]

     # count of connected ems clients for this ``brokerd``
     consumers: int = 0


 class Router(BaseModel):
-    '''Order router which manages and tracks per-broker dark book,
+    '''
+    Order router which manages and tracks per-broker dark book,
     alerts, clearing and related data feed management.

     A singleton per ``emsd`` actor.
@@ -364,7 +388,8 @@ async def open_brokerd_trades_dialogue(

     task_status: TaskStatus[TradesRelay] = trio.TASK_STATUS_IGNORED,

 ) -> tuple[dict, tractor.MsgStream]:
-    '''Open and yield ``brokerd`` trades dialogue context-stream if none
+    '''
+    Open and yield ``brokerd`` trades dialogue context-stream if none
     already exists.

     '''

@@ -401,8 +426,7 @@ async def open_brokerd_trades_dialogue(

         # actor to simulate the real IPC load it'll have when also
         # pulling data from feeds
         open_trades_endpoint = paper.open_paperboi(
-            broker=broker,
-            symbol=symbol,
+            fqsn='.'.join([symbol, broker]),
             loglevel=loglevel,
         )

@@ -414,6 +438,9 @@ async def open_brokerd_trades_dialogue(

     )

     try:
+        positions: list[BrokerdPosition]
+        accounts: tuple[str]
+
         async with (
             open_trades_endpoint as (brokerd_ctx, (positions, accounts,)),
             brokerd_ctx.open_stream() as brokerd_trades_stream,

@@ -437,19 +464,20 @@ async def open_brokerd_trades_dialogue(

             # locally cache and track positions per account.
             pps = {}
             for msg in positions:
+                log.info(f'loading pp: {msg}')

                 account = msg['account']
                 assert account in accounts

                 pps.setdefault(
-                    msg['symbol'],
+                    f'{msg["symbol"]}.{broker}',
                     {}
                 )[account] = msg

             relay = TradesRelay(
                 brokerd_dialogue=brokerd_trades_stream,
                 positions=pps,
-                accounts=set(accounts),
+                accounts=accounts,
                 consumers=1,
             )

@@ -472,7 +500,9 @@ async def open_brokerd_trades_dialogue(

     finally:
         # parent context must have been closed
         # remove from cache so next client will respawn if needed
-        _router.relays.pop(broker)
+        relay = _router.relays.pop(broker, None)
+        if not relay:
+            log.warning(f'Relay for {broker} was already removed!?')


 @tractor.context
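Likewise the pp (position) table built in the ``@@ -437`` hunk is now keyed by FQSN rather than the bare broker-side symbol. A sketch of the resulting shape (message contents invented for illustration):

    # {fqsn: {account_name: position_msg}}
    pps: dict[str, dict[str, dict]] = {}

    broker = 'ib'
    positions = [
        {'symbol': 'mnq.globex', 'account': 'margin', 'size': 1},
    ]
    for msg in positions:
        pps.setdefault(
            f'{msg["symbol"]}.{broker}',  # -> 'mnq.globex.ib'
            {},
        )[msg['account']] = msg

    assert 'mnq.globex.ib' in pps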
@@ -504,7 +534,8 @@ async def translate_and_relay_brokerd_events(

     router: Router,

 ) -> AsyncIterator[dict]:
-    '''Trades update loop - receive updates from ``brokerd`` trades
+    '''
+    Trades update loop - receive updates from ``brokerd`` trades
     endpoint, convert to EMS response msgs, transmit **only** to
     ordering client(s).

@@ -532,7 +563,10 @@ async def translate_and_relay_brokerd_events(

         name = brokerd_msg['name']

-        log.info(f'Received broker trade event:\n{pformat(brokerd_msg)}')
+        log.info(
+            f'Received broker trade event:\n'
+            f'{pformat(brokerd_msg)}'
+        )

         if name == 'position':

@@ -540,14 +574,28 @@ async def translate_and_relay_brokerd_events(

             # XXX: this will be useful for automatic strats yah?
             # keep pps per account up to date locally in ``emsd`` mem
-            relay.positions.setdefault(pos_msg['symbol'], {}).setdefault(
+            sym, broker = pos_msg['symbol'], pos_msg['broker']
+
+            relay.positions.setdefault(
+                # NOTE: translate to a FQSN!
+                f'{sym}.{broker}',
+                {}
+            ).setdefault(
                 pos_msg['account'], {}
             ).update(pos_msg)

             # fan-out-relay position msgs immediately by
             # broadcasting updates on all client streams
-            for client_stream in router.clients:
+            for client_stream in router.clients.copy():
+                try:
                     await client_stream.send(pos_msg)
+                except(
+                    trio.ClosedResourceError,
+                    trio.BrokenResourceError,
+                ):
+                    router.clients.remove(client_stream)
+                    log.warning(
+                        f'client for {client_stream} was already closed?')

             continue

@@ -570,19 +618,28 @@ async def translate_and_relay_brokerd_events(

             # packed at submission since we already know it ahead of
             # time
             paper = brokerd_msg['broker_details'].get('paper_info')
+            ext = brokerd_msg['broker_details'].get('external')
             if paper:
                 # paperboi keeps the ems id up front
                 oid = paper['oid']

-            else:
+            elif ext:
                 # may be an order msg specified as "external" to the
                 # piker ems flow (i.e. generated by some other
                 # external broker backend client (like tws for ib)
-                ext = brokerd_msg['broker_details'].get('external')
-                if ext:
                 log.error(f"External trade event {ext}")

                 continue

+            else:
+                # something is out of order, we don't have an oid for
+                # this broker-side message.
+                log.error(
+                    'Unknown oid:{oid} for msg:\n'
+                    f'{pformat(brokerd_msg)}'
+                    'Unable to relay message to client side!?'
+                )

         else:
             # check for existing live flow entry
             entry = book._ems_entries.get(oid)
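The position fan-out above iterates a copy of the client-stream set so dead subscribers can be pruned mid-loop, a standard trio pattern since sends can fail with closed/broken resource errors. A standalone sketch (the stream type is simplified to anything with an async ``send()``):

    import trio

    async def fanout(subscribers: list, msg: dict) -> None:
        # iterate a *copy* so removal during iteration is safe
        for stream in subscribers.copy():
            try:
                await stream.send(msg)
            except (
                trio.ClosedResourceError,
                trio.BrokenResourceError,
            ):
                # drop the dead subscriber; later broadcasts skip it
                subscribers.remove(stream)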
@@ -780,7 +837,9 @@ async def process_client_order_cmds(

             if reqid:

                 # send cancel to brokerd immediately!
-                log.info("Submitting cancel for live order {reqid}")
+                log.info(
+                    f'Submitting cancel for live order {reqid}'
+                )

                 await brokerd_order_stream.send(msg.dict())

@@ -817,11 +876,15 @@ async def process_client_order_cmds(

             msg = Order(**cmd)

-            sym = msg.symbol
+            fqsn = msg.symbol
             trigger_price = msg.price
             size = msg.size
             exec_mode = msg.exec_mode
             broker = msg.brokers[0]
+            # remove the broker part before creating a message
+            # to send to the specific broker since they probably
+            # aren't expectig their own name, but should they?
+            sym = fqsn.replace(f'.{broker}', '')

             if exec_mode == 'live' and action in ('buy', 'sell',):

@@ -879,7 +942,7 @@ async def process_client_order_cmds(

                 # price received from the feed, instead of being
                 # like every other shitty tina platform that makes
                 # the user choose the predicate operator.
-                last = dark_book.lasts[(broker, sym)]
+                last = dark_book.lasts[fqsn]
                 pred = mk_check(trigger_price, last, action)

                 spread_slap: float = 5

@@ -910,7 +973,7 @@ async def process_client_order_cmds(

                 # dark book entry if the order id already exists

                 dark_book.orders.setdefault(
-                    sym, {}
+                    fqsn, {}
                 )[oid] = (
                     pred,
                     tickfilter,
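Tying the dark-book hunks together: each order id maps to a ``(pred, tickfilter, ...)`` tuple stored under the order's FQSN. A hedged sketch of registering one trigger; the ``_DarkBook`` stand-in below mirrors the dataclass fields shown earlier, the predicate is inlined where ``mk_check()`` would be used in practice, and the tail of the entry tuple is elided in the diff so only the visible fields appear:

    class _DarkBook:  # stand-in mirroring the dataclass above
        def __init__(self) -> None:
            self.lasts: dict[str, float] = {}
            self.orders: dict[str, dict] = {}

    dark_book = _DarkBook()

    oid = 'a-uuid4-string'
    fqsn = 'mnq.globex.ib'
    tickfilter = ('ask', 'last')  # hypothetical tick-type filter
    trigger_price = 12000.0

    # predicate built from the last price cached under the same key
    dark_book.lasts[fqsn] = 12001.25
    last = dark_book.lasts[fqsn]
    pred = lambda price: price >= trigger_price  # via mk_check() in practice

    dark_book.orders.setdefault(fqsn, {})[oid] = (
        pred,
        tickfilter,
        # NOTE: the real entry tuple carries more per-order fields
        # which are elided in this hunk
    )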
@@ -937,8 +1000,8 @@ async def process_client_order_cmds(

 async def _emsd_main(

     ctx: tractor.Context,
-    broker: str,
-    symbol: str,
+    fqsn: str,
     _exec_mode: str = 'dark',  # ('paper', 'dark', 'live')
     loglevel: str = 'info',

@@ -980,6 +1043,8 @@ async def _emsd_main(

     global _router
     assert _router

+    from ..data._source import unpack_fqsn
+    broker, symbol, suffix = unpack_fqsn(fqsn)
     dark_book = _router.get_dark_book(broker)

     # TODO: would be nice if in tractor we can require either a ctx arg,

@@ -992,17 +1057,16 @@

     # spawn one task per broker feed
     async with (
         maybe_open_feed(
-            broker,
-            [symbol],
+            [fqsn],
             loglevel=loglevel,
-        ) as (feed, stream),
+        ) as (feed, quote_stream),
     ):

         # XXX: this should be initial price quote from target provider
-        first_quote = feed.first_quotes[symbol]
+        first_quote = feed.first_quotes[fqsn]

         book = _router.get_dark_book(broker)
-        book.lasts[(broker, symbol)] = first_quote['last']
+        book.lasts[fqsn] = first_quote['last']

         # open a stream with the brokerd backend for order
         # flow dialogue

@@ -1026,13 +1090,13 @@

             # flatten out collected pps from brokerd for delivery
             pp_msgs = {
-                sym: list(pps.values())
-                for sym, pps in relay.positions.items()
+                fqsn: list(pps.values())
+                for fqsn, pps in relay.positions.items()
             }

             # signal to client that we're started and deliver
             # all known pps and accounts for this ``brokerd``.
-            await ems_ctx.started((pp_msgs, relay.accounts))
+            await ems_ctx.started((pp_msgs, list(relay.accounts)))

             # establish 2-way stream with requesting order-client and
             # begin handling inbound order requests and updates

@@ -1044,9 +1108,9 @@

                     brokerd_stream,
                     ems_client_order_stream,
-                    stream,
+                    quote_stream,
                     broker,
-                    symbol,
+                    fqsn,  # form: <name>.<venue>.<suffix>.<broker>
                     book
                 )

@@ -1062,7 +1126,7 @@

                     # relay.brokerd_dialogue,
                     brokerd_stream,

-                    symbol,
+                    fqsn,
                     feed,
                     dark_book,
                     _router,
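``unpack_fqsn()`` (imported in the ``@@ -980`` hunk above) is the inverse of the ``'.'.join(...)`` construction used at the paper-engine call sites. A hedged round-trip sketch assuming the <name>.<venue>.<suffix>.<broker> form noted in the diff; the exact splitting rules live in ``piker.data._source`` and the decomposition shown is illustrative, not quoted:

    from piker.data._source import unpack_fqsn

    # unpacking order matches the call sites in this patch:
    # ``broker, symbol, suffix = unpack_fqsn(fqsn)``
    broker, symbol, suffix = unpack_fqsn('mnq.globex.20220617.ib')

    # plausible decomposition given the documented form:
    # broker -> 'ib', symbol -> 'mnq.globex', suffix -> '20220617'
    fqsn = '.'.join(filter(None, [symbol, suffix, broker]))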
@@ -155,8 +155,11 @@ class BrokerdOrder(BaseModel):


 class BrokerdOrderAck(BaseModel):
-    '''Immediate reponse to a brokerd order request providing
-    the broker specifci unique order id.
+    '''
+    Immediate reponse to a brokerd order request providing the broker
+    specific unique order id so that the EMS can associate this
+    (presumably differently formatted broker side ID) with our own
+    ``.oid`` (which is a uuid4).

     '''
     name: str = 'ack'

@@ -181,7 +184,7 @@ class BrokerdStatus(BaseModel):

     # {
     # 'submitted',
     # 'cancelled',
-    # 'executed',
+    # 'filled',
     # }
     status: str

@@ -203,7 +206,8 @@ class BrokerdStatus(BaseModel):


 class BrokerdFill(BaseModel):
-    '''A single message indicating a "fill-details" event from the broker
+    '''
+    A single message indicating a "fill-details" event from the broker
     if avaiable.

     '''

@@ -227,16 +231,18 @@ class BrokerdFill(BaseModel):


 class BrokerdError(BaseModel):
-    '''Optional error type that can be relayed to emsd for error handling.
+    '''
+    Optional error type that can be relayed to emsd for error handling.

     This is still a TODO thing since we're not sure how to employ it yet.

     '''
     name: str = 'error'
     oid: str

     # if no brokerd order request was actually submitted (eg. we errored
     # at the ``pikerd`` layer) then there will be ``reqid`` allocated.
-    reqid: Union[int, str] = ''
+    reqid: Optional[Union[int, str]] = None

     symbol: str
     reason: str
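With ``reqid`` now ``Optional`` and defaulting to ``None``, an error raised before any broker submission needs no placeholder id. A sketch using only the fields visible in this hunk (pydantic v1 style, matching the ``BaseModel``/``.dict()`` usage elsewhere in the patch; sample values invented):

    from typing import Optional, Union
    from pydantic import BaseModel

    class BrokerdError(BaseModel):
        name: str = 'error'
        oid: str
        # ``None`` now signals "no brokerd request was ever submitted"
        reqid: Optional[Union[int, str]] = None
        symbol: str
        reason: str

    msg = BrokerdError(
        oid='a-uuid4-string',
        symbol='mnq.globex.ib',
        reason='rejected before any broker submission',
    )
    assert msg.reqid is None
    payload = msg.dict()  # wire form sent over tractor streams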
@@ -32,6 +32,7 @@ from dataclasses import dataclass

 from .. import data
 from ..data._normalize import iterticks
+from ..data._source import unpack_fqsn
 from ..log import get_logger
 from ._messages import (
     BrokerdCancel, BrokerdOrder, BrokerdOrderAck, BrokerdStatus,

@@ -446,16 +447,16 @@ async def trades_dialogue(

     ctx: tractor.Context,
     broker: str,
-    symbol: str,
+    fqsn: str,
     loglevel: str = None,

 ) -> None:
+    tractor.log.get_console_log(loglevel)

     async with (

         data.open_feed(
-            broker,
-            [symbol],
+            [fqsn],
             loglevel=loglevel,
         ) as feed,

@@ -490,15 +491,16 @@ async def trades_dialogue(

 @asynccontextmanager
 async def open_paperboi(
-    broker: str,
-    symbol: str,
+    fqsn: str,
     loglevel: str,

 ) -> Callable:
-    '''Spawn a paper engine actor and yield through access to
+    '''
+    Spawn a paper engine actor and yield through access to
     its context.

     '''
+    broker, symbol, expiry = unpack_fqsn(fqsn)
     service_name = f'paperboi.{broker}'

     async with (

@@ -517,7 +519,7 @@ async def open_paperboi(

         async with portal.open_context(
             trades_dialogue,
             broker=broker,
-            symbol=symbol,
+            fqsn=fqsn,
             loglevel=loglevel,

         ) as (ctx, first):
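Callers now hand the paper engine a single FQSN, as the ``open_paperboi(fqsn=...)`` call in the EMS hunks shows. A minimal usage sketch; the module path and the shape of the yielded pair are assumptions inferred from the ``open_context`` usage above, not documented API:

    import trio
    from piker.clearing import _paper_engine as paper

    async def main() -> None:
        # spawn (or attach to) a paper engine actor for this backend
        async with paper.open_paperboi(
            fqsn='mnq.globex.ib',
            loglevel='info',
        ) as (ctx, first):
            # ``first`` is whatever ``trades_dialogue`` passed to
            # ``ctx.started()``
            print(first)

    trio.run(main)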
@@ -1,7 +1,25 @@
-"""
+# piker: trading gear for hackers
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
 CLI commons.
-"""
+
+'''
 import os
+from pprint import pformat

 import click
 import trio

@@ -16,29 +34,22 @@ from .. import config

 log = get_logger('cli')
 DEFAULT_BROKER = 'questrade'

-_config_dir = click.get_app_dir('piker')
-_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
-_context_defaults = dict(
-    default_map={
-        # Questrade specific quote poll rates
-        'monitor': {
-            'rate': 3,
-        },
-        'optschain': {
-            'rate': 1,
-        },
-    }
-)


 @click.command()
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
 @click.option('--host', '-h', default='127.0.0.1', help='Host address to bind')
-def pikerd(loglevel, host, tl, pdb):
-    """Spawn the piker broker-daemon.
-    """
+@click.option(
+    '--tsdb',
+    is_flag=True,
+    help='Enable local ``marketstore`` instance'
+)
+def pikerd(loglevel, host, tl, pdb, tsdb):
+    '''
+    Spawn the piker broker-daemon.

+    '''
     from .._daemon import open_pikerd
     log = get_console_log(loglevel)

@@ -52,13 +63,38 @@ def pikerd(loglevel, host, tl, pdb):

     ))

     async def main():
-        async with open_pikerd(loglevel=loglevel, debug_mode=pdb):
+        async with (
+            open_pikerd(
+                loglevel=loglevel,
+                debug_mode=pdb,
+            ),  # normally delivers a ``Services`` handle
+            trio.open_nursery() as n,
+        ):
+            if tsdb:
+                from piker.data._ahab import start_ahab
+                from piker.data.marketstore import start_marketstore
+
+                log.info('Spawning `marketstore` supervisor')
+                ctn_ready, config, (cid, pid) = await n.start(
+                    start_ahab,
+                    'marketstored',
+                    start_marketstore,
+
+                )
+                log.info(
+                    f'`marketstore` up!\n'
+                    f'`marketstored` pid: {pid}\n'
+                    f'docker container id: {cid}\n'
+                    f'config: {pformat(config)}'
+                )

             await trio.sleep_forever()

     trio.run(main)


-@click.group(context_settings=_context_defaults)
+@click.group(context_settings=config._context_defaults)
 @click.option(
     '--brokers', '-b',
     default=[DEFAULT_BROKER],

@@ -87,8 +123,8 @@ def cli(ctx, brokers, loglevel, tl, configdir):

         'loglevel': loglevel,
         'tractorloglevel': None,
         'log': get_console_log(loglevel),
-        'confdir': _config_dir,
-        'wl_path': _watchlists_data_path,
+        'confdir': config._config_dir,
+        'wl_path': config._watchlists_data_path,
     })

     # allow enabling same loglevel in ``tractor`` machinery

@@ -107,15 +143,13 @@ def services(config, tl, names):

         async with tractor.get_arbiter(
             *_tractor_kwargs['arbiter_addr']
         ) as portal:
-            registry = await portal.run('self', 'get_registry')
+            registry = await portal.run_from_ns('self', 'get_registry')
             json_d = {}
-            for uid, socket in registry.items():
-                name, uuid = uid
+            for key, socket in registry.items():
+                # name, uuid = uid
                 host, port = socket
-                json_d[f'{name}.{uuid}'] = f'{host}:{port}'
-            click.echo(
-                f"Available `piker` services:\n{colorize_json(json_d)}"
-            )
+                json_d[key] = f'{host}:{port}'
+            click.echo(f"{colorize_json(json_d)}")

     tractor.run(
         list_services,
piker/config.py

@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -16,7 +16,10 @@

 """
 Broker configuration mgmt.

 """
+import platform
+import sys
 import os
 from os.path import dirname
 import shutil

@@ -24,14 +27,106 @@ from typing import Optional

 from bidict import bidict
 import toml
-import click

 from .log import get_logger

 log = get_logger('broker-config')

-_config_dir = click.get_app_dir('piker')
-_file_name = 'brokers.toml'
+# taken from ``click`` since apparently they have some
+# super weirdness with sigint and sudo..no clue
+def get_app_dir(app_name, roaming=True, force_posix=False):
+    r"""Returns the config folder for the application.  The default behavior
+    is to return whatever is most appropriate for the operating system.
+
+    To give you an idea, for an app called ``"Foo Bar"``, something like
+    the following folders could be returned:
+
+    Mac OS X:
+      ``~/Library/Application Support/Foo Bar``
+    Mac OS X (POSIX):
+      ``~/.foo-bar``
+    Unix:
+      ``~/.config/foo-bar``
+    Unix (POSIX):
+      ``~/.foo-bar``
+    Win XP (roaming):
+      ``C:\Documents and Settings\<user>\Local Settings\Application Data\Foo``
+    Win XP (not roaming):
+      ``C:\Documents and Settings\<user>\Application Data\Foo Bar``
+    Win 7 (roaming):
+      ``C:\Users\<user>\AppData\Roaming\Foo Bar``
+    Win 7 (not roaming):
+      ``C:\Users\<user>\AppData\Local\Foo Bar``
+
+    .. versionadded:: 2.0
+
+    :param app_name: the application name. This should be properly capitalized
+        and can contain whitespace.
+    :param roaming: controls if the folder should be roaming or not on Windows.
+        Has no affect otherwise.
+    :param force_posix: if this is set to `True` then on any POSIX system the
+        folder will be stored in the home folder with a leading
+        dot instead of the XDG config home or darwin's
+        application support folder.
+    """
+
+    def _posixify(name):
+        return "-".join(name.split()).lower()
+
+    # if WIN:
+    if platform.system() == 'Windows':
+        key = "APPDATA" if roaming else "LOCALAPPDATA"
+        folder = os.environ.get(key)
+        if folder is None:
+            folder = os.path.expanduser("~")
+        return os.path.join(folder, app_name)
+    if force_posix:
+        return os.path.join(
+            os.path.expanduser("~/.{}".format(_posixify(app_name))))
+    if sys.platform == "darwin":
+        return os.path.join(
+            os.path.expanduser("~/Library/Application Support"), app_name
+        )
+    return os.path.join(
+        os.environ.get("XDG_CONFIG_HOME", os.path.expanduser("~/.config")),
+        _posixify(app_name),
+    )
+
+
+_config_dir = _click_config_dir = get_app_dir('piker')
+_parent_user = os.environ.get('SUDO_USER')
+
+if _parent_user:
+    non_root_user_dir = os.path.expanduser(
+        f'~{_parent_user}'
+    )
+    root = 'root'
+    _config_dir = (
+        non_root_user_dir +
+        _click_config_dir[
+            _click_config_dir.rfind(root) + len(root):
+        ]
+    )
+
+_conf_names: set[str] = {
+    'brokers',
+    'trades',
+    'watchlists',
+}
+
+_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
+_context_defaults = dict(
+    default_map={
+        # Questrade specific quote poll rates
+        'monitor': {
+            'rate': 3,
+        },
+        'optschain': {
+            'rate': 1,
+        },
+    }
+)


 def _override_config_dir(

@@ -41,41 +136,72 @@ def _override_config_dir(

     _config_dir = path


-def get_broker_conf_path():
+def _conf_fn_w_ext(
+    name: str,
+) -> str:
+    # change this if we ever change the config file format.
+    return f'{name}.toml'
+
+
+def get_conf_path(
+    conf_name: str = 'brokers',
+
+) -> str:
     """Return the default config path normally under
     ``~/.config/piker`` on linux.

     Contains files such as:
     - brokers.toml
     - watchlists.toml
+    - trades.toml
+
+    # maybe coming soon ;)
     - signals.toml
     - strats.toml

     """
-    return os.path.join(_config_dir, _file_name)
+    assert conf_name in _conf_names
+    fn = _conf_fn_w_ext(conf_name)
+    return os.path.join(
+        _config_dir,
+        fn,
+    )


 def repodir():
-    """Return the abspath to the repo directory.
-    """
+    '''
+    Return the abspath to the repo directory.
+
+    '''
     dirpath = os.path.abspath(
         # we're 3 levels down in **this** module file
-        dirname(dirname(dirname(os.path.realpath(__file__))))
+        dirname(dirname(os.path.realpath(__file__)))
     )
     return dirpath


 def load(
+    conf_name: str = 'brokers',
     path: str = None

 ) -> (dict, str):
-    """Load broker config.
-    """
-    path = path or get_broker_conf_path()
+    '''
+    Load config file by name.
+
+    '''
+    path = path or get_conf_path(conf_name)
     if not os.path.isfile(path):
-        shutil.copyfile(
-            os.path.join(repodir(), 'data/brokers.toml'),
-            path,
+        fn = _conf_fn_w_ext(conf_name)
+
+        template = os.path.join(
+            repodir(),
+            'config',
+            fn
         )
+        # try to copy in a template config to the user's directory
+        # if one exists.
+        if os.path.isfile(template):
+            shutil.copyfile(template, path)

     config = toml.load(path)
     log.debug(f"Read config file {path}")

@@ -84,13 +210,17 @@ def load(

 def write(
     config: dict,  # toml config as dict
+    name: str = 'brokers',
     path: str = None,

 ) -> None:
-    """Write broker config to disk.
+    ''''
+    Write broker config to disk.

     Create a ``brokers.ini`` file if one does not exist.
-    """
-    path = path or get_broker_conf_path()
+
+    '''
+    path = path or get_conf_path(name)
     dirname = os.path.dirname(path)
     if not os.path.isdir(dirname):
         log.debug(f"Creating config dir {_config_dir}")

@@ -100,7 +230,10 @@ def write(

         raise ValueError(
             "Watch out you're trying to write a blank config!")

-    log.debug(f"Writing config file {path}")
+    log.debug(
+        f"Writing config `{name}` file to:\n"
+        f"{path}"
+    )
     with open(path, 'w') as cf:
         return toml.dump(config, cf)

@@ -130,4 +263,5 @@ def load_accounts(

     # our default paper engine entry
     accounts['paper'] = None
+
     return accounts
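Since configs are now looked up by name, loading and writing any known config file is symmetric through the new helpers. A hedged usage sketch (values invented; the two-value unpack follows the ``-> (dict, str)`` annotation, and per the diff ``load()`` copies a repo template into place when one exists):

    from piker import config

    # -> ~/.config/piker/trades.toml on linux (see get_app_dir above)
    path = config.get_conf_path('trades')

    conf, loaded_path = config.load('trades')
    conf.setdefault('paper', {})['positions'] = {}

    # round-trip back to disk under the same config name
    config.write(conf, name='trades')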
@@ -0,0 +1,385 @@
+# piker: trading gear for hackers
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Supervisor for docker with included specific-image service helpers.
+
+'''
+import os
+import time
+from typing import (
+    Optional,
+    Callable,
+    Any,
+)
+from contextlib import asynccontextmanager as acm
+
+import trio
+from trio_typing import TaskStatus
+import tractor
+from tractor.msg import NamespacePath
+import docker
+import json
+from docker.models.containers import Container as DockerContainer
+from docker.errors import (
+    DockerException,
+    APIError,
+)
+from requests.exceptions import ConnectionError, ReadTimeout
+
+from ..log import get_logger, get_console_log
+from .. import config
+
+log = get_logger(__name__)
+
+
+class DockerNotStarted(Exception):
+    'Prolly you dint start da daemon bruh'
+
+
+class ContainerError(RuntimeError):
+    'Error reported via app-container logging level'
+
+
+@acm
+async def open_docker(
+    url: Optional[str] = None,
+    **kwargs,
+
+) -> docker.DockerClient:
+
+    client: Optional[docker.DockerClient] = None
+    try:
+        client = docker.DockerClient(
+            base_url=url,
+            **kwargs
+        ) if url else docker.from_env(**kwargs)
+
+        yield client
+
+    except (
+        DockerException,
+        APIError,
+    ) as err:
+
+        def unpack_msg(err: Exception) -> str:
+            args = getattr(err, 'args', None)
+            if args:
+                return args
+            else:
+                return str(err)
+
+        # could be more specific so let's check if it's just perms.
+        if err.args:
+            errs = err.args
+            for err in errs:
+                msg = unpack_msg(err)
+                if 'PermissionError' in msg:
+                    raise DockerException('You dint run as root yo!')
+
+                elif 'FileNotFoundError' in msg:
+                    raise DockerNotStarted('Did you start da service sister?')
+
+        # not perms?
+        raise
+
+    finally:
+        if client:
+            client.close()
+
+
+class Container:
+    '''
+    Wrapper around a ``docker.models.containers.Container`` to include
+    log capture and relay through our native logging system and helper
+    method(s) for cancellation/teardown.
+
+    '''
+    def __init__(
+        self,
+        cntr: DockerContainer,
+    ) -> None:
+
+        self.cntr = cntr
+        # log msg de-duplication
+        self.seen_so_far = set()
+
+    async def process_logs_until(
+        self,
+        patt: str,
+        bp_on_msg: bool = False,
+    ) -> bool:
+        '''
+        Attempt to capture container log messages and relay through our
+        native logging system.
+
+        '''
+        seen_so_far = self.seen_so_far
+
+        while True:
+            logs = self.cntr.logs()
+            entries = logs.decode().split('\n')
+            for entry in entries:
+
+                # ignore null lines
+                if not entry:
+                    continue
+
+                try:
+                    record = json.loads(entry.strip())
+                except json.JSONDecodeError:
+                    if 'Error' in entry:
+                        raise RuntimeError(entry)
+                    raise
+
+                msg = record['msg']
+                level = record['level']
+                if msg and entry not in seen_so_far:
+                    seen_so_far.add(entry)
+                    if bp_on_msg:
+                        await tractor.breakpoint()
+
+                    getattr(log, level, log.error)(f'{msg}')
+
+                    # print(f'level: {level}')
+                    if level in ('error', 'fatal'):
+                        raise ContainerError(msg)
+
+                if patt in msg:
+                    return True
+
+                # do a checkpoint so we don't block if cancelled B)
+                await trio.sleep(0.01)
+
+        return False
+
+    def try_signal(
+        self,
+        signal: str = 'SIGINT',
+
+    ) -> bool:
+        try:
+            # XXX: market store doesn't seem to shutdown nicely all the
+            # time with this (maybe because there are still open grpc
+            # connections?) noticably after client connections have been
+            # made or are in use/teardown. It works just fine if you
+            # just start and stop the container tho?..
+            log.cancel(f'SENDING {signal} to {self.cntr.id}')
+            self.cntr.kill(signal)
+            return True
+
+        except docker.errors.APIError as err:
+            if 'is not running' in err.explanation:
+                return False
+
+    async def cancel(
+        self,
+        stop_msg: str,
+    ) -> None:
+
+        cid = self.cntr.id
+        # first try a graceful cancel
+        log.cancel(
+            f'SIGINT cancelling container: {cid}\n'
+            f'waiting on stop msg: "{stop_msg}"'
+        )
+        self.try_signal('SIGINT')
+
+        start = time.time()
+        for _ in range(30):
+
+            with trio.move_on_after(0.5) as cs:
+                cs.shield = True
+                await self.process_logs_until(stop_msg)
+
+                # if we aren't cancelled on above checkpoint then we
+                # assume we read the expected stop msg and terminated.
+                break
+
+            try:
+                log.info(f'Polling for container shutdown:\n{cid}')
+
+                if self.cntr.status not in {'exited', 'not-running'}:
+                    self.cntr.wait(
+                        timeout=0.1,
+                        condition='not-running',
+                    )
+
+                break
+
+            except (
+                ReadTimeout,
+            ):
+                log.info(f'Still waiting on container:\n{cid}')
+                continue
+
+            except (
+                docker.errors.APIError,
+                ConnectionError,
+            ):
+                log.exception('Docker connection failure')
+                break
+        else:
+            delay = time.time() - start
+            log.error(
+                f'Failed to kill container {cid} after {delay}s\n'
+                'sending SIGKILL..'
+            )
+            # get out the big guns, bc apparently marketstore
+            # doesn't actually know how to terminate gracefully
+            # :eyeroll:...
+            self.try_signal('SIGKILL')
+            self.cntr.wait(
+                timeout=3,
+                condition='not-running',
+            )
+
+        log.cancel(f'Container stopped: {cid}')
+
+
+@tractor.context
+async def open_ahabd(
+    ctx: tractor.Context,
+    endpoint: str,  # ns-pointer str-msg-type
+
+    **kwargs,
+
+) -> None:
+    get_console_log('info', name=__name__)
+
+    async with open_docker() as client:
+
+        # TODO: eventually offer a config-oriented API to do the mounts,
+        # params, etc. passing to ``Containter.run()``?
+        # call into endpoint for container config/init
+        ep_func = NamespacePath(endpoint).load_ref()
+        (
+            dcntr,
+            cntr_config,
+            start_msg,
+            stop_msg,
+        ) = ep_func(client)
+        cntr = Container(dcntr)
+
+        with trio.move_on_after(1):
+            found = await cntr.process_logs_until(start_msg)
+
+            if not found and cntr not in client.containers.list():
+                raise RuntimeError(
+                    'Failed to start `marketstore` check logs deats'
+                )
+
+        await ctx.started((
+            cntr.cntr.id,
+            os.getpid(),
+            cntr_config,
+        ))
+
+        try:
+
+            # TODO: we might eventually want a proxy-style msg-prot here
+            # to allow remote control of containers without needing
+            # callers to have root perms?
+            await trio.sleep_forever()
+
+        finally:
+            with trio.CancelScope(shield=True):
+                await cntr.cancel(stop_msg)
+
+
+async def start_ahab(
+    service_name: str,
+    endpoint: Callable[docker.DockerClient, DockerContainer],
+    task_status: TaskStatus[
+        tuple[
+            trio.Event,
+            dict[str, Any],
+        ],
+    ] = trio.TASK_STATUS_IGNORED,
+
+) -> None:
+    '''
+    Start a ``docker`` container supervisor with given service name.
+
+    Currently the actor calling this task should normally be started
+    with root permissions (until we decide to use something that doesn't
+    require this, like docker's rootless mode or some wrapper project) but
+    te root perms are de-escalated after the docker supervisor sub-actor
+    is started.
+
+    '''
+    cn_ready = trio.Event()
+    try:
+        async with tractor.open_nursery(
+            loglevel='runtime',
+        ) as tn:
+
+            portal = await tn.start_actor(
+                service_name,
+                enable_modules=[__name__]
+            )
+
+            # TODO: we have issues with this on teardown
+            # where ``tractor`` tries to issue ``os.kill()``
+            # and hits perms errors since the root process
+            # doesn't any longer have root perms..
+
+            # de-escalate root perms to the original user
+            # after the docker supervisor actor is spawned.
+            if config._parent_user:
+                import pwd
+                os.setuid(
+                    pwd.getpwnam(
+                        config._parent_user
+                    )[2]  # named user's uid
+                )
+
+            async with portal.open_context(
+                open_ahabd,
+                endpoint=str(NamespacePath.from_ref(endpoint)),
+            ) as (ctx, first):
+
+                cid, pid, cntr_config = first
+
+                task_status.started((
+                    cn_ready,
+                    cntr_config,
+                    (cid, pid),
+                ))
+
+                await trio.sleep_forever()
+
+    # since we demoted root perms in this parent
+    # we'll get a perms error on proc cleanup in
+    # ``tractor`` nursery exit. just make sure
+    # the child is terminated and don't raise the
+    # error if so.
+
+    # TODO: we could also consider adding
+    # a ``tractor.ZombieDetected`` or something that we could raise
+    # if we find the child didn't terminate.
+    except PermissionError:
+        log.warning('Failed to cancel root permsed container')
+
+    except (
+        trio.MultiError,
+    ) as err:
+        for subexc in err.exceptions:
+            if isinstance(subexc, PermissionError):
+                log.warning('Failed to cancel root perms-ed container')
+                return
+        else:
+            raise
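The new supervisor module delegates all image-specific setup to the ``endpoint`` callable handed to ``start_ahab()``; per the unpacking in ``open_ahabd`` above it must return a 4-tuple ``(container, config, start_msg, stop_msg)``. A hedged sketch of such an endpoint (the image name and log strings are invented; ``start_marketstore`` from ``piker.data.marketstore``, wired up in the CLI hunk earlier, is the real-world example):

    import docker
    from docker.models.containers import Container as DockerContainer

    def start_echo_service(
        client: docker.DockerClient,
    ) -> tuple[DockerContainer, dict, str, str]:
        # run a detached container for the supervisor to babysit
        cntr: DockerContainer = client.containers.run(
            'some/image:latest',  # hypothetical image
            detach=True,
        )
        config: dict = {}  # any config the caller should get back
        return (
            cntr,
            config,
            'service is ready',  # start_msg: awaited in container logs
            'shutting down',     # stop_msg: expected on graceful exit
        )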
@@ -14,25 +14,69 @@

 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
+'''
 Stream format enforcement.
-"""
-
-from typing import AsyncIterator, Optional, Tuple

-import numpy as np
+'''
+from itertools import chain
+from typing import AsyncIterator


 def iterticks(
     quote: dict,
-    types: Tuple[str] = ('trade', 'utrade'),
+    types: tuple[str] = (
+        'trade',
+        'dark_trade',
+    ),
+    deduplicate_darks: bool = False,
+
 ) -> AsyncIterator:
-    """Iterate through ticks delivered per quote cycle.
-    """
+    '''
+    Iterate through ticks delivered per quote cycle.
+
+    '''
+    if deduplicate_darks:
+        assert 'dark_trade' in types
+
     # print(f"{quote}\n\n")
     ticks = quote.get('ticks', ())
+    trades = {}
+    darks = {}
+
     if ticks:
+
+        # do a first pass and attempt to remove duplicate dark
+        # trades with the same tick signature.
+        if deduplicate_darks:
+            for tick in ticks:
+                ttype = tick.get('type')
+
+                time = tick.get('time', None)
+                if time:
+                    sig = (
+                        time,
+                        tick['price'],
+                        tick['size']
+                    )
+
+                    if ttype == 'dark_trade':
+                        darks[sig] = tick
+
+                    elif ttype == 'trade':
+                        trades[sig] = tick
+
+            # filter duplicates
+            for sig, tick in trades.items():
+                tick = darks.pop(sig, None)
+                if tick:
+                    ticks.remove(tick)
+                    # print(f'DUPLICATE {tick}')
+
+            # re-insert ticks
+            ticks.extend(list(chain(trades.values(), darks.values())))
+
         for tick in ticks:
             # print(f"{quote['symbol']}: {tick}")
-            if tick.get('type') in types:
+            ttype = tick.get('type')
+            if ttype in types:
                 yield tick
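A quick usage sketch of the new dedup path: with ``deduplicate_darks=True`` a ``dark_trade`` tick sharing a ``(time, price, size)`` signature with a lit ``trade`` is filtered out of the iteration (sample quote invented):

    from piker.data._normalize import iterticks

    quote = {
        'symbol': 'mnq.globex.ib',
        'ticks': [
            {'type': 'trade', 'time': 1, 'price': 100.0, 'size': 2},
            # same signature as the lit trade above -> dropped
            {'type': 'dark_trade', 'time': 1, 'price': 100.0, 'size': 2},
        ],
    }

    for tick in iterticks(quote, deduplicate_darks=True):
        assert tick['type'] != 'dark_trade'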
@ -1,5 +1,5 @@
|
||||||
# piker: trading gear for hackers
|
# piker: trading gear for hackers
|
||||||
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
|
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)
|
||||||
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
@ -15,40 +15,57 @@
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
Data buffers for fast shared humpy.
|
Sampling and broadcast machinery for (soft) real-time delivery of
|
||||||
|
financial data flows.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
from collections import Counter
|
||||||
import time
|
import time
|
||||||
from typing import Dict, List
|
from typing import TYPE_CHECKING, Optional, Union
|
||||||
|
|
||||||
import tractor
|
import tractor
|
||||||
import trio
|
import trio
|
||||||
from trio_typing import TaskStatus
|
from trio_typing import TaskStatus
|
||||||
|
|
||||||
from ._sharedmem import ShmArray
|
|
||||||
from ..log import get_logger
|
from ..log import get_logger
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from ._sharedmem import ShmArray
|
||||||
|
from .feed import _FeedsBus
|
||||||
|
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class sampler:
|
||||||
|
'''
|
||||||
|
Global sampling engine registry.
|
||||||
|
|
||||||
|
Manages state for sampling events, shm incrementing and
|
||||||
|
sample period logic.
|
||||||
|
|
||||||
|
'''
|
||||||
# TODO: we could stick these in a composed type to avoid
|
# TODO: we could stick these in a composed type to avoid
|
||||||
 # angering the "i hate module scoped variables crowd" (yawn).
-_shms: Dict[int, List[ShmArray]] = {}
-_start_increment: Dict[str, trio.Event] = {}
-_incrementers: Dict[int, trio.CancelScope] = {}
-_subscribers: Dict[str, tractor.Context] = {}
+ohlcv_shms: dict[int, list[ShmArray]] = {}

+# holds one-task-per-sample-period tasks which are spawned as-needed by
+# data feed requests with a given detected time step usually from
+# history loading.
+incrementers: dict[int, trio.CancelScope] = {}


-def shm_incrementing(shm_token_name: str) -> trio.Event:
-    global _start_increment
-    return _start_increment.setdefault(shm_token_name, trio.Event())
+# holds all the ``tractor.Context`` remote subscriptions for
+# a particular sample period increment event: all subscribers are
+# notified on a step.
+subscribers: dict[int, tractor.Context] = {}


 async def increment_ohlc_buffer(
     delay_s: int,
     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
 ):
-    """Task which inserts new bars into the provide shared memory array
+    '''
+    Task which inserts new bars into the provided shared memory array
     every ``delay_s`` seconds.

     This task fulfills 2 purposes:
@@ -59,8 +76,8 @@ async def increment_ohlc_buffer(

     Note that if **no** actor has initiated this task then **none** of
     the underlying buffers will actually be incremented.
-    """
+
+    '''
     # # wait for brokerd to signal we should start sampling
     # await shm_incrementing(shm_token['shm_name']).wait()

@@ -69,19 +86,18 @@ async def increment_ohlc_buffer(
     # to solve this is to make this task aware of the instrument's
     # tradable hours?

-    global _incrementers
-
     # adjust delay to compensate for trio processing time
-    ad = min(_shms.keys()) - 0.001
+    ad = min(sampler.ohlcv_shms.keys()) - 0.001

     total_s = 0  # total seconds counted
-    lowest = min(_shms.keys())
+    lowest = min(sampler.ohlcv_shms.keys())
+    lowest_shm = sampler.ohlcv_shms[lowest][0]
     ad = lowest - 0.001

     with trio.CancelScope() as cs:

         # register this time period step as active
-        _incrementers[delay_s] = cs
+        sampler.incrementers[delay_s] = cs
         task_status.started(cs)

         while True:
@@ -91,8 +107,10 @@ async def increment_ohlc_buffer(
             total_s += lowest

             # increment all subscribed shm arrays
-            # TODO: this in ``numba``
-            for delay_s, shms in _shms.items():
+            # TODO:
+            # - this in ``numba``
+            # - just lookup shms for this step instead of iterating?
+            for delay_s, shms in sampler.ohlcv_shms.items():
                 if total_s % delay_s != 0:
                     continue

@@ -117,76 +135,121 @@ async def increment_ohlc_buffer(
                 # write to the buffer
                 shm.push(last)

-            # broadcast the buffer index step
-            subs = _subscribers.get(delay_s, ())
-
-            for ctx in subs:
-                try:
-                    await ctx.send_yield({'index': shm._last.value})
-                except (
-                    trio.BrokenResourceError,
-                    trio.ClosedResourceError
-                ):
-                    log.error(f'{ctx.chan.uid} dropped connection')
-                    subs.remove(ctx)
+            await broadcast(delay_s, shm=lowest_shm)
+
+
+async def broadcast(
+    delay_s: int,
+    shm: Optional[ShmArray] = None,
+
+) -> None:
+    '''
+    Broadcast the given ``shm: ShmArray``'s buffer index step to any
+    subscribers for a given sample period.
+
+    The sent msg will include the first and last index which slice into
+    the buffer's non-empty data.
+
+    '''
+    subs = sampler.subscribers.get(delay_s, ())
+
+    first = last = -1
+
+    if shm is None:
+        periods = sampler.ohlcv_shms.keys()
+        # if this is an update triggered by a history update there
+        # might not actually be any sampling bus setup since there's
+        # no "live feed" active yet.
+        if periods:
+            lowest = min(periods)
+            shm = sampler.ohlcv_shms[lowest][0]
+            first = shm._first.value
+            last = shm._last.value
+
+    for stream in subs:
+        try:
+            await stream.send({
+                'first': first,
+                'last': last,
+                'index': last,
+            })
+        except (
+            trio.BrokenResourceError,
+            trio.ClosedResourceError
+        ):
+            log.error(
+                f'{stream._ctx.chan.uid} dropped connection'
+            )
+            try:
+                subs.remove(stream)
+            except ValueError:
+                log.warning(
+                    f'{stream._ctx.chan.uid} sub already removed!?'
+                )

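Note: the new (right-hand) lines above move the former module-scoped dicts onto a process-wide ``sampler`` namespace which the rewritten code references as ``sampler.ohlcv_shms``, ``sampler.incrementers`` and ``sampler.subscribers``. The enclosing declaration is not visible in this hunk; a minimal sketch of what it is assumed to look like (attribute names are from the diff, the class-style namespace is an inference from the ``sampler.*`` references):

    # sketch only, not the committed code
    class sampler:
        # per-sample-period (in seconds) lists of OHLCV shm arrays
        ohlcv_shms: dict[int, list[ShmArray]] = {}
        # one incrementer task cancel-scope per sample period
        incrementers: dict[int, trio.CancelScope] = {}
        # subscriber streams per sample period
        subscribers: dict[int, list] = {}
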
-@tractor.stream
+@tractor.context
 async def iter_ohlc_periods(
     ctx: tractor.Context,
     delay_s: int,

 ) -> None:
-    """
+    '''
     Subscribe to OHLC sampling "step" events: when the time
     aggregation period increments, this event stream emits an index
     event.

-    """
+    '''
     # add our subscription
-    global _subscribers
-    subs = _subscribers.setdefault(delay_s, [])
-    subs.append(ctx)
+    subs = sampler.subscribers.setdefault(delay_s, [])
+    await ctx.started()
+    async with ctx.open_stream() as stream:
+        subs.append(stream)

         try:
             # stream and block until cancelled
             await trio.sleep_forever()
         finally:
             try:
-                subs.remove(ctx)
+                subs.remove(stream)
             except ValueError:
                 log.error(
-                    f'iOHLC step stream was already dropped for {ctx.chan.uid}?'
+                    f'iOHLC step stream was already dropped {ctx.chan.uid}?'
                 )


 async def sample_and_broadcast(

-    bus: '_FeedBus',  # noqa
+    bus: _FeedsBus,  # noqa
     shm: ShmArray,
     quote_stream: trio.abc.ReceiveChannel,
+    brokername: str,
     sum_tick_vlm: bool = True,

 ) -> None:

     log.info("Started shared mem bar writer")

+    overruns = Counter()
+
     # iterate stream delivered by broker
     async for quotes in quote_stream:

         # TODO: ``numba`` this!
-        for sym, quote in quotes.items():
-            # TODO: in theory you can send the IPC msg *before*
-            # writing to the sharedmem array to decrease latency,
-            # however, that will require `tractor.msg.pub` support
-            # here or at least some way to prevent task switching
-            # at the yield such that the array write isn't delayed
-            # while another consumer is serviced..
+        for broker_symbol, quote in quotes.items():
+            # TODO: in theory you can send the IPC msg *before* writing
+            # to the sharedmem array to decrease latency, however, that
+            # will require at least some way to prevent task switching
+            # at the yield such that the array write isn't delayed while
+            # another consumer is serviced..

             # start writing the shm buffer with appropriate
             # trade data
-            for tick in quote['ticks']:
+
+            # TODO: we should probably not write every single
+            # value to an OHLC sample stream XD
+            # for a tick stream sure.. but this is excessive..
+            ticks = quote['ticks']
+            for tick in ticks:
                 ticktype = tick['type']

                 # write trade events to shm last OHLC sample
@@ -236,20 +299,72 @@ async def sample_and_broadcast(
             # end up triggering backpressure which will
             # eventually block this producer end of the feed and
             # thus other consumers still attached.
-            subs = bus._subscribers[sym.lower()]
+            subs: list[
+                tuple[
+                    Union[tractor.MsgStream, trio.MemorySendChannel],
+                    tractor.Context,
+                    Optional[float],  # tick throttle in Hz
+                ]
+            ] = bus._subscribers[broker_symbol.lower()]

-            lags = 0
-            for (stream, tick_throttle) in subs:
+            # NOTE: by default the broker backend doesn't append
+            # it's own "name" into the fqsn schema (but maybe it
+            # should?) so we have to manually generate the correct
+            # key here.
+            bsym = f'{broker_symbol}.{brokername}'
+            lags: int = 0
+
+            for (stream, ctx, tick_throttle) in subs:

                 try:
                     with trio.move_on_after(0.2) as cs:
                         if tick_throttle:
                             # this is a send mem chan that likely
                             # pushes to the ``uniform_rate_send()`` below.
-                            await stream.send((sym, quote))
+                            try:
+                                stream.send_nowait(
+                                    (bsym, quote)
+                                )
+                            except trio.WouldBlock:
+                                chan = ctx.chan
+                                if ctx:
+                                    log.warning(
+                                        f'Feed overrun {bus.brokername} ->'
+                                        f'{chan.uid} !!!'
+                                    )
+                                else:
+                                    key = id(stream)
+                                    overruns[key] += 1
+                                    log.warning(
+                                        f'Feed overrun {broker_symbol}'
+                                        '@{bus.brokername} -> '
+                                        f'feed @ {tick_throttle} Hz'
+                                    )
+                                    if overruns[key] > 6:
+                                        # TODO: should we check for the
+                                        # context being cancelled? this
+                                        # could happen but the
+                                        # channel-ipc-pipe is still up.
+                                        if not chan.connected():
+                                            log.warning(
+                                                'Dropping broken consumer:\n'
+                                                f'{broker_symbol}:'
+                                                f'{ctx.cid}@{chan.uid}'
+                                            )
+                                            await stream.aclose()
+                                            raise trio.BrokenResourceError
+                                        else:
+                                            log.warning(
+                                                'Feed getting overrun bro!\n'
+                                                f'{broker_symbol}:'
+                                                f'{ctx.cid}@{chan.uid}'
+                                            )
+                                            continue

                         else:
-                            await stream.send({sym: quote})
+                            await stream.send(
+                                {bsym: quote}
+                            )

                     if cs.cancelled_caught:
                         lags += 1
@@ -258,17 +373,32 @@

                 except (
                     trio.BrokenResourceError,
-                    trio.ClosedResourceError
+                    trio.ClosedResourceError,
+                    trio.EndOfChannel,
                 ):
+                    chan = ctx.chan
+                    if ctx:
+                        log.warning(
+                            'Dropped `brokerd`-quotes-feed connection:\n'
+                            f'{broker_symbol}:'
+                            f'{ctx.cid}@{chan.uid}'
+                        )
+                    if tick_throttle:
+                        assert stream._closed
+
                     # XXX: do we need to deregister here
                     # if it's done in the feed bus code?
                     # so far seems like no since this should all
-                    # be single-threaded.
-                    log.warning(
-                        f'{stream._ctx.chan.uid} dropped '
-                        '`brokerd`-quotes-feed connection'
-                    )
-                    subs.remove((stream, tick_throttle))
+                    # be single-threaded. Doing it anyway though
+                    # since there seems to be some kinda race..
+                    try:
+                        subs.remove((stream, tick_throttle))
+                    except ValueError:
+                        log.error(
+                            f'Stream was already removed from subs!?\n'
+                            f'{broker_symbol}:'
+                            f'{ctx.cid}@{chan.uid}'
+                        )


 # TODO: a less naive throttler, here's some snippets:
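The ``send_nowait()`` + ``trio.WouldBlock`` pattern introduced above keeps the quote producer from ever blocking on a slow throttled consumer; overruns are counted and a subscriber is dropped only after repeated strikes on a dead channel. A self-contained sketch of just that non-blocking guard (all names here are illustrative):

    import trio
    from collections import Counter

    async def overrun_guard_demo() -> None:
        # a depth-2 buffer stands in for a throttled consumer's mem chan
        send, recv = trio.open_memory_channel(2)
        overruns: Counter = Counter()
        for i in range(5):
            try:
                send.send_nowait(i)  # the producer never blocks
            except trio.WouldBlock:
                # count it; a real feed would drop the sub after N strikes
                overruns['demo'] += 1
        assert overruns['demo'] == 3

    trio.run(overrun_guard_demo)
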
@@ -281,58 +411,109 @@ async def uniform_rate_send(
     quote_stream: trio.abc.ReceiveChannel,
     stream: tractor.MsgStream,

+    task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
+
 ) -> None:

-    sleep_period = 1/rate - 0.000616
+    # TODO: compute the approx overhead latency per cycle
+    left_to_sleep = throttle_period = 1/rate - 0.000616
+
+    # send cycle state
+    first_quote = last_quote = None
     last_send = time.time()
-    aname = stream._ctx.chan.uid[0]
-    fsp = False
-    if 'fsp' in aname:
-        fsp = True
+    diff = 0
+
+    task_status.started()

     while True:

-        sym, first_quote = await quote_stream.receive()
-        start = time.time()
+        # compute the remaining time to sleep for this throttled cycle
+        left_to_sleep = throttle_period - diff
+
+        if left_to_sleep > 0:
+            with trio.move_on_after(left_to_sleep) as cs:
+                try:
+                    sym, last_quote = await quote_stream.receive()
+                except trio.EndOfChannel:
+                    log.exception(f"feed for {stream} ended?")
+                    break
+
+                diff = time.time() - last_send
+
+                if not first_quote:
+                    first_quote = last_quote
+
+                if (throttle_period - diff) > 0:
+                    # received a quote but the send cycle period hasn't yet
+                    # expired we aren't supposed to send yet so append
+                    # to the tick frame.

-        # append quotes since last iteration into the last quote's
-        # tick array/buffer.
-
-        # TODO: once we decide to get fancy really we should have
-        # a shared mem tick buffer that is just continually filled and
-        # the UI just ready from it at it's display rate.
-        # we'll likely head toward this once we get this issue going:
-        #
-        while True:
-            try:
-                sym, next_quote = quote_stream.receive_nowait()
-                ticks = next_quote.get('ticks')
+                    # append quotes since last iteration into the last quote's
+                    # tick array/buffer.
+                    ticks = last_quote.get('ticks')
+
+                    # XXX: idea for frame type data structure we could
+                    # use on the wire instead of a simple list?
+                    # frames = {
+                    #     'index': ['type_a', 'type_c', 'type_n', 'type_n'],
+                    #     'type_a': [tick0, tick1, tick2, .., tickn],
+                    #     'type_b': [tick0, tick1, tick2, .., tickn],
+                    #     'type_c': [tick0, tick1, tick2, .., tickn],
+                    #     ...
+                    #     'type_n': [tick0, tick1, tick2, .., tickn],
+                    # }
+
+                    # TODO: once we decide to get fancy really we should
+                    # have a shared mem tick buffer that is just
+                    # continually filled and the UI just ready from it
+                    # at it's display rate.
                     if ticks:
                         first_quote['ticks'].extend(ticks)

-            except trio.WouldBlock:
-                now = time.time()
-                rate = 1 / (now - last_send)
-                last_send = now
-
-                # log.info(f'{rate} Hz sending quotes')  # \n{first_quote}')
+                    # send cycle isn't due yet so continue waiting
+                    continue
+
+            if cs.cancelled_caught:
+                # 2 cases:
+                # no quote has arrived yet this cycle so wait for
+                # the next one.
+                if not first_quote:
+                    # if no last quote was received since the last send
+                    # cycle **AND** if we timed out waiting for a most
+                    # recent quote **but** the throttle cycle is now due to
+                    # be sent -> we want to immediately send the next
+                    # received quote ASAP.
+                    sym, first_quote = await quote_stream.receive()
+
+                # we have a quote already so send it now.
+
+        # measured_rate = 1 / (time.time() - last_send)
+        # log.info(
+        #     f'`{sym}` throttled send hz: {round(measured_rate, ndigits=1)}'
+        # )

         # TODO: now if only we could sync this to the display
         # rate timing exactly lul
         try:
             await stream.send({sym: first_quote})
-            break
-        except trio.ClosedResourceError:
+        except (
+            # NOTE: any of these can be raised by ``tractor``'s IPC
+            # transport-layer and we want to be highly resilient
+            # to consumers which crash or lose network connection.
+            # I.e. we **DO NOT** want to crash and propagate up to
+            # ``pikerd`` these kinds of errors!
+            trio.ClosedResourceError,
+            trio.BrokenResourceError,
+            ConnectionResetError,
+        ):
             # if the feed consumer goes down then drop
             # out of this rate limiter
             log.warning(f'{stream} closed')
+            await stream.aclose()
             return

-        end = time.time()
-        diff = end - start
-
-        # throttle to provided transmit rate
-        period = max(sleep_period - diff, 0)
-        if period > 0:
-            await trio.sleep(period)
+        # reset send cycle state
+        first_quote = last_quote = None
+        diff = 0
+        last_send = time.time()

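The rewritten throttler above sleeps for the *remainder* of the cycle inside ``trio.move_on_after()`` instead of sleeping a fixed period after each send. The arithmetic it relies on, worked through for a hypothetical 60Hz consumer (the 0.000616s overhead fudge is taken verbatim from the diff):

    rate = 60  # Hz; hypothetical display/consumer rate
    throttle_period = 1 / rate - 0.000616
    assert round(throttle_period, 6) == 0.016051

    # if collecting ticks took diff = 0.01s this cycle, only the
    # remainder is waited out before the next send:
    diff = 0.01
    left_to_sleep = throttle_period - diff  # ~0.006s
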
@@ -18,17 +18,19 @@
 NumPy compatible shared memory buffers for real-time IPC streaming.

 """
-from dataclasses import dataclass, asdict
+from __future__ import annotations
 from sys import byteorder
-from typing import List, Tuple, Optional
+import time
+from typing import Optional
 from multiprocessing.shared_memory import SharedMemory, _USE_POSIX
-from multiprocessing import resource_tracker as mantracker

 if _USE_POSIX:
     from _posixshmem import shm_unlink

 import tractor
 import numpy as np
+from pydantic import BaseModel
+from numpy.lib import recfunctions as rfn

 from ..log import get_logger
 from ._source import base_iohlc_dtype
@@ -37,6 +39,19 @@ from ._source import base_iohlc_dtype
 log = get_logger(__name__)


+# how much is probably dependent on lifestyle
+_secs_in_day = int(60 * 60 * 24)
+# we try for a buncha times, but only on a run-every-other-day kinda week.
+_days_worth = 16
+_default_size = _days_worth * _secs_in_day
+# where to start the new data append index
+_rt_buffer_start = int((_days_worth - 1) * _secs_in_day)
+
+
+def cuckoff_mantracker():
+
+    from multiprocessing import resource_tracker as mantracker
+
     # Tell the "resource tracker" thing to fuck off.
     class ManTracker(mantracker.ResourceTracker):
         def register(self, name, rtype):
@@ -48,17 +63,19 @@ class ManTracker(mantracker.ResourceTracker):
         def ensure_running(self):
             pass

     # "know your land and know your prey"
     # https://www.dailymotion.com/video/x6ozzco
     mantracker._resource_tracker = ManTracker()
     mantracker.register = mantracker._resource_tracker.register
     mantracker.ensure_running = mantracker._resource_tracker.ensure_running
-ensure_running = mantracker._resource_tracker.ensure_running
+    # ensure_running = mantracker._resource_tracker.ensure_running
     mantracker.unregister = mantracker._resource_tracker.unregister
     mantracker.getfd = mantracker._resource_tracker.getfd


+cuckoff_mantracker()
+
+
 class SharedInt:
     """Wrapper around a single entry shared memory array which
     holds an ``int`` value used as an index counter.
@@ -82,29 +99,42 @@ class SharedInt:
         if _USE_POSIX:
             # We manually unlink to bypass all the "resource tracker"
             # nonsense meant for non-SC systems.
-            shm_unlink(self._shm.name)
+            name = self._shm.name
+            try:
+                shm_unlink(name)
+            except FileNotFoundError:
+                # might be a teardown race here?
+                log.warning(f'Shm for {name} already unlinked?')


-@dataclass
-class _Token:
-    """Internal represenation of a shared memory "token"
+class _Token(BaseModel):
+    '''
+    Internal representation of a shared memory "token"
     which can be used to key a system wide post shm entry.
-    """
+
+    '''
+    class Config:
+        frozen = True
+
     shm_name: str  # this serves as a "key" value
     shm_first_index_name: str
     shm_last_index_name: str
-    dtype_descr: List[Tuple[str]]
+    dtype_descr: tuple

-    def __post_init__(self):
-        # np.array requires a list for dtype
-        self.dtype_descr = np.dtype(list(map(tuple, self.dtype_descr))).descr
+    @property
+    def dtype(self) -> np.dtype:
+        return np.dtype(list(map(tuple, self.dtype_descr))).descr

     def as_msg(self):
-        return asdict(self)
+        return self.dict()

     @classmethod
-    def from_msg(self, msg: dict) -> '_Token':
-        return msg if isinstance(msg, _Token) else _Token(**msg)
+    def from_msg(cls, msg: dict) -> _Token:
+        if isinstance(msg, _Token):
+            return msg
+
+        msg['dtype_descr'] = tuple(map(tuple, msg['dtype_descr']))
+        return _Token(**msg)


 # TODO: this api?
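With ``_Token`` now a frozen pydantic model it is hashable and round-trips through plain ``dict`` msgs over IPC. A hedged usage sketch (``_Token`` is the class from the hunk above; the key names and dtype are illustrative values only):

    import numpy as np

    tok = _Token(
        shm_name='btcusdt.ohlcv',
        shm_first_index_name='btcusdt.ohlcv_first',
        shm_last_index_name='btcusdt.ohlcv_last',
        dtype_descr=tuple(
            np.dtype([('time', float), ('close', float)]).descr
        ),
    )
    msg = tok.as_msg()  # a plain dict, safe to ship over a channel
    assert _Token.from_msg(msg) == tok  # frozen model -> value equality
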
@@ -127,20 +157,23 @@ def _make_token(
     key: str,
     dtype: Optional[np.dtype] = None,
 ) -> _Token:
-    """Create a serializable token that can be used
+    '''
+    Create a serializable token that can be used
     to access a shared array.
-    """
+
+    '''
     dtype = base_iohlc_dtype if dtype is None else dtype
     return _Token(
-        key,
-        key + "_first",
-        key + "_last",
-        np.dtype(dtype).descr
+        shm_name=key,
+        shm_first_index_name=key + "_first",
+        shm_last_index_name=key + "_last",
+        dtype_descr=np.dtype(dtype).descr
     )


 class ShmArray:
-    """A shared memory ``numpy`` (compatible) array API.
+    '''
+    A shared memory ``numpy`` (compatible) array API.

     An underlying shared memory buffer is allocated based on
     a user specified ``numpy.ndarray``. This fixed size array
@@ -150,7 +183,7 @@ class ShmArray:
     ``SharedInt`` interfaces) values such that multiple processes can
     interact with the same array using a synchronized-index.

-    """
+    '''
     def __init__(
         self,
         shmarr: np.ndarray,
@@ -171,17 +204,21 @@ class ShmArray:
         self._post_init: bool = False

         # pushing data does not write the index (aka primary key)
+        dtype = shmarr.dtype
+        if dtype.fields:
             self._write_fields = list(shmarr.dtype.fields.keys())[1:]
+        else:
+            self._write_fields = None

         # TODO: ringbuf api?

     @property
     def _token(self) -> _Token:
         return _Token(
-            self._shm.name,
-            self._first._shm.name,
-            self._last._shm.name,
-            self._array.dtype.descr,
+            shm_name=self._shm.name,
+            shm_first_index_name=self._first._shm.name,
+            shm_last_index_name=self._last._shm.name,
+            dtype_descr=tuple(self._array.dtype.descr),
         )

     @property
@@ -197,7 +234,8 @@ class ShmArray:

     @property
     def array(self) -> np.ndarray:
-        '''Return an up-to-date ``np.ndarray`` view of the
+        '''
+        Return an up-to-date ``np.ndarray`` view of the
         so-far-written data to the underlying shm buffer.

         '''
@@ -216,65 +254,131 @@ class ShmArray:

         return a

+    def ustruct(
+        self,
+        fields: Optional[list[str]] = None,
+
+        # type that all field values will be cast to
+        # in the returned view.
+        common_dtype: np.dtype = np.float,
+
+    ) -> np.ndarray:
+
+        array = self._array
+
+        if fields:
+            selection = array[fields]
+            # fcount = len(fields)
+        else:
+            selection = array
+            # fcount = len(array.dtype.fields)
+
+        # XXX: manual ``.view()`` attempt that also doesn't work.
+        # uview = selection.view(
+        #     dtype='<f16',
+        # ).reshape(-1, 4, order='A')
+
+        # assert len(selection) == len(uview)
+
+        u = rfn.structured_to_unstructured(
+            selection,
+            # dtype=float,
+            copy=True,
+        )
+
+        # unstruct = np.ndarray(u.shape, dtype=a.dtype, buffer=shm.buf)
+        # array[:] = a[:]
+        return u
+        # return ShmArray(
+        #     shmarr=u,
+        #     first=self._first,
+        #     last=self._last,
+        #     shm=self._shm
+        # )
+
     def last(
         self,
         length: int = 1,

     ) -> np.ndarray:
+        '''
+        Return the last ``length``'s worth of ("row") entries from the
+        array.
+
+        '''
         return self.array[-length:]

     def push(
         self,
         data: np.ndarray,

+        field_map: Optional[dict[str, str]] = None,
         prepend: bool = False,
+        update_first: bool = True,
         start: Optional[int] = None,

     ) -> int:
-        '''Ring buffer like "push" to append data
+        '''
+        Ring buffer like "push" to append data
         into the buffer and return updated "last" index.

         NB: no actual ring logic yet to give a "loop around" on overflow
         condition, lel.

         '''
-        self._post_init = True
         length = len(data)
-        index = start or self._last.value

         if prepend:
-            index = self._first.value - length
+            index = (start or self._first.value) - length

             if index < 0:
                 raise ValueError(
                     f'Array size of {self._len} was overrun during prepend.\n'
-                    'You have passed {abs(index)} too many datums.'
+                    f'You have passed {abs(index)} too many datums.'
                 )

+        else:
+            index = start if start is not None else self._last.value
+
         end = index + length

-        fields = self._write_fields
+        if field_map:
+            src_names, dst_names = zip(*field_map.items())
+        else:
+            dst_names = src_names = self._write_fields

         try:
-            self._array[fields][index:end] = data[fields][:]
+            self._array[
+                list(dst_names)
+            ][index:end] = data[list(src_names)][:]

             # NOTE: there was a race here between updating
             # the first and last indices and when the next reader
             # tries to access ``.array`` (which due to the index
             # overlap will be empty). Pretty sure we've fixed it now
             # but leaving this here as a reminder.
-            if prepend:
+            if prepend and update_first and length:
                 assert index < self._first.value

-            if index < self._first.value:
+            if (
+                index < self._first.value
+                and update_first
+            ):
+                assert prepend, 'prepend=True not passed but index decreased?'
                 self._first.value = index
-            else:
+
+            elif not prepend:
                 self._last.value = end

+            self._post_init = True
             return end

         except ValueError as err:
-            # shoudl raise if diff detected
-            self.diff_err_fields(data)
+            if field_map:
+                raise
+
+            # should raise if diff detected
+            self.diff_err_fields(data)
             raise err

     def diff_err_fields(
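The new ``field_map`` parameter on ``push()`` lets a source array whose columns are named differently be written into the target buffer's fields, while ``prepend``/``update_first`` control which shared index moves. A usage sketch against an already-opened ``shm: ShmArray`` (the field names here are hypothetical, not mandated by the diff):

    import numpy as np

    src = np.zeros(3, dtype=[('t', float), ('c', float)])

    # write src's 't'/'c' columns into the buffer's 'time'/'close' fields
    end = shm.push(src, field_map={'t': 'time', 'c': 'close'})

    # backfill history: a prepend moves ``._first`` down instead of
    # bumping ``._last`` up
    shm.push(src, prepend=True)
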
@@ -299,6 +403,7 @@ class ShmArray:
                 f"Input array has unknown field(s): {only_in_theirs}"
             )

+    # TODO: support "silent" prepends that don't update ._first.value?
     def prepend(
         self,
         data: np.ndarray,
@@ -325,12 +430,6 @@ class ShmArray:
         ...


-# how much is probably dependent on lifestyle
-_secs_in_day = int(60 * 60 * 24)
-# we try for 3 times but only on a run-every-other-day kinda week.
-_default_size = 3 * _secs_in_day
-
-
 def open_shm_array(

     key: Optional[str] = None,
@@ -355,7 +454,11 @@ def open_shm_array(
         create=True,
         size=a.nbytes
     )
-    array = np.ndarray(a.shape, dtype=a.dtype, buffer=shm.buf)
+    array = np.ndarray(
+        a.shape,
+        dtype=a.dtype,
+        buffer=shm.buf
+    )
     array[:] = a[:]
     array.setflags(write=int(not readonly))

@@ -381,7 +484,24 @@ def open_shm_array(
         )
     )

-    last.value = first.value = int(_secs_in_day)
+    # start the "real-time" updated section after 3-days worth of 1s
+    # sampled OHLC. this allows appending up to a days worth from
+    # tick/quote feeds before having to flush to a (tsdb) storage
+    # backend, and looks something like,
+    # -------------------------
+    # |              |  i
+    # _________________________
+    # <-------------> <------->
+    #  history         real-time
+    #
+    # Once fully "prepended", the history section will leave the
+    # ``ShmArray._start.value: int = 0`` and the yet-to-be written
+    # real-time section will start at ``ShmArray.index: int``.
+
+    # this sets the index to 3/4 of the length of the buffer
+    # leaving a "days worth of second samples" for the real-time
+    # section.
+    last.value = first.value = _rt_buffer_start

     shmarr = ShmArray(
         array,
@@ -395,6 +515,7 @@ def open_shm_array(

     # "unlink" created shm on process teardown by
     # pushing teardown calls onto actor context stack
+
     tractor._actor._lifetime_stack.callback(shmarr.close)
     tractor._actor._lifetime_stack.callback(shmarr.destroy)

@@ -402,27 +523,48 @@ def open_shm_array(


 def attach_shm_array(
-    token: Tuple[str, str, Tuple[str, str]],
+    token: tuple[str, str, tuple[str, str]],
     size: int = _default_size,
     readonly: bool = True,

 ) -> ShmArray:
-    """Attach to an existing shared memory array previously
+    '''
+    Attach to an existing shared memory array previously
     created by another process using ``open_shared_array``.

     No new shared mem is allocated but wrapper types for read/write
     access are constructed.
-    """
+
+    '''
     token = _Token.from_msg(token)
     key = token.shm_name

     if key in _known_tokens:
         assert _Token.from_msg(_known_tokens[key]) == token, "WTF"

+    # XXX: ugh, looks like due to the ``shm_open()`` C api we can't
+    # actually place files in a subdir, see discussion here:
+    # https://stackoverflow.com/a/11103289
+
     # attach to array buffer and view as per dtype
-    shm = SharedMemory(name=key)
+    _err: Optional[Exception] = None
+    for _ in range(3):
+        try:
+            shm = SharedMemory(
+                name=key,
+                create=False,
+            )
+            break
+        except OSError as oserr:
+            _err = oserr
+            time.sleep(0.1)
+    else:
+        if _err:
+            raise _err
+
     shmarr = np.ndarray(
         (size,),
-        dtype=token.dtype_descr,
+        dtype=token.dtype,
         buffer=shm.buf
     )
     shmarr.setflags(write=int(not readonly))
@@ -470,8 +612,10 @@ def maybe_open_shm_array(
     key: str,
     dtype: Optional[np.dtype] = None,
     **kwargs,
-) -> Tuple[ShmArray, bool]:
-    """Attempt to attach to a shared memory block using a "key" lookup
+) -> tuple[ShmArray, bool]:
+    '''
+    Attempt to attach to a shared memory block using a "key" lookup
     to registered blocks in the users overall "system" registry
     (presumes you don't have the block's explicit token).

@@ -485,7 +629,8 @@ def maybe_open_shm_array(

     If you know the explicit ``_Token`` for your memory segment instead
     use ``attach_shm_array``.
-    """
+
+    '''
     try:
         # see if we already know this key
         token = _known_tokens[key]
@@ -505,3 +650,35 @@ def maybe_open_shm_array(
         # to fail if a block has been allocated
         # on the OS by someone else.
         return open_shm_array(key=key, dtype=dtype, **kwargs), True
+
+
+def try_read(
+    array: np.ndarray
+
+) -> Optional[np.ndarray]:
+    '''
+    Try to read the last row from a shared mem array or ``None``
+    if the array read returns a zero-length array result.
+
+    Can be used to check for backfilling race conditions where an array
+    is currently being (re-)written by a writer actor but the reader is
+    unaware and reads during the window where the first and last indexes
+    are being updated.
+
+    '''
+    try:
+        return array[-1]
+    except IndexError:
+        # XXX: race condition with backfilling shm.
+        #
+        # the underlying issue is that a backfill (aka prepend) and subsequent
+        # shm array first/last index update could result in an empty array
+        # read here since the indices may be updated in such a way that
+        # a read delivers an empty array (though it seems like we
+        # *should* be able to prevent that?). also, as an alt and
+        # something we need anyway, maybe there should be some kind of
+        # signal that a prepend is taking place and this consumer can
+        # respond (eg. redrawing graphics) accordingly.
+
+        # the array read was empty
+        return None
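``try_read()`` gives readers a cheap guard against the prepend race described in its docstring; typical consumer-side use, assuming ``shm`` was already attached via ``attach_shm_array()``:

    row = try_read(shm.array)
    if row is None:
        # writer is mid-backfill and the index window is empty:
        # skip this render/compute cycle and poll again next step
        ...
    else:
        last_close = row['close']
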
@@ -17,12 +17,13 @@
 """
 numpy data source conversion helpers.
 """
-from typing import Dict, Any, List
+from __future__ import annotations
+from typing import Any
 import decimal

+from bidict import bidict
 import numpy as np
-import pandas as pd
-from pydantic import BaseModel, validate_arguments
+from pydantic import BaseModel
 # from numba import from_dtype


@@ -32,7 +33,7 @@ ohlc_fields = [
     ('high', float),
     ('low', float),
     ('close', float),
-    ('volume', int),
+    ('volume', float),
     ('bar_wap', float),
 ]

@@ -47,16 +48,29 @@ base_ohlc_dtype = np.dtype(ohlc_fields)
 # https://github.com/numba/numba/issues/4511
 # numba_ohlc_dtype = from_dtype(base_ohlc_dtype)

-# map time frame "keys" to minutes values
-tf_in_1m = {
-    '1m': 1,
-    '5m': 5,
-    '15m': 15,
-    '30m': 30,
-    '1h': 60,
-    '4h': 240,
-    '1d': 1440,
-}
+# map time frame "keys" to seconds values
+tf_in_1s = bidict({
+    1: '1s',
+    60: '1m',
+    60*5: '5m',
+    60*15: '15m',
+    60*30: '30m',
+    60*60: '1h',
+    60*60*24: '1d',
+})
+
+
+def mk_fqsn(
+    provider: str,
+    symbol: str,
+
+) -> str:
+    '''
+    Generate a "fully qualified symbol name" which is
+    a reverse-hierarchical cross broker/provider symbol
+
+    '''
+    return '.'.join([symbol, provider]).lower()


 def float_digits(
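Keying the new ``tf_in_1s`` table by *seconds* and making it a ``bidict`` buys O(1) lookups in both directions; a quick sketch of the two-way mapping:

    from bidict import bidict

    tf_in_1s = bidict({
        1: '1s',
        60: '1m',
        60 * 60: '1h',
    })
    assert tf_in_1s[60] == '1m'            # seconds -> timeframe label
    assert tf_in_1s.inverse['1h'] == 3600  # label -> seconds
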
@@ -78,111 +92,166 @@ def ohlc_zeros(length: int) -> np.ndarray:
     return np.zeros(length, dtype=base_ohlc_dtype)


+def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
+    '''
+    Unpack a fully-qualified-symbol-name to ``tuple``.
+
+    '''
+    venue = ''
+    suffix = ''
+
+    # TODO: probably reverse the order of all this XD
+    tokens = fqsn.split('.')
+    if len(tokens) < 3:
+        # probably crypto
+        symbol, broker = tokens
+        return (
+            broker,
+            symbol,
+            '',
+        )
+
+    elif len(tokens) > 3:
+        symbol, venue, suffix, broker = tokens
+    else:
+        symbol, venue, broker = tokens
+        suffix = ''
+
+    # head, _, broker = fqsn.rpartition('.')
+    # symbol, _, suffix = head.rpartition('.')
+    return (
+        broker,
+        '.'.join([symbol, venue]),
+        suffix,
+    )
+
+
 class Symbol(BaseModel):
-    """I guess this is some kinda container thing for dealing with
+    '''
+    I guess this is some kinda container thing for dealing with
     all the different meta-data formats from brokers?
-
-    Yah, i guess dats what it izz.
-    """
+    '''
     key: str
-    type_key: str  # {'stock', 'forex', 'future', ... etc.}
-    tick_size: float
-    lot_tick_size: float  # "volume" precision as min step value
-    tick_size_digits: int
-    lot_size_digits: int
-    broker_info: Dict[str, Dict[str, Any]] = {}
+    tick_size: float = 0.01
+    lot_tick_size: float = 0.0  # "volume" precision as min step value
+    tick_size_digits: int = 2
+    lot_size_digits: int = 0
+    suffix: str = ''
+    broker_info: dict[str, dict[str, Any]] = {}

     # specifies a "class" of financial instrument
     # ex. stock, future, option, bond etc.

-    @property
-    def brokers(self) -> List[str]:
-        return list(self.broker_info.keys())
-
-    def nearest_tick(self, value: float) -> float:
-        """Return the nearest tick value based on minimum increment.
-
-        """
-        mult = 1 / self.tick_size
-        return round(value * mult) / mult
-
-
-@validate_arguments
-def mk_symbol(
-
-    key: str,
-    type_key: str,
-    tick_size: float = 0.01,
-    lot_tick_size: float = 0,
-    broker_info: dict[str, Any] = {},
-
-) -> Symbol:
-    '''Create and return an instrument description for the
-    "symbol" named as ``key``.
-
-    '''
-    return Symbol(
-        key=key,
-        type_key=type_key,
-        tick_size=tick_size,
-        lot_tick_size=lot_tick_size,
-        tick_size_digits=float_digits(tick_size),
-        lot_size_digits=float_digits(lot_tick_size),
-        broker_info=broker_info,
-    )
-
-
-def from_df(
-    df: pd.DataFrame,
-    source=None,
-    default_tf=None
-) -> np.recarray:
-    """Convert OHLC formatted ``pandas.DataFrame`` to ``numpy.recarray``.
-
-    """
-    df.reset_index(inplace=True)
-
-    # hackery to convert field names
-    date = 'Date'
-    if 'date' in df.columns:
-        date = 'date'
-
-    # convert to POSIX time
-    df[date] = [d.timestamp() for d in df[date]]
-
-    # try to rename from some camel case
-    columns = {
-        'Date': 'time',
-        'date': 'time',
-        'Open': 'open',
-        'High': 'high',
-        'Low': 'low',
-        'Close': 'close',
-        'Volume': 'volume',
-
-        # most feeds are providing this over session anchored
-        'vwap': 'bar_wap',
-
-        # XXX: ib_insync calls this the "wap of the bar"
-        # but no clue what it actually is...
-        # https://github.com/pikers/piker/issues/119#issuecomment-729120988
-        'average': 'bar_wap',
-    }
-    df = df.rename(columns=columns)
-
-    for name in df.columns:
-        # if name not in base_ohlc_dtype.names[1:]:
-        if name not in base_ohlc_dtype.names:
-            del df[name]
-
-    # TODO: it turns out column access on recarrays is actually slower:
-    # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
-    # it might make sense to make these structured arrays?
-    array = df.to_records(index=False)
-    _nan_to_closest_num(array)
-
-    return array
+    # @validate_arguments
+    @classmethod
+    def from_broker_info(
+        cls,
+        broker: str,
+        symbol: str,
+        info: dict[str, Any],
+        suffix: str = '',
+
+        # XXX: like wtf..
+        # ) -> 'Symbol':
+    ) -> None:
+
+        tick_size = info.get('price_tick_size', 0.01)
+        lot_tick_size = info.get('lot_tick_size', 0.0)
+
+        return Symbol(
+            key=symbol,
+            tick_size=tick_size,
+            lot_tick_size=lot_tick_size,
+            tick_size_digits=float_digits(tick_size),
+            lot_size_digits=float_digits(lot_tick_size),
+            suffix=suffix,
+            broker_info={broker: info},
+        )
+
+    @classmethod
+    def from_fqsn(
+        cls,
+        fqsn: str,
+        info: dict[str, Any],
+
+        # XXX: like wtf..
+        # ) -> 'Symbol':
+    ) -> None:
+        broker, key, suffix = unpack_fqsn(fqsn)
+        return cls.from_broker_info(
+            broker,
+            key,
+            info=info,
+            suffix=suffix,
+        )
+
+    @property
+    def type_key(self) -> str:
+        return list(self.broker_info.values())[0]['asset_type']
+
+    @property
+    def brokers(self) -> list[str]:
+        return list(self.broker_info.keys())
+
+    def nearest_tick(self, value: float) -> float:
+        '''
+        Return the nearest tick value based on minimum increment.
+
+        '''
+        mult = 1 / self.tick_size
+        return round(value * mult) / mult
+
+    def front_feed(self) -> tuple[str, str]:
+        '''
+        Return the "current" feed key for this symbol.
+
+        (i.e. the broker + symbol key in a tuple).
+
+        '''
+        return (
+            list(self.broker_info.keys())[0],
+            self.key,
+        )
+
+    def tokens(self) -> tuple[str]:
+        broker, key = self.front_feed()
+        if self.suffix:
+            return (key, self.suffix, broker)
+        else:
+            return (key, broker)
+
+    def front_fqsn(self) -> str:
+        '''
+        fqsn = "fully qualified symbol name"
+
+        Basically the idea here is for all client-ish code (aka programs/actors
+        that ask the provider agnostic layers in the stack for data) should be
+        able to tell which backend / venue / derivative each data feed/flow is
+        from by an explicit string key of the current form:
+
+        <instrumentname>.<venue>.<suffixwithmetadata>.<brokerbackendname>
+
+        TODO: I have thoughts that we should actually change this to be
+        more like an "attr lookup" (like how the web should have done
+        urls, but marketing peeps ruined it etc. etc.):
+
+        <broker>.<venue>.<instrumentname>.<suffixwithmetadata>
+
+        '''
+        tokens = self.tokens()
+        fqsn = '.'.join(tokens)
+        return fqsn
+
+    def iterfqsns(self) -> list[str]:
+        keys = []
+        for broker in self.broker_info.keys():
+            fqsn = mk_fqsn(self.key, broker)
+            if self.suffix:
+                fqsn += f'.{self.suffix}'
+            keys.append(fqsn)
+
+        return keys


 def _nan_to_closest_num(array: np.ndarray):
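The fqsn helpers compose and decompose the ``<symbol>.<venue>.<suffix>.<broker>`` key format described in ``front_fqsn()``; a round-trip example with illustrative values:

    assert mk_fqsn('binance', 'BTCUSDT') == 'btcusdt.binance'

    # 4-token form: symbol, venue, suffix (e.g. an expiry), broker
    broker, key, suffix = unpack_fqsn('mnq.globex.20210616.ib')
    assert (broker, key, suffix) == ('ib', 'mnq.globex', '20210616')

    # 2-token form ("probably crypto"): venue and suffix stay empty
    assert unpack_fqsn('btcusdt.binance') == ('binance', 'btcusdt', '')
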
@@ -20,7 +20,7 @@ ToOlS fOr CoPInG wITh "tHE wEB" protocols.
 """
 from contextlib import asynccontextmanager, AsyncExitStack
 from types import ModuleType
-from typing import Any, Callable
+from typing import Any, Callable, AsyncGenerator
 import json

 import trio
@@ -53,11 +53,13 @@ class NoBsWs:
     def __init__(
         self,
         url: str,
+        token: str,
         stack: AsyncExitStack,
         fixture: Callable,
         serializer: ModuleType = json,
     ):
         self.url = url
+        self.token = token
         self.fixture = fixture
         self._stack = stack
         self._ws: 'WebSocketConnection' = None  # noqa
@@ -81,9 +83,15 @@ class NoBsWs:
             trio_websocket.open_websocket_url(self.url)
         )
         # rerun user code fixture
+        if self.token == '':
             ret = await self._stack.enter_async_context(
                 self.fixture(self)
             )
+        else:
+            ret = await self._stack.enter_async_context(
+                self.fixture(self, self.token)
+            )

         assert ret is None

         log.info(f'Connection success: {self.url}')
@@ -127,12 +135,14 @@ async def open_autorecon_ws(

     # TODO: proper type annot smh
     fixture: Callable,
-):
+    # used for authenticated websockets
+    token: str = '',
+) -> AsyncGenerator[tuple[...], NoBsWs]:
     """Apparently we can QoS for all sorts of reasons..so catch em.

     """
     async with AsyncExitStack() as stack:
-        ws = NoBsWs(url, stack, fixture=fixture)
+        ws = NoBsWs(url, token, stack, fixture=fixture)
         await ws._connect()

         try:
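The ``token`` plumbing above re-enters the user fixture with the credential on every (re)connect, so authenticated subscriptions survive the auto-reconnect loop. A hedged sketch of the call-site shape, assuming the wrapper is entered as an async context manager and exposes a ``send_msg()`` helper (the endpoint and msg schema are made up, not part of this diff):

    from contextlib import asynccontextmanager

    @asynccontextmanager
    async def subscribe(ws, token: str = ''):
        # re-run on every (re)connect with the same token
        await ws.send_msg({'event': 'subscribe', 'token': token})
        yield

    async with open_autorecon_ws(
        'wss://example.com/ws',    # hypothetical endpoint
        fixture=subscribe,
        token='secret-api-token',  # non-empty -> 2-arg fixture path
    ) as ws:
        ...
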
@ -16,26 +16,34 @@
|
||||||
|
|
||||||
"""
|
"""
|
||||||
marketstore cli.
|
marketstore cli.
|
||||||
|
|
||||||
"""
|
"""
|
||||||
from typing import List
|
|
||||||
from functools import partial
|
from functools import partial
|
||||||
from pprint import pformat
|
from pprint import pformat
|
||||||
|
|
||||||
|
from anyio_marketstore import open_marketstore_client
|
||||||
import trio
|
import trio
|
||||||
import tractor
|
import tractor
|
||||||
import click
|
import click
|
||||||
|
import numpy as np
|
||||||
|
|
||||||
from .marketstore import (
|
from .marketstore import (
|
||||||
get_client,
|
get_client,
|
||||||
stream_quotes,
|
# stream_quotes,
|
||||||
ingest_quote_stream,
|
ingest_quote_stream,
|
||||||
_url,
|
# _url,
|
||||||
_tick_tbk_ids,
|
_tick_tbk_ids,
|
||||||
mk_tbk,
|
mk_tbk,
|
||||||
)
|
)
|
||||||
from ..cli import cli
|
from ..cli import cli
|
||||||
from .. import watchlists as wl
|
from .. import watchlists as wl
|
||||||
from ..log import get_logger
|
from ..log import get_logger
|
||||||
|
from ._sharedmem import (
|
||||||
|
maybe_open_shm_array,
|
||||||
|
)
|
||||||
|
from ._source import (
|
||||||
|
base_iohlc_dtype,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
log = get_logger(__name__)
|
log = get_logger(__name__)
|
||||||
|
@ -49,51 +57,58 @@ log = get_logger(__name__)
|
||||||
)
|
)
|
||||||
@click.argument('names', nargs=-1)
|
@click.argument('names', nargs=-1)
|
||||||
@click.pass_obj
|
@click.pass_obj
|
||||||
def ms_stream(config: dict, names: List[str], url: str):
|
def ms_stream(
|
||||||
"""Connect to a marketstore time bucket stream for (a set of) symbols(s)
|
config: dict,
|
||||||
|
names: list[str],
|
||||||
|
url: str,
|
||||||
|
) -> None:
|
||||||
|
'''
|
||||||
|
Connect to a marketstore time bucket stream for (a set of) symbols(s)
|
||||||
and print to console.
|
and print to console.
|
||||||
"""
|
|
||||||
|
'''
|
||||||
async def main():
|
async def main():
|
||||||
async for quote in stream_quotes(symbols=names):
|
# async for quote in stream_quotes(symbols=names):
|
||||||
log.info(f"Received quote:\n{quote}")
|
# log.info(f"Received quote:\n{quote}")
|
||||||
|
...
|
||||||
|
|
||||||
trio.run(main)
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
# @cli.command()
|
||||||
@click.option(
|
# @click.option(
|
||||||
'--url',
|
# '--url',
|
||||||
default=_url,
|
# default=_url,
|
||||||
help='HTTP URL of marketstore instance'
|
# help='HTTP URL of marketstore instance'
|
||||||
)
|
# )
|
||||||
@click.argument('names', nargs=-1)
|
# @click.argument('names', nargs=-1)
|
||||||
@click.pass_obj
|
# @click.pass_obj
|
||||||
def ms_destroy(config: dict, names: List[str], url: str) -> None:
|
# def ms_destroy(config: dict, names: list[str], url: str) -> None:
|
||||||
"""Destroy symbol entries in the local marketstore instance.
|
# """Destroy symbol entries in the local marketstore instance.
|
||||||
"""
|
# """
|
||||||
async def main():
|
# async def main():
|
||||||
nonlocal names
|
# nonlocal names
|
||||||
async with get_client(url) as client:
|
# async with get_client(url) as client:
|
||||||
|
#
|
||||||
if not names:
|
# if not names:
|
||||||
names = await client.list_symbols()
|
# names = await client.list_symbols()
|
||||||
|
#
|
||||||
# default is to wipe db entirely.
|
# # default is to wipe db entirely.
|
||||||
answer = input(
|
# answer = input(
|
||||||
"This will entirely wipe you local marketstore db @ "
|
# "This will entirely wipe you local marketstore db @ "
|
||||||
f"{url} of the following symbols:\n {pformat(names)}"
|
# f"{url} of the following symbols:\n {pformat(names)}"
|
||||||
"\n\nDelete [N/y]?\n")
|
# "\n\nDelete [N/y]?\n")
|
||||||
|
#
|
||||||
if answer == 'y':
|
# if answer == 'y':
|
||||||
for sym in names:
|
# for sym in names:
|
||||||
# tbk = _tick_tbk.format(sym)
|
# # tbk = _tick_tbk.format(sym)
|
||||||
tbk = tuple(sym, *_tick_tbk_ids)
|
# tbk = tuple(sym, *_tick_tbk_ids)
|
||||||
print(f"Destroying {tbk}..")
|
# print(f"Destroying {tbk}..")
|
||||||
await client.destroy(mk_tbk(tbk))
|
# await client.destroy(mk_tbk(tbk))
|
||||||
else:
|
# else:
|
||||||
print("Nothing deleted.")
|
# print("Nothing deleted.")
|
||||||
|
#
|
||||||
tractor.run(main)
|
# tractor.run(main)
|
||||||
|
|
||||||
|
|
||||||
@cli.command()
|
@cli.command()
|
||||||
|
@ -102,41 +117,53 @@ def ms_destroy(config: dict, names: List[str], url: str) -> None:
|
||||||
is_flag=True,
|
is_flag=True,
|
||||||
help='Enable tractor logging')
|
help='Enable tractor logging')
|
||||||
@click.option(
|
@click.option(
|
||||||
'--url',
|
'--host',
|
||||||
default=_url,
|
default='localhost'
|
||||||
help='HTTP URL of marketstore instance'
|
|
||||||
)
|
)
|
||||||
@click.argument('name', nargs=1, required=True)
|
@click.option(
|
||||||
|
'--port',
|
||||||
|
default=5993
|
||||||
|
)
|
||||||
|
@click.argument('symbols', nargs=-1)
|
||||||
@click.pass_obj
|
@click.pass_obj
|
||||||
def ms_shell(config, name, tl, url):
|
def storesh(
|
||||||
"""Start an IPython shell ready to query the local marketstore db.
|
config,
|
||||||
"""
|
tl,
|
||||||
async def main():
|
host,
|
||||||
async with get_client(url) as client:
|
port,
|
||||||
query = client.query # noqa
|
symbols: list[str],
|
||||||
# TODO: write magics to query marketstore
|
):
|
||||||
from IPython import embed
|
'''
|
||||||
embed()
|
Start an IPython shell ready to query the local marketstore db.
|
||||||
|
|
||||||
tractor.run(main)
|
'''
|
||||||
|
from piker.data.marketstore import tsdb_history_update
|
||||||
|
from piker._daemon import open_piker_runtime
|
||||||
|
|
||||||
|
async def main():
|
||||||
|
nonlocal symbols
|
||||||
|
|
||||||
|
async with open_piker_runtime(
|
||||||
|
'storesh',
|
||||||
|
enable_modules=['piker.data._ahab'],
|
||||||
|
):
|
||||||
|
symbol = symbols[0]
|
||||||
|
await tsdb_history_update(symbol)
|
||||||
|
|
||||||
|
trio.run(main)
|
||||||
|
|
||||||
|
|
||||||
 @cli.command()
 @click.option('--test-file', '-t', help='Test quote stream file')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
-@click.option('--tl', is_flag=True, help='Enable tractor logging')
-@click.option(
-    '--url',
-    default=_url,
-    help='HTTP URL of marketstore instance'
-)
 @click.argument('name', nargs=1, required=True)
 @click.pass_obj
-def ingest(config, name, test_file, tl, url):
-    """Ingest real-time broker quotes and ticks to a marketstore instance.
-    """
+def ingest(config, name, test_file, tl):
+    '''
+    Ingest real-time broker quotes and ticks to a marketstore instance.
+
+    '''
     # global opts
-    brokermod = config['brokermod']
     loglevel = config['loglevel']
     tractorloglevel = config['tractorloglevel']
     # log = config['log']

@@ -145,15 +172,25 @@ def ingest(config, name, test_file, tl, url):
     watchlists = wl.merge_watchlist(watchlist_from_file, wl._builtins)
     symbols = watchlists[name]
 
-    tractor.run(
-        partial(
-            ingest_quote_stream,
-            symbols,
-            brokermod.name,
-            tries=1,
-            loglevel=loglevel,
-        ),
-        name='ingest_marketstore',
-        loglevel=tractorloglevel,
-        debug_mode=True,
-    )
+    grouped_syms = {}
+    for sym in symbols:
+        symbol, _, provider = sym.rpartition('.')
+        if provider not in grouped_syms:
+            grouped_syms[provider] = []
+
+        grouped_syms[provider].append(symbol)
+
+    async def entry_point():
+        async with tractor.open_nursery() as n:
+            for provider, symbols in grouped_syms.items():
+                await n.run_in_actor(
+                    ingest_quote_stream,
+                    name='ingest_marketstore',
+                    symbols=symbols,
+                    brokername=provider,
+                    tries=1,
+                    actorloglevel=loglevel,
+                    loglevel=tractorloglevel
+                )
+
+    tractor.run(entry_point)
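
The provider grouping above hinges on `str.rpartition` splitting each watchlist entry on its last dot; a minimal sketch with hypothetical symbol keys:

```python
# minimal sketch of the provider-grouping step above; the example
# symbols are hypothetical.
symbols = ['btcusdt.binance', 'ethusdt.binance', 'mnq.globex.ib']

grouped_syms: dict[str, list[str]] = {}
for sym in symbols:
    # rpartition splits on the *last* '.', so multi-dotted keys keep
    # their head intact: 'mnq.globex.ib' -> ('mnq.globex', '.', 'ib')
    symbol, _, provider = sym.rpartition('.')
    grouped_syms.setdefault(provider, []).append(symbol)

print(grouped_syms)
# {'binance': ['btcusdt', 'ethusdt'], 'ib': ['mnq.globex']}
```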
piker/data/feed.py (1161 changed lines)
File diff suppressed because it is too large
@@ -14,36 +14,200 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-"""
+'''
 ``marketstore`` integration.
 
 - client management routines
 - tick data ingest routines
 - websocket client for subscribing to write triggers
 - todo: tick sequence stream-cloning for testing
-- todo: docker container management automation
-"""
-from contextlib import asynccontextmanager
-from typing import Dict, Any, List, Callable, Tuple
+'''
+from __future__ import annotations
+from contextlib import asynccontextmanager as acm
+from datetime import datetime
+from pprint import pformat
+from typing import (
+    Any,
+    Optional,
+    Union,
+    TYPE_CHECKING,
+)
 import time
 from math import isnan
 
+from bidict import bidict
 import msgpack
+import pyqtgraph as pg
 import numpy as np
-import pandas as pd
-import pymarketstore as pymkts
 import tractor
 from trio_websocket import open_websocket_url
+from anyio_marketstore import (
+    open_marketstore_client,
+    MarketstoreClient,
+    Params,
+)
+import pendulum
+import purerpc
+
+if TYPE_CHECKING:
+    import docker
+    from ._ahab import DockerContainer
 
+from .feed import maybe_open_feed
 from ..log import get_logger, get_console_log
-from ..data import open_feed
 
 
 log = get_logger(__name__)
 
-_tick_tbk_ids: Tuple[str, str] = ('1Sec', 'TICK')
+
+# container level config
+_config = {
+    'grpc_listen_port': 5995,
+    'ws_listen_port': 5993,
+    'log_level': 'debug',
+}
+
+_yaml_config = '''
+# piker's ``marketstore`` config.
+
+# mount this config using:
+# sudo docker run --mount \
+# type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
+# 5993:5993 alpacamarkets/marketstore:latest
+
+root_directory: data
+listen_port: {ws_listen_port}
+grpc_listen_port: {grpc_listen_port}
+log_level: {log_level}
+queryable: true
+stop_grace_period: 0
+wal_rotate_interval: 5
+stale_threshold: 5
+enable_add: true
+enable_remove: false
+
+triggers:
+  - module: ondiskagg.so
+    on: "*/1Sec/OHLCV"
+    config:
+        # filter: "nasdaq"
+        destinations:
+            - 1Min
+            - 5Min
+            - 15Min
+            - 1H
+            - 1D
+
+  - module: stream.so
+    on: '*/*/*'
+    # config:
+    #     filter: "nasdaq"
+
+'''.format(**_config)
+
+
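For clarity on the template above, `str.format(**_config)` simply substitutes the container-level values into the YAML body; a tiny standalone sketch:

```python
# standalone sketch of the template substitution used above.
_config = {
    'grpc_listen_port': 5995,
    'ws_listen_port': 5993,
    'log_level': 'debug',
}
template = (
    'listen_port: {ws_listen_port}\n'
    'grpc_listen_port: {grpc_listen_port}\n'
    'log_level: {log_level}\n'
)
print(template.format(**_config))
# listen_port: 5993
# grpc_listen_port: 5995
# log_level: debug
```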
+def start_marketstore(
+    client: docker.DockerClient,
+    **kwargs,
+
+) -> tuple[DockerContainer, dict[str, Any]]:
+    '''
+    Start and supervise a marketstore instance with its config
+    bind-mounted in from the piker config directory on the system.
+
+    The equivalent cli cmd to this code is:
+
+        sudo docker run --mount \
+        type=bind,source="$HOME/.config/piker/",target="/etc" -i -p \
+        5993:5993 alpacamarkets/marketstore:latest
+
+    '''
+    import os
+    import docker
+    from .. import config
+    get_console_log('info', name=__name__)
+
+    mktsdir = os.path.join(config._config_dir, 'marketstore')
+
+    # create when dne
+    if not os.path.isdir(mktsdir):
+        os.mkdir(mktsdir)
+
+    yml_file = os.path.join(mktsdir, 'mkts.yml')
+    if not os.path.isfile(yml_file):
+        log.warning(
+            f'No `marketstore` config exists?: {yml_file}\n'
+            'Generating new file from template:\n'
+            f'{_yaml_config}\n'
+        )
+        with open(yml_file, 'w') as yf:
+            yf.write(_yaml_config)
+
+    # create a mount from user's local piker config dir into container
+    config_dir_mnt = docker.types.Mount(
+        target='/etc',
+        source=mktsdir,
+        type='bind',
+    )
+
+    # create a user config subdir where the marketstore
+    # backing filesystem database can be persisted.
+    persistent_data_dir = os.path.join(
+        mktsdir, 'data',
+    )
+    if not os.path.isdir(persistent_data_dir):
+        os.mkdir(persistent_data_dir)
+
+    data_dir_mnt = docker.types.Mount(
+        target='/data',
+        source=persistent_data_dir,
+        type='bind',
+    )
+
+    dcntr: DockerContainer = client.containers.run(
+        'alpacamarkets/marketstore:latest',
+        # do we need this for cmds?
+        # '-i',
+
+        # '-p 5993:5993',
+        ports={
+            '5993/tcp': 5993,  # jsonrpc / ws?
+            '5995/tcp': 5995,  # grpc
+        },
+        mounts=[
+            config_dir_mnt,
+            data_dir_mnt,
+        ],
+        detach=True,
+        # stop_signal='SIGINT',
+        init=True,
+        # remove=True,
+    )
+    return (
+        dcntr,
+        _config,
+
+        # expected startup and stop msgs
+        "launching tcp listener for all services...",
+        "exiting...",
+    )
+
+
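A rough usage sketch for the supervisor routine above, assuming a local docker daemon; note the return value is a flat tuple of the container handle, the port config, and the expected startup/stop log messages:

```python
# rough usage sketch; assumes a local docker daemon is running.
import docker

client = docker.from_env()
(
    cntr,
    config,
    started_msg,  # scanned for in the container's log stream
    stopped_msg,
) = start_marketstore(client)

print(config['grpc_listen_port'])  # 5995, the port `get_client()` dials
cntr.stop()
```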
+_tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK')
 _tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids)
-_url: str = 'http://localhost:5993/rpc'
+
+_tick_dt = [
+    # these two are required as a "primary key"
+    ('Epoch', 'i8'),
+    ('Nanoseconds', 'i4'),
+    ('IsTrade', 'i1'),
+    ('IsBid', 'i1'),
+    ('Price', 'f4'),
+    ('Size', 'f4')
+]
+
 _quote_dt = [
     # these two are required as a "primary key"
     ('Epoch', 'i8'),
@@ -61,6 +225,7 @@ _quote_dt = [
     # ('brokerd_ts', 'i64'),
     # ('VWAP', 'f4')
 ]
+
 _quote_tmp = {}.fromkeys(dict(_quote_dt).keys(), np.nan)
 _tick_map = {
     'Up': 1,
@@ -69,31 +234,52 @@ _tick_map = {
     None: np.nan,
 }
 
+_ohlcv_dt = [
+    # these two are required as a "primary key"
+    ('Epoch', 'i8'),
+    # ('Nanoseconds', 'i4'),
+
+    # ohlcv sampling
+    ('Open', 'f4'),
+    ('High', 'f4'),
+    ('Low', 'f4'),
+    ('Close', 'f4'),
+    ('Volume', 'f4'),
+]
 
-class MarketStoreError(Exception):
-    "Generic marketstore client error"
 
+ohlc_key_map = bidict({
+    'Epoch': 'time',
+    'Open': 'open',
+    'High': 'high',
+    'Low': 'low',
+    'Close': 'close',
+    'Volume': 'volume',
+})
 
-def err_on_resp(response: dict) -> None:
-    """Raise any errors found in responses from client request.
-    """
-    responses = response['responses']
-    if responses is not None:
-        for r in responses:
-            err = r['error']
-            if err:
-                raise MarketStoreError(err)
+
+def mk_tbk(keys: tuple[str, str, str]) -> str:
+    '''
+    Generate a marketstore table key from a tuple.
+    Converts,
+        ``('SPY', '1Sec', 'TICK')`` -> ``"SPY/1Sec/TICK"``
+
+    '''
+    return '/'.join(keys)
 
 
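A quick check of the table-key helper above, matching its docstring example:

```python
# the tbk helper is plain '/'-joining of the
# (symbol, timeframe, attribute-group) triple:
def mk_tbk(keys: tuple[str, str, str]) -> str:
    return '/'.join(keys)

assert mk_tbk(('SPY', '1Sec', 'TICK')) == 'SPY/1Sec/TICK'
```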
 def quote_to_marketstore_structarray(
-    quote: Dict[str, Any],
-    last_fill: str,
+    quote: dict[str, Any],
+    last_fill: Optional[float]
+
 ) -> np.array:
-    """Return marketstore writeable structarray from quote ``dict``.
-    """
+    '''
+    Return marketstore writeable structarray from quote ``dict``.
+
+    '''
     if last_fill:
         # new fill bby
-        now = timestamp(last_fill)
+        now = int(pendulum.parse(last_fill).timestamp)
     else:
         # this should get inserted upstream by the broker-client to
         # subtract from IPC latency

@@ -101,7 +287,7 @@ def quote_to_marketstore_structarray(

     secs, ns = now / 10**9, now % 10**9

-    # pack into List[Tuple[str, Any]]
+    # pack into list[tuple[str, Any]]
     array_input = []

     # insert 'Epoch' entry first and then 'Nanoseconds'.
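The `Epoch`/`Nanoseconds` split in the struct-array packing comes from dividing a nanosecond-precision timestamp; a worked example (floor division used here to keep the epoch integral):

```python
# worked example of the seconds/nanoseconds split used above.
now = 1_650_000_000_123_456_789  # a ns-precision epoch timestamp

secs, ns = now // 10**9, now % 10**9
assert secs == 1_650_000_000
assert ns == 123_456_789
```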
@@ -123,146 +309,467 @@ def quote_to_marketstore_structarray(
     return np.array([tuple(array_input)], dtype=_quote_dt)
 
 
-def timestamp(datestr: str) -> int:
-    """Return marketstore compatible 'Epoch' integer in nanoseconds
-    from a date formatted str.
-    """
-    return int(pd.Timestamp(datestr).value)
-
-
-def mk_tbk(keys: Tuple[str, str, str]) -> str:
-    """Generate a marketstore table key from a tuple.
-
-    Converts,
-        ``('SPY', '1Sec', 'TICK')`` -> ``"SPY/1Sec/TICK"``
-    """
-    return '{}/' + '/'.join(keys)
-
-
-class Client:
-    """Async wrapper around the alpaca ``pymarketstore`` sync client.
-
-    This will serve as the shell for building out a proper async client
-    that isn't horribly documented and un-tested..
-    """
-    def __init__(self, url: str):
-        self._client = pymkts.Client(url)
-
-    async def _invoke(
-        self,
-        meth: Callable,
-        *args,
-        **kwargs,
-    ) -> Any:
-        return err_on_resp(meth(*args, **kwargs))
-
-    async def destroy(
-        self,
-        tbk: Tuple[str, str, str],
-    ) -> None:
-        return await self._invoke(self._client.destroy, mk_tbk(tbk))
-
-    async def list_symbols(
-        self,
-        tbk: str,
-    ) -> List[str]:
-        return await self._invoke(self._client.list_symbols, mk_tbk(tbk))
-
-    async def write(
-        self,
-        symbol: str,
-        array: np.ndarray,
-    ) -> None:
-        start = time.time()
-        await self._invoke(
-            self._client.write,
-            array,
-            _tick_tbk.format(symbol),
-            isvariablelength=True
-        )
-        log.debug(f"{symbol} write time (s): {time.time() - start}")
-
-    def query(
-        self,
-        symbol,
-        tbk: Tuple[str, str] = _tick_tbk_ids,
-    ) -> pd.DataFrame:
-        # XXX: causes crash
-        # client.query(pymkts.Params(symbol, '*', 'OHCLV'
-        result = self._client.query(
-            pymkts.Params(symbol, *tbk),
-        )
-        return result.first().df()
-
-
-@asynccontextmanager
+@acm
 async def get_client(
-    url: str = _url,
-) -> Client:
-    yield Client(url)
+    host: str = 'localhost',
+    port: int = 5995
+
+) -> MarketstoreClient:
+    '''
+    Load an ``anyio_marketstore`` grpc client connected
+    to an existing ``marketstore`` server.
+
+    '''
+    async with open_marketstore_client(
+        host,
+        port
+    ) as client:
+        yield client
+
+
+class MarketStoreError(Exception):
+    "Generic marketstore client error"
+
+
+# def err_on_resp(response: dict) -> None:
+#     """Raise any errors found in responses from client request.
+#     """
+#     responses = response['responses']
+#     if responses is not None:
+#         for r in responses:
+#             err = r['error']
+#             if err:
+#                 raise MarketStoreError(err)
+
+
+# map of seconds ints to "time frame" accepted keys
+tf_in_1s = bidict({
+    1: '1Sec',
+    60: '1Min',
+    60*5: '5Min',
+    60*15: '15Min',
+    60*30: '30Min',
+    60*60: '1H',
+    60*60*24: '1D',
+})
+
+
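Because `tf_in_1s` is a `bidict`, `read_ohlcv()` below can map in both directions, seconds to timeframe key and back; a small sketch:

```python
# two-way timeframe lookups via bidict, as used by the storage api.
from bidict import bidict

tf_in_1s = bidict({
    1: '1Sec',
    60: '1Min',
})

assert tf_in_1s[60] == '1Min'          # seconds -> mkts timeframe key
assert tf_in_1s.inverse['1Min'] == 60  # timeframe key -> seconds
```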
+class Storage:
+    '''
+    High level storage api for both real-time and historical ingest.
+
+    '''
+    def __init__(
+        self,
+        client: MarketstoreClient,
+
+    ) -> None:
+        # TODO: eventually this should be an api/interface type that
+        # ensures we can support multiple tsdb backends.
+        self.client = client
+
+        # series' cache from tsdb reads
+        self._arrays: dict[str, np.ndarray] = {}
+
+    async def list_keys(self) -> list[str]:
+        return await self.client.list_symbols()
+
+    async def search_keys(self, pattern: str) -> list[str]:
+        '''
+        Search for time series key in the storage backend.
+
+        '''
+        ...
+
+    async def write_ticks(self, ticks: list) -> None:
+        ...
+
+    async def load(
+        self,
+        fqsn: str,
+
+    ) -> tuple[
+        dict[int, np.ndarray],  # timeframe (in secs) to series
+        Optional[datetime],  # first dt
+        Optional[datetime],  # last dt
+    ]:
+
+        first_tsdb_dt, last_tsdb_dt = None, None
+        tsdb_arrays = await self.read_ohlcv(
+            fqsn,
+            # on first load we don't need to pull the max
+            # history per request size worth.
+            limit=3000,
+        )
+        log.info(f'Loaded tsdb history {tsdb_arrays}')
+
+        if tsdb_arrays:
+            fastest = list(tsdb_arrays.values())[0]
+            times = fastest['Epoch']
+            first, last = times[0], times[-1]
+            first_tsdb_dt, last_tsdb_dt = map(
+                pendulum.from_timestamp, [first, last]
+            )
+
+        return tsdb_arrays, first_tsdb_dt, last_tsdb_dt
+
+    async def read_ohlcv(
+        self,
+        fqsn: str,
+        timeframe: Optional[Union[int, str]] = None,
+        end: Optional[int] = None,
+        limit: int = int(800e3),
+
+    ) -> tuple[
+        MarketstoreClient,
+        Union[dict, np.ndarray]
+    ]:
+        client = self.client
+        syms = await client.list_symbols()
+
+        if fqsn not in syms:
+            return {}
+
+        tfstr = tf_in_1s[1]
+
+        params = Params(
+            symbols=fqsn,
+            timeframe=tfstr,
+            attrgroup='OHLCV',
+            end=end,
+            # limit_from_start=True,
+
+            # TODO: figure the max limit here given the
+            # ``purerpc`` msg size limit of purerpc: 33554432
+            limit=limit,
+        )
+
+        if timeframe is None:
+            log.info(f'starting {fqsn} tsdb granularity scan..')
+            # loop through and try to find highest granularity
+            for tfstr in tf_in_1s.values():
+                try:
+                    log.info(f'querying for {tfstr}@{fqsn}')
+                    params.set('timeframe', tfstr)
+                    result = await client.query(params)
+                    break
+
+                except purerpc.grpclib.exceptions.UnknownError:
+                    # XXX: this is already logged by the container and
+                    # thus shows up through `marketstored` logs relay.
+                    # log.warning(f'{tfstr}@{fqsn} not found')
+                    continue
+            else:
+                return {}
+
+        else:
+            result = await client.query(params)
+
+        # TODO: it turns out column access on recarrays is actually slower:
+        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
+        # it might make sense to make these structured arrays?
+        # Fill out a `numpy` array-results map
+        arrays = {}
+        for fqsn, data_set in result.by_symbols().items():
+            arrays.setdefault(fqsn, {})[
+                tf_in_1s.inverse[data_set.timeframe]
+            ] = data_set.array
+
+        return arrays[fqsn][timeframe] if timeframe else arrays[fqsn]
+
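The `arrays` result map built in `read_ohlcv()` above nests by fully-qualified symbol name and then timeframe-in-seconds; schematically (the fqsn key here is hypothetical):

```python
# schematic shape of the `arrays` map built in `read_ohlcv()`;
# the fqsn and array contents are hypothetical.
arrays = {
    'btcusdt.binance': {
        1: ...,   # np.ndarray of 1Sec OHLCV rows
        60: ...,  # np.ndarray of 1Min OHLCV rows
    },
}
# with `timeframe=60` the method returns arrays[fqsn][60];
# otherwise the whole per-timeframe dict for that fqsn.
```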
+    async def delete_ts(
+        self,
+        key: str,
+        timeframe: Optional[Union[int, str]] = None,
+
+    ) -> bool:
+
+        client = self.client
+        syms = await client.list_symbols()
+        print(syms)
+        # if key not in syms:
+        #     raise KeyError(f'`{fqsn}` table key not found?')
+
+        return await client.destroy(tbk=key)
+
+    async def write_ohlcv(
+        self,
+        fqsn: str,
+        ohlcv: np.ndarray,
+        append_and_duplicate: bool = True,
+        limit: int = int(800e3),
+
+    ) -> None:
+        # build mkts schema compat array for writing
+        mkts_dt = np.dtype(_ohlcv_dt)
+        mkts_array = np.zeros(
+            len(ohlcv),
+            dtype=mkts_dt,
+        )
+        # copy from shm array (yes it's this easy):
+        # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
+        mkts_array[:] = ohlcv[[
+            'time',
+            'open',
+            'high',
+            'low',
+            'close',
+            'volume',
+        ]]
+
+        m, r = divmod(len(mkts_array), limit)
+
+        for i in range(m, 1):
+            to_push = mkts_array[i-1:i*limit]
+
+            # write to db
+            resp = await self.client.write(
+                to_push,
+                tbk=f'{fqsn}/1Sec/OHLCV',
+
+                # NOTE: will append duplicates
+                # for the same timestamp-index.
+                # TODO: pre deduplicate?
+                isvariablelength=append_and_duplicate,
+            )
+
+            log.info(
+                f'Wrote {mkts_array.size} datums to tsdb\n'
+            )
+
+            for resp in resp.responses:
+                err = resp.error
+                if err:
+                    raise MarketStoreError(err)
+
+        if r:
+            to_push = mkts_array[m*limit:]
+
+            # write to db
+            resp = await self.client.write(
+                to_push,
+                tbk=f'{fqsn}/1Sec/OHLCV',
+
+                # NOTE: will append duplicates
+                # for the same timestamp-index.
+                # TODO: pre deduplicate?
+                isvariablelength=append_and_duplicate,
+            )
+
+            log.info(
+                f'Wrote {mkts_array.size} datums to tsdb\n'
+            )
+
+            for resp in resp.responses:
+                err = resp.error
+                if err:
+                    raise MarketStoreError(err)
+
+    # XXX: currently the only way to do this is through the CLI:
+
+    # sudo ./marketstore connect --dir ~/.config/piker/data
+    # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
+    # and this seems to block and use up mem..
+    # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
+
+    # relevant source code for this is here:
+    # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14
+    # def delete_range(self, start_dt, end_dt) -> None:
+    #     ...
+
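The `divmod` chunking in `write_ohlcv()` above is meant to keep each grpc write under the transport's message-size cap; since the `range(m, 1)` loop bounds there read as suspect (empty for `m >= 1`), here is a minimal standalone sketch of the intended full-cover slicing:

```python
# standalone sketch of chunking an array into `limit`-sized writes;
# `limit` is tiny here purely for illustration.
import numpy as np

arr = np.arange(10)
limit = 4
m, r = divmod(len(arr), limit)  # m=2 full chunks, r=2 remainder

chunks = [arr[i*limit:(i+1)*limit] for i in range(m)]
if r:
    chunks.append(arr[m*limit:])

assert [len(c) for c in chunks] == [4, 4, 2]
assert np.concatenate(chunks).tolist() == arr.tolist()
```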
+@acm
+async def open_storage_client(
+    fqsn: str,
+    period: Optional[Union[int, str]] = None,  # in seconds
+
+) -> tuple[Storage, dict[str, np.ndarray]]:
+    '''
+    Load a series by key and deliver in ``numpy`` struct array format.
+
+    '''
+    async with (
+        # eventually a storage backend endpoint
+        get_client() as client,
+    ):
+        # slap on our wrapper api
+        yield Storage(client)
+
+
+async def tsdb_history_update(
+    fqsn: Optional[str] = None,
+
+) -> list[str]:
+
+    # TODO: real-time dedicated task for ensuring
+    # history consistency between the tsdb, shm and real-time feed..
+
+    # update sequence design notes:
+
+    # - load existing highest frequency data from mkts
+    #   * how do we want to offer this to the UI?
+    #     - lazy loading?
+    #     - try to load it all and expect graphics caching/diffing
+    #       to hide extra bits that aren't in view?
+
+    # - compute the diff between latest data from broker and shm
+    #   * use sql api in mkts to determine where the backend should
+    #     start querying for data?
+    #   * append any diff with new shm length
+    #   * determine missing (gapped) history by scanning
+    #   * how far back do we look?
+
+    # - begin rt update ingest and aggregation
+    #   * could start by always writing ticks to mkts instead of
+    #     worrying about a shm queue for now.
+    #   * we have a short list of shm queues worth groking:
+    #     - https://github.com/pikers/piker/issues/107
+    #   * the original data feed arch blurb:
+    #     - https://github.com/pikers/piker/issues/98
+    #
+    profiler = pg.debug.Profiler(
+        disabled=False,  # not pg_profile_enabled(),
+        delayed=False,
+    )
+
+    async with (
+        open_storage_client(fqsn) as storage,
+
+        maybe_open_feed(
+            [fqsn],
+            start_stream=False,
+
+        ) as (feed, stream),
+    ):
+        profiler(f'opened feed for {fqsn}')
+
+        to_append = feed.shm.array
+        to_prepend = None
+
+        if fqsn:
+            symbol = feed.symbols.get(fqsn)
+            if symbol:
+                fqsn = symbol.front_fqsn()
+
+        # diff db history with shm and only write the missing portions
+        ohlcv = feed.shm.array
+
+        # TODO: use pg profiler
+        tsdb_arrays = await storage.read_ohlcv(fqsn)
+        # hist diffing
+        if tsdb_arrays:
+            for secs in (1, 60):
+                ts = tsdb_arrays.get(secs)
+                if ts is not None and len(ts):
+                    # these aren't currently used but can be referenced from
+                    # within the embedded ipython shell below.
+                    to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
+                    to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]
+
+        profiler('Finished db arrays diffs')
+
+        syms = await storage.client.list_symbols()
+        log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
+        profiler(f'listed symbols {syms}')
+
+        # TODO: ask if user wants to write history for detected
+        # available shm buffers?
+        from tractor.trionics import ipython_embed
+        await ipython_embed()
+
+        # for array in [to_append, to_prepend]:
+        #     if array is None:
+        #         continue
+
+        #     log.info(
+        #         f'Writing datums {array.size} -> to tsdb from shm\n'
+        #     )
+        #     await storage.write_ohlcv(fqsn, array)
+
+        # profiler('Finished db writes')
+
+
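The shm-vs-tsdb history diffing above reduces to epoch-boundary slicing on the struct arrays; a self-contained sketch:

```python
# self-contained sketch of the append/prepend diff slicing above.
import numpy as np

ohlcv = np.array(
    [(t,) for t in range(100, 110)],
    dtype=[('time', 'i8')],
)
# pretend the tsdb already holds epochs 103..106
first_tsdb_epoch, last_tsdb_epoch = 103, 106

to_append = ohlcv[ohlcv['time'] > last_tsdb_epoch]
to_prepend = ohlcv[ohlcv['time'] < first_tsdb_epoch]

assert to_append['time'].tolist() == [107, 108, 109]
assert to_prepend['time'].tolist() == [100, 101, 102]
```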
 async def ingest_quote_stream(
-    symbols: List[str],
+    symbols: list[str],
     brokername: str,
     tries: int = 1,
     loglevel: str = None,
+
 ) -> None:
-    """Ingest a broker quote stream into marketstore in (sampled) tick format.
-    """
-    async with open_feed(
-        brokername,
-        symbols,
-        loglevel=loglevel,
-    ) as (first_quotes, qstream):
-
-        quote_cache = first_quotes.copy()
-
-        async with get_client() as ms_client:
-
-            # start ingest to marketstore
-            async for quotes in qstream:
-                log.info(quotes)
-                for symbol, quote in quotes.items():
-
-                    # remap tick strs to ints
-                    quote['tick'] = _tick_map[quote.get('tick', 'Equal')]
-
-                    # check for volume update (i.e. did trades happen
-                    # since last quote)
-                    new_vol = quote.get('volume', None)
-                    if new_vol is None:
-                        log.debug(f"No fills for {symbol}")
-                        if new_vol == quote_cache.get('volume'):
-                            # should never happen due to field diffing
-                            # on sender side
-                            log.error(
-                                f"{symbol}: got same volume as last quote?")
-
-                    quote_cache.update(quote)
-
-                    a = quote_to_marketstore_structarray(
-                        quote,
-                        # TODO: check this closer to the broker query api
-                        last_fill=quote.get('fill_time', '')
-                    )
-                    await ms_client.write(symbol, a)
+    '''
+    Ingest a broker quote stream into a ``marketstore`` tsdb.
+
+    '''
+    async with (
+        maybe_open_feed(brokername, symbols, loglevel=loglevel) as feed,
+        get_client() as ms_client,
+    ):
+        async for quotes in feed.stream:
+            log.info(quotes)
+            for symbol, quote in quotes.items():
+                for tick in quote.get('ticks', ()):
+                    ticktype = tick.get('type', 'n/a')
+
+                    # techtonic tick write
+                    array = quote_to_marketstore_structarray({
+                        'IsTrade': 1 if ticktype == 'trade' else 0,
+                        'IsBid': 1 if ticktype in ('bid', 'bsize') else 0,
+                        'Price': tick.get('price'),
+                        'Size': tick.get('size')
+                    }, last_fill=quote.get('broker_ts', None))
+
+                    await ms_client.write(array, _tick_tbk)
+
+                # LEGACY WRITE LOOP (using old tick dt)
+                # quote_cache = {
+                #     'size': 0,
+                #     'tick': 0
+                # }
+
+                # async for quotes in qstream:
+                #     log.info(quotes)
+                #     for symbol, quote in quotes.items():
+
+                #         # remap tick strs to ints
+                #         quote['tick'] = _tick_map[quote.get('tick', 'Equal')]
+
+                #         # check for volume update (i.e. did trades happen
+                #         # since last quote)
+                #         new_vol = quote.get('volume', None)
+                #         if new_vol is None:
+                #             log.debug(f"No fills for {symbol}")
+                #             if new_vol == quote_cache.get('volume'):
+                #                 # should never happen due to field diffing
+                #                 # on sender side
+                #                 log.error(
+                #                     f"{symbol}: got same volume as last quote?")
+
+                #         quote_cache.update(quote)
+
+                #         a = quote_to_marketstore_structarray(
+                #             quote,
+                #             # TODO: check this closer to the broker query api
+                #             last_fill=quote.get('fill_time', '')
+                #         )
+                #         await ms_client.write(symbol, a)
 
 
 async def stream_quotes(
-    symbols: List[str],
+    symbols: list[str],
     host: str = 'localhost',
     port: int = 5993,
     diff_cached: bool = True,
     loglevel: str = None,
+
 ) -> None:
-    """Open a symbol stream from a running instance of marketstore and
+    '''
+    Open a symbol stream from a running instance of marketstore and
     log to console.
-    """
+
+    '''
     # XXX: required to propagate ``tractor`` loglevel to piker logging
     get_console_log(loglevel or tractor.current_actor().loglevel)
 
-    tbks: Dict[str, str] = {sym: f"{sym}/*/*" for sym in symbols}
+    tbks: dict[str, str] = {sym: f"{sym}/*/*" for sym in symbols}
 
     async with open_websocket_url(f'ws://{host}:{port}/ws') as ws:
         # send subs topics to server

@@ -271,7 +778,7 @@ async def stream_quotes(
         )
         log.info(resp)
 
-        async def recv() -> Dict[str, Any]:
+        async def recv() -> dict[str, Any]:
             return msgpack.loads((await ws.get_message()), encoding='utf-8')
 
         streams = (await recv())['streams']
@@ -0,0 +1,199 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship of pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+FSP (financial signal processing) apis.
+
+'''
+
+# TODO: things to figure the heck out:
+# - how to handle non-plottable values (pyqtgraph has facility for this
+#   now in `arrayToQPath()`)
+# - composition of fsps / implicit chaining syntax (we need an issue)
+
+from __future__ import annotations
+from functools import partial
+from typing import (
+    Any,
+    Callable,
+    Awaitable,
+    Optional,
+)
+
+import numpy as np
+import tractor
+from tractor.msg import NamespacePath
+
+from ..data._sharedmem import (
+    ShmArray,
+    maybe_open_shm_array,
+    attach_shm_array,
+    _Token,
+)
+from ..log import get_logger
+
+log = get_logger(__name__)
+
+# global fsp registry filled out by @fsp decorator below
+_fsp_registry = {}
+
+
+def _load_builtins() -> dict[tuple, Callable]:
+
+    # import to implicitly trigger registration via ``@fsp``
+    from . import _momo  # noqa
+    from . import _volume  # noqa
+
+    return _fsp_registry
+
+
+class Fsp:
+    '''
+    "Financial signal processor" decorator wrapped async function.
+
+    '''
+
+    # TODO: checkout the advanced features from ``wrapt``:
+    # - dynamic enable toggling,
+    #   https://wrapt.readthedocs.io/en/latest/decorators.html#dynamically-disabling-decorators
+    # - custom object proxies, might be useful for implementing n-compose
+    #   https://wrapt.readthedocs.io/en/latest/wrappers.html#custom-object-proxies
+    # - custom function wrappers,
+    #   https://wrapt.readthedocs.io/en/latest/wrappers.html#custom-function-wrappers
+
+    # actor-local map of source flow shm tokens
+    # + the consuming fsp *to* the consumers output
+    # shm flow.
+    _flow_registry: dict[
+        tuple[_Token, str], _Token,
+    ] = {}
+
+    def __init__(
+        self,
+        func: Callable[..., Awaitable],
+        *,
+        outputs: tuple[str] = (),
+        display_name: Optional[str] = None,
+        **config,
+
+    ) -> None:
+
+        # TODO (maybe):
+        # - type introspection?
+        # - should we make this a wrapt object proxy?
+        self.func = func
+        self.__name__ = func.__name__  # XXX: must have func-object name
+
+        self.ns_path: tuple[str, str] = NamespacePath.from_ref(func)
+        self.outputs = outputs
+        self.config: dict[str, Any] = config
+
+        # register with declared set.
+        _fsp_registry[self.ns_path] = self
+
+    @property
+    def name(self) -> str:
+        return self.__name__
+
+    def __call__(
+        self,
+
+        # TODO: when we settle on py3.10 we should probably use the new
+        # type annots from pep 612:
+        # https://www.python.org/dev/peps/pep-0612/
+        # instance,
+        *args,
+        **kwargs
+    ):
+        return self.func(*args, **kwargs)
+
+    # TODO: lru_cache this? pretty sure it'll work?
+    def get_shm(
+        self,
+        src_shm: ShmArray,
+
+    ) -> ShmArray:
+        '''
+        Provide access to allocated shared mem array
+        for this "instance" of a signal processor for
+        the given ``key``.
+
+        '''
+        dst_token = self._flow_registry[
+            (src_shm._token, self.name)
+        ]
+        shm = attach_shm_array(dst_token)
+        return shm
+
+
+def fsp(
+    wrapped=None,
+    *,
+    outputs: tuple[str] = (),
+    display_name: Optional[str] = None,
+    **config,
+
+) -> Fsp:
+
+    if wrapped is None:
+        return partial(
+            Fsp,
+            outputs=outputs,
+            display_name=display_name,
+            **config,
+        )
+
+    return Fsp(wrapped, outputs=(wrapped.__name__,))
+
+
+def mk_fsp_shm_key(
+    sym: str,
+    target: Fsp
+
+) -> str:
+    uid = tractor.current_actor().uid
+    return f'{sym}.fsp.{target.name}.{".".join(uid)}'
+
+
+def maybe_mk_fsp_shm(
+    sym: str,
+    target: Fsp,
+    readonly: bool = True,
+
+) -> (str, ShmArray, bool):
+    '''
+    Allocate a single row shm array for a symbol-fsp pair if none
+    exists, otherwise load the shm already existing for that token.
+
+    '''
+    assert isinstance(sym, str), '`sym` should be file-name-friendly `str`'
+
+    # TODO: load output types from `Fsp`
+    # - should `index` be a required internal field?
+    fsp_dtype = np.dtype(
+        [('index', int)] +
+        [(field_name, float) for field_name in target.outputs]
+    )
+
+    key = mk_fsp_shm_key(sym, target)
+
+    shm, opened = maybe_open_shm_array(
+        key,
+        # TODO: create entry for each time frame
+        dtype=fsp_dtype,
+        readonly=True,
+    )
+    return key, shm, opened
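Tying the new api together: decorating an async generator with `@fsp` registers it and declares its shm output schema; a hedged sketch (`my_fsp` and its output names are hypothetical, and the module path is assumed from the `._api` imports elsewhere in this diff):

```python
# hedged sketch of the decorator api above.
from piker.fsp._api import fsp, Fsp, mk_fsp_shm_key  # assumed path

@fsp(outputs=('fast', 'slow'))
async def my_fsp(source, ohlcv):
    # first yield: history as a dict keyed by output field name
    yield {'fast': ..., 'slow': ...}

# the wrapped object is an `Fsp` instance registered by namespace path
assert isinstance(my_fsp, Fsp)
assert my_fsp.name == 'my_fsp'

# inside a running actor, mk_fsp_shm_key('xmrusd.kraken', my_fsp)
# would yield something like:
#   'xmrusd.kraken.fsp.my_fsp.<actor-name>.<actor-uuid>'
```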
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) Tyler Goodlet (in stewardship of pikers)
 
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -20,35 +20,32 @@ core task logic for processing chains
 '''
 from dataclasses import dataclass
 from functools import partial
-from typing import AsyncIterator, Callable, Optional
+from typing import (
+    AsyncIterator, Callable, Optional,
+    Union,
+)
 
 import numpy as np
 import pyqtgraph as pg
 import trio
 from trio_typing import TaskStatus
 import tractor
+from tractor.msg import NamespacePath
 
 from ..log import get_logger, get_console_log
 from .. import data
 from ..data import attach_shm_array
 from ..data.feed import Feed
 from ..data._sharedmem import ShmArray
-from ._momo import _rsi, _wma
-from ._volume import _tina_vwap
+from ..data._source import Symbol
+from ._api import (
+    Fsp,
+    _load_builtins,
+    _Token,
+)
 
 log = get_logger(__name__)
 
-_fsp_builtins = {
-    'rsi': _rsi,
-    'wma': _wma,
-    'vwap': _tina_vwap,
-}
-
-# TODO: things to figure the heck out:
-# - how to handle non-plottable values (pyqtgraph has facility for this
-#   now in `arrayToQPath()`)
-# - composition of fsps / implicit chaining syntax (we need an issue)
-
 
 @dataclass
 class TaskTracker:

@@ -79,18 +76,16 @@ async def filter_quotes_by_sym(
 
 async def fsp_compute(
 
-    ctx: tractor.Context,
-    symbol: str,
+    symbol: Symbol,
     feed: Feed,
     quote_stream: trio.abc.ReceiveChannel,
 
     src: ShmArray,
     dst: ShmArray,
 
-    func_name: str,
     func: Callable,
 
-    attach_stream: bool = True,
+    # attach_stream: bool = False,
     task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,
 
 ) -> None:

@@ -100,26 +95,79 @@ async def fsp_compute(
         disabled=True
     )
 
+    fqsn = symbol.front_fqsn()
     out_stream = func(
 
         # TODO: do we even need this if we do the feed api right?
         # shouldn't a local stream do this before we get a handle
        # to the async iterable? it's that or we do some kinda
         # async itertools style?
-        filter_quotes_by_sym(symbol, quote_stream),
+        filter_quotes_by_sym(fqsn, quote_stream),
+
+        # XXX: currently the ``ohlcv`` arg
         feed.shm,
     )
 
     # Conduct a single iteration of fsp with historical bars input
     # and get historical output
+    history_output: Union[
+        dict[str, np.ndarray],  # multi-output case
+        np.ndarray,  # single output case
+    ]
    history_output = await out_stream.__anext__()
 
+    func_name = func.__name__
     profiler(f'{func_name} generated history')
 
-    # build a struct array which includes an 'index' field to push
-    # as history
-    history = np.array(
-        np.arange(len(history_output)),
+    # build struct array with an 'index' field to push as history
+
+    # TODO: push using a[['f0', 'f1', .., 'fn']] = .. syntax no?
+    # if the output array is multi-field then push
+    # each respective field.
+    fields = getattr(dst.array.dtype, 'fields', None).copy()
+    fields.pop('index')
+    history: Optional[np.ndarray] = None  # TODO: nptyping here!
+
+    if fields and len(fields) > 1 and fields:
+        if not isinstance(history_output, dict):
+            raise ValueError(
+                f'`{func_name}` is a multi-output FSP and should yield a '
+                '`dict[str, np.ndarray]` for history'
+            )
+
+        for key in fields.keys():
+            if key in history_output:
+                output = history_output[key]
+
+                if history is None:
+
+                    if output is None:
+                        length = len(src.array)
+                    else:
+                        length = len(output)
+
+                    # using the first output, determine
+                    # the length of the struct-array that
+                    # will be pushed to shm.
+                    history = np.zeros(
+                        length,
+                        dtype=dst.array.dtype
+                    )
+
+                if output is None:
+                    continue
+
+                history[key] = output
+
+    # single-key output stream
+    else:
+        if not isinstance(history_output, np.ndarray):
+            raise ValueError(
+                f'`{func_name}` is a single output FSP and should yield an '
+                '`np.ndarray` for history'
+            )
+        history = np.zeros(
+            len(history_output),
             dtype=dst.array.dtype
         )
         history[func_name] = history_output
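For the multi-output branch above, an fsp's first yield is expected to be a mapping of output-field name to history array which is then copied field-wise into the struct array; a small sketch with hypothetical field names:

```python
# sketch of filling the struct-array history from a multi-output
# dict, mirroring the loop above.
import numpy as np

dst_dtype = np.dtype([('index', int), ('fast', float), ('slow', float)])
history_output = {'fast': np.arange(3.0), 'slow': np.arange(3.0) * 2}

history = np.zeros(3, dtype=dst_dtype)
for key in ('fast', 'slow'):
    history[key] = history_output[key]

assert history['slow'].tolist() == [0.0, 2.0, 4.0]
```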
@@ -146,28 +194,39 @@ async def fsp_compute(
 
     # setup a respawn handle
     with trio.CancelScope() as cs:
+
+        # TODO: might be better to just make a "restart" method where
+        # the target task is spawned implicitly and then the event is
+        # set via some higher level api? At that point we might as well
+        # be writing a one-cancels-one nursery though right?
         tracker = TaskTracker(trio.Event(), cs)
-        await ctx.started(index)
         task_status.started((tracker, index))
+
         profiler(f'{func_name} yield last index')
 
         # import time
         # last = time.time()
 
         try:
-            # rt stream
-            async with ctx.open_stream() as stream:
                 async for processed in out_stream:
 
                     log.debug(f"{func_name}: {processed}")
+                    key, output = processed
                     index = src.index
-                    dst.array[-1][func_name] = processed
+                    dst.array[-1][key] = output
 
                     # NOTE: for now we aren't streaming this to the consumer
                     # stream latest array index entry which basically just acts
                     # as trigger msg to tell the consumer to read from shm
-                    if attach_stream:
-                        await stream.send(index)
+                    # TODO: further this should likely be implemented much
+                    # like our `Feed` api where there is one background
+                    # "service" task which computes output and then sends to
+                    # N-consumers who subscribe for the real-time output,
+                    # which we'll likely want to implement using local-mem
+                    # chans for the fan out?
+                    # if attach_stream:
+                    #     await client_stream.send(index)
 
             # period = time.time() - last
             # hz = 1/period if period else float('nan')

@@ -182,15 +241,18 @@ async def fsp_compute(
 async def cascade(
 
     ctx: tractor.Context,
-    brokername: str,
+    # data feed key
+    fqsn: str,
 
     src_shm_token: dict,
     dst_shm_token: tuple[str, np.dtype],
 
-    symbol: str,
-    func_name: str,
-    zero_on_step: bool = False,
+    ns_path: NamespacePath,
+
+    shm_registry: dict[str, _Token],
 
+    zero_on_step: bool = False,
     loglevel: Optional[str] = None,
 
 ) -> None:

@@ -199,7 +261,10 @@ async def cascade(
     destination shm array buffer.
 
     '''
-    profiler = pg.debug.Profiler(delayed=False, disabled=False)
+    profiler = pg.debug.Profiler(
+        delayed=False,
+        disabled=False
+    )
+
     if loglevel:
         get_console_log(loglevel)

@@ -207,15 +272,34 @@ async def cascade(
     src = attach_shm_array(token=src_shm_token)
     dst = attach_shm_array(readonly=False, token=dst_shm_token)
 
-    func: Callable = _fsp_builtins.get(func_name)
+    reg = _load_builtins()
+    lines = '\n'.join([f'{key.rpartition(":")[2]} => {key}' for key in reg])
+    log.info(
+        f'Registered FSP set:\n{lines}'
+    )
+
+    # update actorlocal flows table which registers
+    # readonly "instances" of this fsp for symbol/source
+    # so that consumer fsps can look it up by source + fsp.
+    # TODO: ugh i hate this wind/unwind to list over the wire
+    # but not sure how else to do it.
+    for (token, fsp_name, dst_token) in shm_registry:
+        Fsp._flow_registry[
+            (_Token.from_msg(token), fsp_name)
+        ] = _Token.from_msg(dst_token)
+
+    fsp: Fsp = reg.get(
+        NamespacePath(ns_path)
+    )
+    func = fsp.func
+
     if not func:
         # TODO: assume it's a func target path
-        raise ValueError('Unknown fsp target: {func_name}')
+        raise ValueError(f'Unknown fsp target: {ns_path}')
 
     # open a data feed stream with requested broker
     async with data.feed.maybe_open_feed(
-        brokername,
-        [symbol],
+        [fqsn],
 
         # TODO throttle tick outputs from *this* daemon since
         # it'll emit tons of ticks due to the throttle only

@@ -224,12 +308,14 @@ async def cascade(
         # tick_throttle=60,
 
     ) as (feed, quote_stream):
+        symbol = feed.symbols[fqsn]
 
-        profiler(f'{func_name}: feed up')
+        profiler(f'{func}: feed up')
 
         assert src.token == feed.shm.token
         # last_len = new_len = len(src.array)
 
+        func_name = func.__name__
         async with (
             trio.open_nursery() as n,
         ):

@@ -237,7 +323,6 @@ async def cascade(
             fsp_target = partial(
 
                 fsp_compute,
-                ctx=ctx,
                 symbol=symbol,
                 feed=feed,
                 quote_stream=quote_stream,

@@ -246,7 +331,7 @@ async def cascade(
                 src=src,
                 dst=dst,
 
-                func_name=func_name,
+                # target
                 func=func
             )

@@ -258,13 +343,39 @@ async def cascade(
 
             profiler(f'{func_name}: fsp up')
 
-            async def resync(tracker: TaskTracker) -> tuple[TaskTracker, int]:
+            # sync client
+            await ctx.started(index)
+
+            # XXX: rt stream with client which we MUST
+            # open here (and keep it open) in order to make
+            # incremental "updates" as history prepends take
+            # place.
+            async with ctx.open_stream() as client_stream:
+
+                # TODO: these likely should all become
+                # methods of this ``TaskLifetime`` or wtv
+                # abstraction..
+                async def resync(
+                    tracker: TaskTracker,
+
+                ) -> tuple[TaskTracker, int]:
                     # TODO: adopt an incremental update engine/approach
                     # where possible here eventually!
-                log.warning(f're-syncing fsp {func_name} to source')
+                    log.debug(f're-syncing fsp {func_name} to source')
                     tracker.cs.cancel()
                     await tracker.complete.wait()
-                return await n.start(fsp_target)
+                    tracker, index = await n.start(fsp_target)
+
+                    # always trigger UI refresh after history update,
+                    # see ``piker.ui._fsp.FspAdmin.open_chain()`` and
+                    # ``piker.ui._display.trigger_update()``.
+                    await client_stream.send({
+                        'fsp_update': {
+                            'key': dst_shm_token,
+                            'first': dst._first.value,
+                            'last': dst._last.value,
+                        }})
+                    return tracker, index
 
                 def is_synced(
                     src: ShmArray,

@@ -304,14 +415,21 @@ async def cascade(
 
                 s, step, ld = is_synced(src, dst)
 
+                # detect sample period step for subscription to increment
+                # signal
+                times = src.array['time']
+                delay_s = times[-1] - times[times != times[-1]][-1]
+
                 # Increment the underlying shared memory buffer on every
                 # "increment" msg received from the underlying data feed.
-                async with feed.index_stream() as stream:
+                async with feed.index_stream(
+                    int(delay_s)
+                ) as istream:
+
                     profiler(f'{func_name}: sample stream up')
                     profiler.finish()
 
-                    async for msg in stream:
+                    async for _ in istream:
 
                         # respawn the compute task if the source
                         # array has been updated such that we compute
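The sample-period detection above (`delay_s`) differences the last timestamp against the most recent distinct one; a worked example:

```python
# worked example of the sample-period detection used above.
import numpy as np

times = np.array([60, 120, 180, 240])
delay_s = times[-1] - times[times != times[-1]][-1]
assert delay_s == 60  # i.e. a 1Min source feed
```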
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) Tyler Goodlet (in stewardship of pikers)
 
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -23,6 +23,7 @@ from typing import AsyncIterator, Optional
 import numpy as np
 from numba import jit, float64, optional, int64
 
+from ._api import fsp
 from ..data._normalize import iterticks
 from ..data._sharedmem import ShmArray
 

@@ -106,7 +107,7 @@ def ema(
 #     nopython=True,
 #     nogil=True
 # )
-def rsi(
+def _rsi(
 
     # TODO: use https://github.com/ramonhagenaars/nptyping
     signal: 'np.ndarray[float64]',

@@ -146,7 +147,7 @@
     return rsi, up_ema[-1], down_ema[-1]
 
 
-def wma(
+def _wma(
 
     signal: np.ndarray,
     length: int,

@@ -166,13 +167,38 @@
 
     assert length == len(weights)
 
+    # lol, for long sequences this is nutso slow and expensive..
     return np.convolve(signal, weights, 'valid')
 
 
# @piker.fsp.emit(
|
@fsp
|
||||||
# timeframes=['1s', '5s', '15s', '1m', '5m', '1H'],
|
async def wma(
|
||||||
# )
|
|
||||||
async def _rsi(
|
source, #: AsyncStream[np.ndarray],
|
||||||
|
length: int,
|
||||||
|
ohlcv: np.ndarray, # price time-frame "aware"
|
||||||
|
|
||||||
|
) -> AsyncIterator[np.ndarray]: # maybe something like like FspStream?
|
||||||
|
'''
|
||||||
|
Streaming weighted moving average.
|
||||||
|
|
||||||
|
``weights`` is a sequence of already scaled values. As an example
|
||||||
|
for the WMA often found in "techincal analysis":
|
||||||
|
``weights = np.arange(1, N) * N*(N-1)/2``.
|
||||||
|
|
||||||
|
'''
|
||||||
|
# deliver historical output as "first yield"
|
||||||
|
yield _wma(ohlcv.array['close'], length)
|
||||||
|
|
||||||
|
# begin real-time section
|
||||||
|
|
||||||
|
async for quote in source:
|
||||||
|
for tick in iterticks(quote, type='trade'):
|
||||||
|
yield _wma(ohlcv.last(length))
|
||||||
|
|
||||||
|
|
||||||
|
@fsp
|
||||||
|
async def rsi(
|
||||||
|
|
||||||
source: 'QuoteStream[Dict[str, Any]]', # noqa
|
source: 'QuoteStream[Dict[str, Any]]', # noqa
|
||||||
ohlcv: ShmArray,
|
ohlcv: ShmArray,
|
||||||
|
@ -188,11 +214,11 @@ async def _rsi(
|
||||||
sig = ohlcv.array['close']
|
sig = ohlcv.array['close']
|
||||||
|
|
||||||
# wilder says to seed the RSI EMAs with the SMA for the "period"
|
# wilder says to seed the RSI EMAs with the SMA for the "period"
|
||||||
seed = wma(ohlcv.last(period)['close'], period)[0]
|
seed = _wma(ohlcv.last(period)['close'], period)[0]
|
||||||
|
|
||||||
# TODO: the emas here should be seeded with a period SMA as per
|
# TODO: the emas here should be seeded with a period SMA as per
|
||||||
# wilder's original formula..
|
# wilder's original formula..
|
||||||
rsi_h, last_up_ema_close, last_down_ema_close = rsi(
|
rsi_h, last_up_ema_close, last_down_ema_close = _rsi(
|
||||||
sig, period, seed, seed)
|
sig, period, seed, seed)
|
||||||
up_ema_last = last_up_ema_close
|
up_ema_last = last_up_ema_close
|
||||||
down_ema_last = last_down_ema_close
|
down_ema_last = last_down_ema_close
|
||||||
|
@@ -218,35 +244,10 @@ async def _rsi(
             last_down_ema_close = down_ema_last
             index = ohlcv.index

-        rsi_out, up_ema_last, down_ema_last = rsi(
+        rsi_out, up_ema_last, down_ema_last = _rsi(
             sig,
             period=period,
             up_ema_last=last_up_ema_close,
             down_ema_last=last_down_ema_close,
         )
         yield rsi_out[-1:]
-
-
-async def _wma(
-
-    source,  #: AsyncStream[np.ndarray],
-    length: int,
-    ohlcv: np.ndarray,  # price time-frame "aware"
-
-) -> AsyncIterator[np.ndarray]:  # maybe something like like FspStream?
-    '''
-    Streaming weighted moving average.
-
-    ``weights`` is a sequence of already scaled values. As an example
-    for the WMA often found in "techincal analysis":
-    ``weights = np.arange(1, N) * N*(N-1)/2``.
-
-    '''
-    # deliver historical output as "first yield"
-    yield wma(ohlcv.array['close'], length)
-
-    # begin real-time section
-
-    async for quote in source:
-        for tick in iterticks(quote, type='trade'):
-            yield wma(ohlcv.last(length))
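For reference, the convolution-based ``_wma`` above is just a dot product of each ``length``-sample window against pre-scaled weights. A standalone numpy sketch, not the repo's exact helper (which takes the weights already scaled); note ``np.convolve`` applies the kernel flipped, so the weights are reversed here to put the heaviest weight on the most recent sample:

    import numpy as np

    def wma_ref(signal: np.ndarray, length: int) -> np.ndarray:
        # linear weights 1..length normalized by the triangular
        # number length*(length+1)/2 so they sum to one.
        weights = np.arange(1, length + 1) / (length * (length + 1) / 2)
        # reverse since convolution flips the kernel.
        return np.convolve(signal, weights[::-1], 'valid')

    sig = np.array([1., 2., 3., 4., 5.])
    # last value: (3*1 + 4*2 + 5*3) / 6 = 26/6 ~= 4.333
    print(wma_ref(sig, 3)[-1])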
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
+# Copyright (C) Tyler Goodlet (in stewardship of pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by

@@ -14,20 +14,33 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-from typing import AsyncIterator, Optional
+from typing import AsyncIterator, Optional, Union

 import numpy as np
+from tractor.trionics._broadcast import AsyncReceiver

+from ._api import fsp
 from ..data._normalize import iterticks
+from ..data._sharedmem import ShmArray
+from ._momo import _wma
+from ..log import get_logger
+
+log = get_logger(__name__)
+
+
+# NOTE: is the same as our `wma` fsp, and if so which one is faster?
+# Ohhh, this is an IIR style i think? So it has an anchor point
+# effectively instead of a moving window/FIR style?
 def wap(

     signal: np.ndarray,
     weights: np.ndarray,

 ) -> np.ndarray:
-    """Weighted average price from signal and weights.
+    '''
+    Weighted average price from signal and weights.

-    """
+    '''
     cum_weights = np.cumsum(weights)
     cum_weighted_input = np.cumsum(signal * weights)

@@ -46,16 +59,25 @@ def wap(
     )


-async def _tina_vwap(
-    source,  #: AsyncStream[np.ndarray],
-    ohlcv: np.ndarray,  # price time-frame "aware"
+@fsp
+async def tina_vwap(
+
+    source: AsyncReceiver[dict],
+    ohlcv: ShmArray,  # OHLC sampled history
+
+    # TODO: anchor logic (eg. to session start)
     anchors: Optional[np.ndarray] = None,
-) -> AsyncIterator[np.ndarray]:  # maybe something like like FspStream?
-    """Streaming volume weighted moving average.
+
+) -> Union[
+    AsyncIterator[np.ndarray],
+    float
+]:
+    '''
+    Streaming volume weighted moving average.

     Calling this "tina" for now since we're using HLC3 instead of tick.

-    """
+    '''
     if anchors is None:
         # TODO:
         # anchor to session start of data if possible

@@ -75,8 +97,10 @@ async def _tina_vwap(
     # vwap_tot = h_vwap[-1]

     async for quote in source:
-        for tick in iterticks(quote, types=['trade']):
+        for tick in iterticks(
+            quote,
+            types=['trade'],
+        ):

             # c, h, l, v = ohlcv.array[-1][
             #     ['closes', 'high', 'low', 'volume']
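The HLC3 "tina" price used above is the bar-wise mean of high, low and close; volume-weighting it gives the vwap. A vectorized sketch over an OHLCV struct-array (illustrative only, mirrors the history seeding, not the tick-by-tick loop):

    import numpy as np

    def hlc3_vwap(a: np.ndarray) -> np.ndarray:
        # per-bar "tina" price
        hlc3 = (a['high'] + a['low'] + a['close']) / 3
        v = a['volume']
        # cumulative $-volume over cumulative volume
        return np.cumsum(hlc3 * v) / np.cumsum(v)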
@@ -90,4 +114,245 @@ async def _tina_vwap(
             w_tot += price * size

             # yield ((((o + h + l) / 3) * v) weights_tot) / v_tot
-            yield w_tot / v_tot
+            yield 'tina_vwap', w_tot / v_tot
+
+
+@fsp(
+    outputs=(
+        'dolla_vlm',
+        'dark_vlm',
+        'trade_count',
+        'dark_trade_count',
+    ),
+    curve_style='step',
+)
+async def dolla_vlm(
+    source: AsyncReceiver[dict],
+    ohlcv: ShmArray,  # OHLC sampled history
+
+) -> AsyncIterator[
+    tuple[str, Union[np.ndarray, float]],
+]:
+    '''
+    "Dollar Volume", aka the volume in asset-currency-units (usually
+    a fiat) computed from some price function for the sample step
+    *multiplied* (*) by the asset unit volume.
+
+    Useful for comparing cross asset "money flow" in #s that are
+    asset-currency-independent.
+
+    '''
+    a = ohlcv.array
+    chl3 = (a['close'] + a['high'] + a['low']) / 3
+    v = a['volume']
+
+    # on first iteration yield history
+    yield {
+        'dolla_vlm': chl3 * v,
+        'dark_vlm': None,
+    }
+
+    i = ohlcv.index
+    dvlm = vlm = 0
+    dark_trade_count = trade_count = 0
+
+    async for quote in source:
+        for tick in iterticks(
+            quote,
+            types=(
+                'trade',
+                'dark_trade',
+            ),
+            deduplicate_darks=True,
+        ):
+
+            # this computes tick-by-tick weightings from here forward
+            size = tick['size']
+            price = tick['price']
+
+            li = ohlcv.index
+            if li > i:
+                i = li
+                trade_count = dark_trade_count = dvlm = vlm = 0
+
+            # TODO: for marginned instruments (futes, etfs?) we need to
+            # show the margin $vlm by multiplying by whatever multiplier
+            # is reported in the sym info.
+
+            ttype = tick.get('type')
+
+            if ttype == 'dark_trade':
+                dvlm += price * size
+                yield 'dark_vlm', dvlm
+
+                dark_trade_count += 1
+                yield 'dark_trade_count', dark_trade_count
+
+                # print(f'{dark_trade_count}th dark_trade: {tick}')
+
+            else:
+                # print(f'vlm: {tick}')
+                vlm += price * size
+                yield 'dolla_vlm', vlm
+
+                trade_count += 1
+                yield 'trade_count', trade_count
+
+            # TODO: plot both to compare?
+            # c, h, l, v = ohlcv.last()[
+            #     ['close', 'high', 'low', 'volume']
+            # ][0]
+            # tina_lvlm = c+h+l/3 * v
+            # print(f' tinal vlm: {tina_lvlm}')
+
+
+@fsp(
+    # TODO: eventually I guess we should support some kinda declarative
+    # graphics config syntax per output yah? That seems like a clean way
+    # to let users configure things? Not sure how exactly to offer that
+    # api as well as how to expose such a thing *inside* the body?
+    outputs=(
+        # pulled verbatim from `ib` for now
+        '1m_trade_rate',
+        '1m_vlm_rate',
+
+        # our own instantaneous rate calcs which are all
+        # parameterized by a samples count (bars) period
+        'trade_rate',
+        'dark_trade_rate',
+
+        'dvlm_rate',
+        'dark_dvlm_rate',
+    ),
+    curve_style='line',
+)
+async def flow_rates(
+    source: AsyncReceiver[dict],
+    ohlcv: ShmArray,  # OHLC sampled history
+
+    # TODO (idea): a dynamic generic / boxing type that can be updated by other
+    # FSPs, user input, and possibly any general event stream in
+    # real-time. Hint: ideally implemented with caching until mutated
+    # ;)
+    period: 'Param[int]' = 6,  # noqa
+
+    # TODO: support other means by providing a map
+    # to weights `partial()`-ed with `wma()`?
+    mean_type: str = 'arithmetic',
+
+    # TODO (idea): a generic for declaring boxed fsps much like ``pytest``
+    # fixtures? This probably needs a lot of thought if we want to offer
+    # a higher level composition syntax eventually (oh right gotta make
+    # an issue for that).
+    # ideas for how to allow composition / intercalling:
+    # - offer a `Fsp.get_history()` to do the first yield output?
+    #  * err wait can we just have shm access directly?
+    # - how would it work if some consumer fsp wanted to dynamically
+    # change params which are input to the callee fsp? i guess we could
+    # lazy copy in that case?
+    # dvlm: 'Fsp[dolla_vlm]'
+
+) -> AsyncIterator[
+    tuple[str, Union[np.ndarray, float]],
+]:
+    # generally no history available prior to real-time calcs
+    yield {
+        # from ib
+        '1m_trade_rate': None,
+        '1m_vlm_rate': None,
+
+        'trade_rate': None,
+        'dark_trade_rate': None,
+
+        'dvlm_rate': None,
+        'dark_dvlm_rate': None,
+    }
+
+    # TODO: 3.10 do ``anext()``
+    quote = await source.__anext__()
+
+    # ltr = 0
+    # lvr = 0
+    tr = quote.get('tradeRate')
+    yield '1m_trade_rate', tr or 0
+    vr = quote.get('volumeRate')
+    yield '1m_vlm_rate', vr or 0
+
+    yield 'trade_rate', 0
+    yield 'dark_trade_rate', 0
+    yield 'dvlm_rate', 0
+    yield 'dark_dvlm_rate', 0
+
+    # NOTE: in theory we could dynamically allocate a cascade based on
+    # this call but not sure if that's too "dynamic" in terms of
+    # validating cascade flows from message typing perspective.
+
+    # attach to ``dolla_vlm`` fsp running
+    # on this same source flow.
+    dvlm_shm = dolla_vlm.get_shm(ohlcv)
+
+    # precompute arithmetic mean weights (all ones)
+    seq = np.full((period,), 1)
+    weights = seq / seq.sum()
+
+    async for quote in source:
+        if not quote:
+            log.error("OH WTF NO QUOTE IN FSP")
+            continue
+
+        # dvlm_wma = _wma(
+        #     dvlm_shm.array['dolla_vlm'],
+        #     period,
+        #     weights=weights,
+        # )
+        # yield 'dvlm_rate', dvlm_wma[-1]
+
+        if period > 1:
+            trade_rate_wma = _wma(
+                dvlm_shm.array['trade_count'][-period:],
+                period,
+                weights=weights,
+            )
+            trade_rate = trade_rate_wma[-1]
+            # print(trade_rate)
+            yield 'trade_rate', trade_rate
+        else:
+            # instantaneous rate per sample step
+            count = dvlm_shm.array['trade_count'][-1]
+            yield 'trade_rate', count
+
+        # TODO: skip this if no dark vlm is declared
+        # by symbol info (eg. in crypto$)
+        # dark_dvlm_wma = _wma(
+        #     dvlm_shm.array['dark_vlm'],
+        #     period,
+        #     weights=weights,
+        # )
+        # yield 'dark_dvlm_rate', dark_dvlm_wma[-1]
+
+        if period > 1:
+            dark_trade_rate_wma = _wma(
+                dvlm_shm.array['dark_trade_count'][-period:],
+                period,
+                weights=weights,
+            )
+            yield 'dark_trade_rate', dark_trade_rate_wma[-1]
+        else:
+            # instantaneous rate per sample step
+            dark_count = dvlm_shm.array['dark_trade_count'][-1]
+            yield 'dark_trade_rate', dark_count
+
+        # XXX: ib specific schema we should
+        # probably pre-pack ourselves.
+
+        # tr = quote.get('tradeRate')
+        # if tr is not None and tr != ltr:
+        #     # print(f'trade rate: {tr}')
+        #     yield '1m_trade_rate', tr
+        #     ltr = tr
+
+        # vr = quote.get('volumeRate')
+        # if vr is not None and vr != lvr:
+        #     # print(f'vlm rate: {vr}')
+        #     yield '1m_vlm_rate', vr
+        #     lvr = vr
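Both new fsps follow the same two-phase output protocol: the first yield is a dict of full-history arrays (or ``None``) keyed by declared output name, and every subsequent yield is an ``(output_name, latest_value)`` pair. A toy consumer showing just that shape (hypothetical; the engine's real dispatch differs):

    async def consume_fsp(agen) -> None:
        # phase 1: history dict keyed by output name
        history = await agen.__anext__()
        print('seeded outputs:', list(history))

        # phase 2: (name, value) updates per tick/sample
        async for key, value in agen:
            print(f'{key} -> {value}')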
@@ -25,10 +25,13 @@ from pygments import highlight, lexers, formatters

 # Makes it so we only see the full module name when using ``__name__``
 # without the extra "piker." prefix.
-_proj_name = 'piker'
+_proj_name: str = 'piker'


-def get_logger(name: str = None) -> logging.Logger:
+def get_logger(
+    name: str = None,
+
+) -> logging.Logger:
     '''Return the package log or a sub-log for `name` if provided.
     '''
     return tractor.log.get_logger(name=name, _root_name=_proj_name)
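Typical call site for the helper, unchanged by this diff:

    from piker.log import get_logger

    # sub-log keyed by module, rooted at the 'piker' project logger
    log = get_logger(__name__)
    log.info('sub-logger ready')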
@@ -0,0 +1,80 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship of piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+sugarz for trio/tractor conc peeps.
+
+'''
+from typing import AsyncContextManager
+from typing import TypeVar
+from contextlib import asynccontextmanager as acm
+
+import trio
+
+
+# A regular invariant generic type
+T = TypeVar("T")
+
+
+async def _enter_and_sleep(
+
+    mngr: AsyncContextManager[T],
+    to_yield: dict[int, T],
+    all_entered: trio.Event,
+    # task_status: TaskStatus[T] = trio.TASK_STATUS_IGNORED,
+
+) -> T:
+    '''Open the async context manager deliver it's value
+    to this task's spawner and sleep until cancelled.
+
+    '''
+    async with mngr as value:
+        to_yield[id(mngr)] = value
+
+        if all(to_yield.values()):
+            all_entered.set()
+
+        # sleep until cancelled
+        await trio.sleep_forever()
+
+
+@acm
+async def async_enter_all(
+
+    *mngrs: list[AsyncContextManager[T]],
+
+) -> tuple[T]:
+
+    to_yield = {}.fromkeys(id(mngr) for mngr in mngrs)
+
+    all_entered = trio.Event()
+
+    async with trio.open_nursery() as n:
+        for mngr in mngrs:
+            n.start_soon(
+                _enter_and_sleep,
+                mngr,
+                to_yield,
+                all_entered,
+            )
+
+        # deliver control once all managers have started up
+        await all_entered.wait()
+        yield tuple(to_yield.values())
+
+        # tear down all sleeper tasks thus triggering individual
+        # mngr ``__aexit__()``s.
+        n.cancel_scope.cancel()
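A usage sketch for the new helper: each manager is entered in its own background task and the values are yielded as a tuple once all have started (the example managers are made up). Note the readiness check uses ``all()`` over the yielded values, so a manager yielding a falsey value would stall this version:

    import trio
    from contextlib import asynccontextmanager as acm

    @acm
    async def slow_resource(i: int):
        await trio.sleep(0.1)  # stand-in for slow startup
        yield i

    async def main():
        async with async_enter_all(
            slow_resource(1),
            slow_resource(2),
        ) as (a, b):
            assert (a, b) == (1, 2)

    trio.run(main)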
@@ -18,55 +18,32 @@
 Anchor funtions for UI placement of annotions.

 '''
-from typing import Callable
+from __future__ import annotations
+from typing import Callable, TYPE_CHECKING

 from PyQt5.QtCore import QPointF
 from PyQt5.QtWidgets import QGraphicsPathItem

+if TYPE_CHECKING:
+    from ._chart import ChartPlotWidget
 from ._label import Label


-def marker_right_points(
-
-    chart: 'ChartPlotWidget',  # noqa
-    marker_size: int = 20,
-
-) -> (float, float, float):
-    '''Return x-dimension, y-axis-aware, level-line marker oriented scene values.
-
-    X values correspond to set the end of a level line, end of
-    a paried level line marker, and the right most side of the "right"
-    axis respectively.
-
-    '''
-    # TODO: compute some sensible maximum value here
-    # and use a humanized scheme to limit to that length.
-    l1_len = chart._max_l1_line_len
-    ryaxis = chart.getAxis('right')
-
-    r_axis_x = ryaxis.pos().x()
-    up_to_l1_sc = r_axis_x - l1_len - 10
-
-    marker_right = up_to_l1_sc - (1.375 * 2 * marker_size)
-    line_end = marker_right - (6/16 * marker_size)
-
-    return line_end, marker_right, r_axis_x
-
-
 def vbr_left(
     label: Label,

 ) -> Callable[..., float]:
-    """Return a closure which gives the scene x-coordinate for the
-    leftmost point of the containing view box.
+    '''
+    Return a closure which gives the scene x-coordinate for the leftmost
+    point of the containing view box.

-    """
+    '''
     return label.vbr().left


 def right_axis(

-    chart: 'ChartPlotWidget',  # noqa
+    chart: ChartPlotWidget,  # noqa
     label: Label,

     side: str = 'left',

@@ -141,13 +118,13 @@ def gpath_pin(
     return path_br.bottomRight() - QPointF(label.w, label.h / 6)


 def pp_tight_and_right(
     label: Label

 ) -> QPointF:
-    '''Place *just* right of the pp label.
+    '''
+    Place *just* right of the pp label.

     '''
-    txt = label.txt
+    # txt = label.txt
     return label.txt.pos() + QPointF(label.w - label.h/3, 0)

@@ -26,8 +26,6 @@ from PyQt5.QtWidgets import QGraphicsPathItem
 from pyqtgraph import Point, functions as fn, Color
 import numpy as np

-from ._anchors import marker_right_points
-

 def mk_marker_path(

@@ -116,7 +114,7 @@ class LevelMarker(QGraphicsPathItem):

         self.get_level = get_level
         self._on_paint = on_paint
-        self.scene_x = lambda: marker_right_points(chart)[1]
+        self.scene_x = lambda: chart.marker_right_points()[1]
         self.level: float = 0
         self.keep_in_view = keep_in_view

@@ -169,7 +167,7 @@ class LevelMarker(QGraphicsPathItem):
         vr = view.state['viewRange']
         ymn, ymx = vr[1]

-        # _, marker_right, _ = marker_right_points(line._chart)
+        # _, marker_right, _ = line._chart.marker_right_points()
         x = self.scene_x()

         if self.style == '>|':  # short style, points "down-to" line
@@ -85,11 +85,11 @@ async def _async_main(
     screen = godwidget.window.current_screen()

     # configure graphics update throttling based on display refresh rate
-    _display._clear_throttle_rate = min(
+    _display._quote_throttle_rate = min(
         round(screen.refreshRate()),
-        _display._clear_throttle_rate,
+        _display._quote_throttle_rate,
     )
-    log.info(f'Set graphics update rate to {_display._clear_throttle_rate} Hz')
+    log.info(f'Set graphics update rate to {_display._quote_throttle_rate} Hz')

     # TODO: do styling / themeing setup
     # _style.style_ze_sheets(godwidget)

@@ -170,10 +170,11 @@ def _main(
     piker_loglevel: str,
     tractor_kwargs,
 ) -> None:
-    """Sync entry point to start a chart app.
+    '''
+    Sync entry point to start a chart: a ``tractor`` + Qt runtime
+    entry point

-    """
-    # ``tractor`` + Qt runtime entry point
+    '''
     run_qtractor(
         func=_async_main,
         args=(sym, brokernames, piker_loglevel),
@@ -18,39 +18,45 @@
 Chart axes graphics and behavior.

 """
-from typing import List, Tuple, Optional
+from functools import lru_cache
+from typing import Optional, Callable
 from math import floor

-import pandas as pd
+import numpy as np
 import pyqtgraph as pg
 from PyQt5 import QtCore, QtGui, QtWidgets
 from PyQt5.QtCore import QPointF

-from ._style import DpiAwareFont, hcolor, _font
 from ..data._source import float_digits
+from ._label import Label
+from ._style import DpiAwareFont, hcolor, _font
+from ._interaction import ChartView

 _axis_pen = pg.mkPen(hcolor('bracket'))


 class Axis(pg.AxisItem):
-    """A better axis that sizes tick contents considering font size.
+    '''
+    A better axis that sizes tick contents considering font size.

-    """
+    '''
     def __init__(
         self,
         linkedsplits,
         typical_max_str: str = '100 000.000',
-        min_tick: int = 2,
+        text_color: str = 'bracket',
         **kwargs
-    ) -> None:

-        super().__init__(**kwargs)
+    ) -> None:
+        super().__init__(
+            # textPen=textPen,
+            **kwargs
+        )

         # XXX: pretty sure this makes things slower
         # self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

         self.linkedsplits = linkedsplits
-        self._min_tick = min_tick
         self._dpi_font = _font

         self.setTickFont(_font.font)
@@ -72,44 +78,128 @@ class Axis(pg.AxisItem):
         })

         self.setTickFont(_font.font)

+        # NOTE: this is for surrounding "border"
         self.setPen(_axis_pen)
+
+        # this is the text color
+        # self.setTextPen(pg.mkPen(hcolor(text_color)))
+        self.text_color = text_color
+
         self.typical_br = _font._qfm.boundingRect(typical_max_str)

         # size the pertinent axis dimension to a "typical value"
         self.size_to_values()

+    @property
+    def text_color(self) -> str:
+        return self._text_color
+
+    @text_color.setter
+    def text_color(self, text_color: str) -> None:
+        self.setTextPen(pg.mkPen(hcolor(text_color)))
+        self._text_color = text_color
+
     def size_to_values(self) -> None:
         pass

-    def set_min_tick(self, size: int) -> None:
-        self._min_tick = size
-
-    def txt_offsets(self) -> Tuple[int, int]:
+    def txt_offsets(self) -> tuple[int, int]:
         return tuple(self.style['tickTextOffset'])


 class PriceAxis(Axis):

+    def __init__(
+        self,
+        *args,
+        min_tick: int = 2,
+        title: str = '',
+        formatter: Optional[Callable[[float], str]] = None,
+        **kwargs
+
+    ) -> None:
+        super().__init__(*args, **kwargs)
+        self.formatter = formatter
+        self._min_tick: int = min_tick
+        self.title = None
+
+    def set_title(
+        self,
+        title: str,
+        view: Optional[ChartView] = None,
+        color: Optional[str] = None,
+
+    ) -> Label:
+        '''
+        Set a sane UX label using our built-in ``Label``.
+
+        '''
+        # XXX: built-in labels but they're huge, and placed weird..
+        # self.setLabel(title)
+        # self.showLabel()
+
+        label = self.title = Label(
+            view=view or self.linkedView(),
+            fmt_str=title,
+            color=color or self.text_color,
+            parent=self,
+            # update_on_range_change=False,
+        )
+
+        def below_axis() -> QPointF:
+            return QPointF(
+                0,
+                self.size().height(),
+            )
+
+        # XXX: doesn't work? have to pass it above
+        # label.txt.setParent(self)
+        label.scene_anchor = below_axis
+        label.render()
+        label.show()
+        label.update()
+        return label
+
+    def set_min_tick(
+        self,
+        size: int
+    ) -> None:
+        self._min_tick = size
+
     def size_to_values(self) -> None:
+        # self.typical_br = _font._qfm.boundingRect(typical_max_str)
         self.setWidth(self.typical_br.width())

     # XXX: drop for now since it just eats up h space

-    def tickStrings(self, vals, scale, spacing):
-        # TODO: figure out how to enforce min tick spacing by passing
-        # it into the parent type
-        digits = max(float_digits(spacing * scale), self._min_tick)
+    def tickStrings(
+        self,
+        vals: tuple[float],
+        scale: float,
+        spacing: float,
+
+    ) -> list[str]:
+        # TODO: figure out how to enforce min tick spacing by passing it
+        # into the parent type
+        digits = max(
+            float_digits(spacing * scale),
+            self._min_tick,
+        )
+        if self.title:
+            self.title.update()

         # print(f'vals: {vals}\nscale: {scale}\nspacing: {spacing}')
         # print(f'digits: {digits}')

+        if not self.formatter:
             return [
                 ('{value:,.{digits}f}').format(
                     digits=digits,
                     value=v,
                 ).replace(',', ' ') for v in vals
             ]
+        else:
+            return list(map(self.formatter, vals))


 class DynamicDateAxis(Axis):
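The new ``formatter`` hook lets callers fully override tick text. A hedged wiring sketch (the ``linked`` value and the pyqtgraph ``orientation`` kwarg are assumed from surrounding chart setup; the humanizer itself is made up):

    # hypothetical: humanize large y-values instead of the default
    # space-separated fixed point format.
    def humanize(v: float) -> str:
        for div, suffix in ((1e9, 'B'), (1e6, 'M'), (1e3, 'K')):
            if abs(v) >= div:
                return f'{v / div:.2f}{suffix}'
        return f'{v:.2f}'

    axis = PriceAxis(
        linkedsplits=linked,  # assumed in scope
        formatter=humanize,
        orientation='right',
    )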
@@ -128,13 +218,14 @@ class DynamicDateAxis(Axis):

     def _indexes_to_timestrs(
         self,
-        indexes: List[int],
-    ) -> List[str]:
+        indexes: list[int],
+
+    ) -> list[str]:

+        # try:
         chart = self.linkedsplits.chart
-        bars = chart._arrays['ohlc']
-        shm = self.linkedsplits.chart._shm
+        flow = chart._flows[chart.name]
+        shm = flow.shm
+        bars = shm.array
         first = shm._first.value

         bars_len = len(bars)

@@ -151,12 +242,27 @@ class DynamicDateAxis(Axis):
         )]

         # TODO: **don't** have this hard coded shift to EST
-        dts = pd.to_datetime(epochs, unit='s')  # - 4*pd.offsets.Hour()
+        # delay = times[-1] - times[-2]
+        dts = np.array(epochs, dtype='datetime64[s]')

-        delay = times[-1] - times[-2]
-        return dts.strftime(self.tick_tpl[delay])
+        # see units listing:
+        # https://numpy.org/devdocs/reference/arrays.datetime.html#datetime-units
+        return list(np.datetime_as_string(dts))

-    def tickStrings(self, values: List[float], scale, spacing):
+        # TODO: per timeframe formatting?
+        # - we probably need this based on zoom now right?
+        # prec = self.np_dt_precision[delay]
+        # return dts.strftime(self.tick_tpl[delay])
+
+    def tickStrings(
+        self,
+        values: tuple[float],
+        scale: float,
+        spacing: float,
+
+    ) -> list[str]:
+        # info = self.tickStrings.cache_info()
+        # print(info)
         return self._indexes_to_timestrs(values)
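The pandas dependency is dropped in favor of numpy's native datetime64 handling; the conversion shown above, in isolation:

    import numpy as np

    epochs = [1640995200, 1640995260]  # unix seconds
    dts = np.array(epochs, dtype='datetime64[s]')
    print(np.datetime_as_string(dts))
    # ['2022-01-01T00:00:00' '2022-01-01T00:01:00']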
@@ -207,6 +313,8 @@ class AxisLabel(pg.GraphicsObject):
         self.path = None
         self.rect = None

+        self._pw = self.pixelWidth()
+
     def paint(
         self,
         p: QtGui.QPainter,

@@ -256,9 +364,10 @@ class AxisLabel(pg.GraphicsObject):


     def boundingRect(self):  # noqa
-        """Size the graphics space from the text contents.
+        '''
+        Size the graphics space from the text contents.

-        """
+        '''
         if self.label_str:
             self._size_br_from_str(self.label_str)

@@ -274,23 +383,32 @@ class AxisLabel(pg.GraphicsObject):

         return QtCore.QRectF()

-    # return self.rect or QtCore.QRectF()
+    # TODO: but the input probably needs to be the "len" of
+    # the current text value:
+    @lru_cache
+    def _size_br_from_str(
+        self,
+        value: str

-    def _size_br_from_str(self, value: str) -> None:
-        """Do our best to render the bounding rect to a set margin
+    ) -> tuple[float, float]:
+        '''
+        Do our best to render the bounding rect to a set margin
         around provided string contents.

-        """
+        '''
         # size the filled rect to text and/or parent axis
         # if not self._txt_br:
-        #     # XXX: this can't be c
+        #     # XXX: this can't be called until stuff is rendered?
         #     self._txt_br = self._dpifont.boundingRect(value)

         txt_br = self._txt_br = self._dpifont.boundingRect(value)
         txt_h, txt_w = txt_br.height(), txt_br.width()
+        # print(f'wsw: {self._dpifont.boundingRect(" ")}')

         # allow subtypes to specify a static width and height
         h, w = self.size_hint()
+        # print(f'axis size: {self._parent.size()}')
+        # print(f'axis geo: {self._parent.geometry()}')

         self.rect = QtCore.QRectF(
             0, 0,

@@ -301,7 +419,7 @@ class AxisLabel(pg.GraphicsObject):
         # hb = self.path.controlPointRect()
         # hb_size = hb.size()

-        return self.rect
+        return (self.rect.width(), self.rect.height())

 # _common_text_flags = (
 #     QtCore.Qt.TextDontClip |

@@ -320,7 +438,7 @@ class XAxisLabel(AxisLabel):
         | QtCore.Qt.AlignCenter
     )

-    def size_hint(self) -> Tuple[float, float]:
+    def size_hint(self) -> tuple[float, float]:
         # size to parent axis height
         return self._parent.height(), None

@@ -329,31 +447,34 @@ class XAxisLabel(AxisLabel):
         abs_pos: QPointF,  # scene coords
         value: float,  # data for text
         offset: int = 0  # if have margins, k?

     ) -> None:

         timestrs = self._parent._indexes_to_timestrs([int(value)])

-        if not timestrs.any():
+        if not len(timestrs):
             return

         pad = 1*' '
-        self.label_str = pad + timestrs[0] + pad
+        self.label_str = pad + str(timestrs[0]) + pad

         _, y_offset = self._parent.txt_offsets()

         w = self.boundingRect().width()

-        self.setPos(QPointF(
-            abs_pos.x() - w/2,
-            y_offset/2,
-        ))
+        self.setPos(
+            QPointF(
+                abs_pos.x() - w/2 - self._pw,
+                y_offset/2,
+            )
+        )
         self.update()

     def _draw_arrow_path(self):
         y_offset = self._parent.style['tickTextOffset'][1]
         path = QtGui.QPainterPath()
         h, w = self.rect.height(), self.rect.width()
-        middle = w/2 - 0.5
+        middle = w/2 - self._pw * 0.5
         aw = h/2
         left = middle - aw
         right = middle + aw

@@ -396,9 +517,13 @@ class YAxisLabel(AxisLabel):
         if getattr(self._parent, 'txt_offsets', False):
             self.x_offset, y_offset = self._parent.txt_offsets()

-    def size_hint(self) -> Tuple[float, float]:
-        # size to parent axis width
-        return None, self._parent.width()
+    def size_hint(self) -> tuple[float, float]:
+        # size to parent axis width(-ish)
+        wsh = self._dpifont.boundingRect(' ').height() / 2
+        return (
+            None,
+            self._parent.size().width() - wsh,
+        )

     def update_label(
         self,

@@ -419,16 +544,19 @@ class YAxisLabel(AxisLabel):
         br = self.boundingRect()
         h = br.height()

-        self.setPos(QPointF(
-            x_offset,
-            abs_pos.y() - h / 2 - self._y_margin / 2
-        ))
+        self.setPos(
+            QPointF(
+                x_offset,
+                abs_pos.y() - h / 2 - self._pw,
+            )
+        )
         self.update()

     def update_on_resize(self, vr, r):
-        """Tiis is a ``.sigRangeChanged()`` handler.
+        '''
+        This is a ``.sigRangeChanged()`` handler.

-        """
+        '''
         index, last = self._last_datum
         if index is not None:
             self.update_from_data(index, last)

@@ -438,11 +566,13 @@ class YAxisLabel(AxisLabel):
         index: int,
         value: float,
         _save_last: bool = True,

     ) -> None:
-        """Update the label's text contents **and** position from
+        '''
+        Update the label's text contents **and** position from
         a view box coordinate datum.

-        """
+        '''
         if _save_last:
             self._last_datum = (index, value)

@@ -456,7 +586,7 @@ class YAxisLabel(AxisLabel):
         path = QtGui.QPainterPath()
         h = self.rect.height()
         path.moveTo(0, 0)
-        path.lineTo(-x_offset - h/4, h/2.)
+        path.lineTo(-x_offset - h/4, h/2. - self._pw/2)
         path.lineTo(0, h)
         path.closeSubpath()
         self.path = path
piker/ui/_chart.py (1001 lines changed): file diff suppressed because it is too large.
@@ -0,0 +1,318 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Graphics related downsampling routines for compressing to pixel
+limits on the display device.
+
+'''
+import math
+from typing import Optional
+
+import numpy as np
+from numpy.lib import recfunctions as rfn
+from numba import (
+    jit,
+    # float64, optional, int64,
+)
+
+from ..log import get_logger
+
+
+log = get_logger(__name__)
+
+
+def hl2mxmn(ohlc: np.ndarray) -> np.ndarray:
+    '''
+    Convert a OHLC struct-array containing 'high'/'low' columns
+    to a "joined" max/min 1-d array.
+
+    '''
+    index = ohlc['index']
+    hls = ohlc[[
+        'low',
+        'high',
+    ]]
+
+    mxmn = np.empty(2*hls.size, dtype=np.float64)
+    x = np.empty(2*hls.size, dtype=np.float64)
+    trace_hl(hls, mxmn, x, index[0])
+    x = x + index[0]
+
+    return mxmn, x
+
+
+@jit(
+    # TODO: the type annots..
+    # float64[:](float64[:],),
+    nopython=True,
+)
+def trace_hl(
+    hl: 'np.ndarray',
+    out: np.ndarray,
+    x: np.ndarray,
+    start: int,
+
+    # the "offset" values in the x-domain which
+    # place the 2 output points around each ``int``
+    # master index.
+    margin: float = 0.43,
+
+) -> None:
+    '''
+    "Trace" the outline of the high-low values of an ohlc sequence
+    as a line such that the maximum deviation (aka disperaion) between
+    bars if preserved.
+
+    This routine is expected to modify input arrays in-place.
+
+    '''
+    last_l = hl['low'][0]
+    last_h = hl['high'][0]
+
+    for i in range(hl.size):
+        row = hl[i]
+        l, h = row['low'], row['high']
+
+        up_diff = h - last_l
+        down_diff = last_h - l
+
+        if up_diff > down_diff:
+            out[2*i + 1] = h
+            out[2*i] = last_l
+        else:
+            out[2*i + 1] = l
+            out[2*i] = last_h
+
+        last_l = l
+        last_h = h
+
+        x[2*i] = int(i) - margin
+        x[2*i + 1] = int(i) + margin
+
+    return out
+
+
+def ohlc_flatten(
+    ohlc: np.ndarray,
+    use_mxmn: bool = True,
+
+) -> tuple[np.ndarray, np.ndarray]:
+    '''
+    Convert an OHLCV struct-array into a flat ready-for-line-plotting
+    1-d array that is 4 times the size with x-domain values distributed
+    evenly (by 0.5 steps) over each index.
+
+    '''
+    index = ohlc['index']
+
+    if use_mxmn:
+        # traces a line optimally over highs to lows
+        # using numba. NOTE: pretty sure this is faster
+        # and looks about the same as the below output.
+        flat, x = hl2mxmn(ohlc)
+
+    else:
+        flat = rfn.structured_to_unstructured(
+            ohlc[['open', 'high', 'low', 'close']]
+        ).flatten()
+
+        x = np.linspace(
+            start=index[0] - 0.5,
+            stop=index[-1] + 0.5,
+            num=len(flat),
+        )
+    return x, flat
+
+
+def ds_m4(
+    x: np.ndarray,
+    y: np.ndarray,
+    # units-per-pixel-x(dimension)
+    uppx: float,
+
+    # XXX: troll zone / easter egg..
+    # want to mess with ur pal, pass in the actual
+    # pixel width here instead of uppx-proper (i.e. pass
+    # in our ``pg.GraphicsObject`` derivative's ``.px_width()``
+    # gto mega-trip-out ur bud). Hint, it used to be implemented
+    # (wrongly) using "pixel width", so check the git history ;)
+
+    xrange: Optional[float] = None,
+
+) -> tuple[int, np.ndarray, np.ndarray]:
+    '''
+    Downsample using the M4 algorithm.
+
+    This is more or less an OHLC style sampling of a line-style series.
+
+    '''
+    # NOTE: this method is a so called "visualization driven data
+    # aggregation" approach. It gives error-free line chart
+    # downsampling, see
+    # further scientific paper resources:
+    # - http://www.vldb.org/pvldb/vol7/p797-jugel.pdf
+    # - http://www.vldb.org/2014/program/papers/demo/p997-jugel.pdf
+
+    # Details on implementation of this algo are based in,
+    # https://github.com/pikers/piker/issues/109
+
+    # XXX: from infinite on downsampling viewable graphics:
+    # "one thing i remembered about the binning - if you are
+    # picking a range within your timeseries the start and end bin
+    # should be one more bin size outside the visual range, then
+    # you get better visual fidelity at the edges of the graph"
+    # "i didn't show it in the sample code, but it's accounted for
+    # in the start and end indices and number of bins"
+
+    # should never get called unless actually needed
+    assert uppx > 1
+
+    # NOTE: if we didn't pre-slice the data to downsample
+    # you could in theory pass these as the slicing params,
+    # do we care though since we can always just pre-slice the
+    # input?
+    x_start = x[0]  # x value start/lowest in domain
+
+    if xrange is None:
+        x_end = x[-1]  # x end value/highest in domain
+        xrange = (x_end - x_start)
+
+    # XXX: always round up on the input pixels
+    # lnx = len(x)
+    # uppx *= max(4 / (1 + math.log(uppx, 2)), 1)
+
+    pxw = math.ceil(xrange / uppx)
+
+    # scale up the frame "width" directly with uppx
+    w = uppx
+
+    # ensure we make more then enough
+    # frames (windows) for the output pixel
+    frames = pxw
+
+    # if we have more and then exact integer's
+    # (uniform quotient output) worth of datum-domain-points
+    # per windows-frame, add one more window to ensure
+    # we have room for all output down-samples.
+    pts_per_pixel, r = divmod(xrange, frames)
+    if r:
+        # while r:
+        frames += 1
+        pts_per_pixel, r = divmod(xrange, frames)
+
+    # print(
+    #     f'uppx: {uppx}\n'
+    #     f'xrange: {xrange}\n'
+    #     f'pxw: {pxw}\n'
+    #     f'frames: {frames}\n'
+    # )
+    assert frames >= (xrange / uppx)
+
+    # call into ``numba``
+    nb, i_win, y_out = _m4(
+        x,
+        y,
+
+        frames,
+
+        # TODO: see func below..
+        # i_win,
+        # y_out,
+
+        # first index in x data to start at
+        x_start,
+        # window size for each "frame" of data to downsample (normally
+        # scaled by the ratio of pixels on screen to data in x-range).
+        w,
+    )
+
+    # filter out any overshoot in the input allocation arrays by
+    # removing zero-ed tail entries which should start at a certain
+    # index.
+    i_win = i_win[i_win != 0]
+    y_out = y_out[:i_win.size]
+
+    return nb, i_win, y_out
+
+
+@jit(
+    nopython=True,
+    nogil=True,
+)
+def _m4(
+
+    xs: np.ndarray,
+    ys: np.ndarray,
+
+    frames: int,
+
+    # TODO: using this approach by having the ``.zeros()`` alloc lines
+    # below, in put python was causing segs faults and alloc crashes..
+    # we might need to see how it behaves with shm arrays and consider
+    # allocating them once at startup?
+
+    # pre-alloc array of x indices mapping to the start
+    # of each window used for downsampling in y.
+    # i_win: np.ndarray,
+    # pre-alloc array of output downsampled y values
+    # y_out: np.ndarray,
+
+    x_start: int,
+    step: float,
+
+) -> int:
+    # nbins = len(i_win)
+    # count = len(xs)
+
+    # these are pre-allocated and mutated by ``numba``
+    # code in-place.
+    y_out = np.zeros((frames, 4), ys.dtype)
+    i_win = np.zeros(frames, xs.dtype)
+
+    bincount = 0
+    x_left = x_start
+
+    # Find the first window's starting value which *includes* the
+    # first value in the x-domain array, i.e. the first
+    # "left-side-of-window" **plus** the downsampling step,
+    # creates a window which includes the first x **value**.
+    while xs[0] >= x_left + step:
+        x_left += step
+
+    # set all bins in the left-most entry to the starting left-most x value
+    # (aka a row broadcast).
+    i_win[bincount] = x_left
+    # set all y-values to the first value passed in.
+    y_out[bincount] = ys[0]
+
+    for i in range(len(xs)):
+        x = xs[i]
+        y = ys[i]
+        if x < x_left + step:   # the current window "step" is [bin, bin+1)
+            y_out[bincount, 1] = min(y, y_out[bincount, 1])
+            y_out[bincount, 2] = max(y, y_out[bincount, 2])
+            y_out[bincount, 3] = y
+        else:
+            # Find the next bin
+            while x >= x_left + step:
+                x_left += step
+
+            bincount += 1
+            i_win[bincount] = x_left
+            y_out[bincount] = y
+
+    return bincount, i_win, y_out
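A quick sketch of driving the new downsampler with synthetic data, assuming ``ds_m4`` is imported from this module. ``uppx`` is datums-per-pixel, and each output window carries 4 y-values (entry, min, max, last) which can be flattened for line plotting:

    import numpy as np

    x = np.arange(100_000, dtype=np.float64)
    y = np.sin(x / 500.0)

    uppx = len(x) / 1_000  # ~1000 px worth of bins
    nb, i_win, y_out = ds_m4(x, y, uppx=uppx)

    # broadcast each window's x to its 4 y-points then flatten
    flat_x = np.broadcast_to(i_win[:, None], y_out.shape).flatten()
    flat_y = y_out.flatten()
    print(nb, flat_x.size, flat_y.size)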
@ -24,7 +24,7 @@ from typing import Optional, Callable
|
||||||
import inspect
|
import inspect
|
||||||
import numpy as np
|
import numpy as np
|
||||||
import pyqtgraph as pg
|
import pyqtgraph as pg
|
||||||
from PyQt5 import QtCore, QtGui, QtWidgets
|
from PyQt5 import QtCore, QtWidgets
|
||||||
from PyQt5.QtCore import QPointF, QRectF
|
from PyQt5.QtCore import QPointF, QRectF
|
||||||
|
|
||||||
from ._style import (
|
from ._style import (
|
||||||
|
@ -43,8 +43,8 @@ log = get_logger(__name__)
|
||||||
# latency (in terms of perceived lag in cross hair) so really be sure
|
# latency (in terms of perceived lag in cross hair) so really be sure
|
||||||
# there's an improvement if you want to change it!
|
# there's an improvement if you want to change it!
|
||||||
|
|
||||||
_mouse_rate_limit = 120 # TODO; should we calc current screen refresh rate?
|
_mouse_rate_limit = 60 # TODO; should we calc current screen refresh rate?
|
||||||
_debounce_delay = 1 / 40
|
_debounce_delay = 0
|
||||||
_ch_label_opac = 1
|
_ch_label_opac = 1
|
||||||
|
|
||||||
|
|
||||||
|
@@ -95,25 +95,33 @@ class LineDot(pg.CurvePoint):

     def event(
         self,

         ev: QtCore.QEvent,

-    ) -> None:
-        if not isinstance(
-            ev, QtCore.QDynamicPropertyChangeEvent
-        ) or self.curve() is None:
+    ) -> bool:
+        if (
+            not isinstance(ev, QtCore.QDynamicPropertyChangeEvent)
+            or self.curve() is None
+        ):
             return False

-        (x, y) = self.curve().getData()
-        index = self.property('index')
-        # first = self._plot._arrays['ohlc'][0]['index']
-        # first = x[0]
-        # i = index - first
-        i = index - x[0]
-        if i > 0 and i < len(y):
-            newPos = (index, y[i])
-            QtWidgets.QGraphicsItem.setPos(self, *newPos)
-            return True
+        # TODO: get rid of this ``.getData()`` and
+        # make a more pythonic api to retreive backing
+        # numpy arrays...
+        # (x, y) = self.curve().getData()
+        # index = self.property('index')
+        # # first = self._plot._arrays['ohlc'][0]['index']
+        # # first = x[0]
+        # # i = index - first
+        # if index:
+        #     i = round(index - x[0])
+        #     if i > 0 and i < len(y):
+        #         newPos = (index, y[i])
+        #         QtWidgets.QGraphicsItem.setPos(
+        #             self,
+        #             *newPos,
+        #         )
+        #         return True

         return False
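The `) -> None:` to `) -> bool:` fix above matters because a Qt `event()` override must report whether the event was recognized; returning `None` is falsy but mis-typed. A minimal sketch of that contract on a plain `QObject` subclass (hypothetical class, stock PyQt5 only):

from PyQt5 import QtCore

class PropWatcher(QtCore.QObject):
    def event(self, ev: QtCore.QEvent) -> bool:
        if ev.type() == QtCore.QEvent.DynamicPropertyChange:
            # handle it here and report the event as consumed
            return True
        # let the base class dispatch everything else
        return super().event(ev)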
@@ -188,6 +196,9 @@ class ContentsLabel(pg.LabelItem):

         self.setText(
             "<b>i</b>:{index}<br/>"
+            # NB: these fields must be indexed in the correct order via
+            # the slice syntax below.
+            "<b>epoch</b>:{}<br/>"
             "<b>O</b>:{}<br/>"
             "<b>H</b>:{}<br/>"
             "<b>L</b>:{}<br/>"
@@ -195,7 +206,15 @@ class ContentsLabel(pg.LabelItem):
             "<b>V</b>:{}<br/>"
             "<b>wap</b>:{}".format(
                 *array[index - first][
-                    ['open', 'high', 'low', 'close', 'volume', 'bar_wap']
+                    [
+                        'time',
+                        'open',
+                        'high',
+                        'low',
+                        'close',
+                        'volume',
+                        'bar_wap',
+                    ]
                 ],
                 name=name,
                 index=index,
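The `array[index - first][[...]]` expression relies on numpy's multi-field indexing of structured arrays: a list of field names selects those fields in list order, which is why the new comment warns about ordering once 'time' is prepended. A small illustration (made-up sample values):

import numpy as np

ohlc = np.array(
    [(1640995200, 4766.2, 4778.7, 4765.1, 4778.0)],
    dtype=[
        ('time', 'i8'),
        ('open', 'f8'), ('high', 'f8'),
        ('low', 'f8'), ('close', 'f8'),
    ],
)
# fields come back in *list* order, not dtype order
row = ohlc[0][['time', 'open', 'close']]
print('{} {} {}'.format(*row))  # 1640995200 4766.2 4778.0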
@@ -240,18 +259,20 @@ class ContentsLabels:
     def update_labels(
         self,
         index: int,
-        # array_name: str,

     ) -> None:
-        # for name, (label, update) in self._labels.items():
         for chart, name, label, update in self._labels:

-            if not (index >= 0 and index < chart._arrays['ohlc'][-1]['index']):
-                # out of range
-                print('out of range?')
-                continue
+            flow = chart._flows[name]
+            array = flow.shm.array

-            array = chart._arrays[name]
+            if not (
+                index >= 0
+                and index < array[-1]['index']
+            ):
+                # out of range
+                print('WTF out of range?')
+                continue

             # call provided update func with data point
             try:
@@ -276,7 +297,7 @@ class ContentsLabels:
     ) -> ContentsLabel:

         label = ContentsLabel(
-            view=chart._vb,
+            view=chart.view,
             anchor_at=anchor_at,
         )
         self._labels.append(
@@ -288,7 +309,8 @@ class ContentsLabels:


 class Cursor(pg.GraphicsObject):
-    '''Multi-plot cursor for use on a ``LinkedSplits`` chart (set).
+    '''
+    Multi-plot cursor for use on a ``LinkedSplits`` chart (set).

     '''
     def __init__(
@@ -303,7 +325,7 @@ class Cursor(pg.GraphicsObject):

         self.linked = linkedsplits
         self.graphics: dict[str, pg.GraphicsObject] = {}
-        self.plots: List['PlotChartWidget'] = []  # type: ignore # noqa
+        self.plots: list['PlotChartWidget'] = []  # type: ignore # noqa
         self.active_plot = None
         self.digits: int = digits
         self._datum_xy: tuple[int, float] = (0, 0)
@@ -365,7 +387,13 @@ class Cursor(pg.GraphicsObject):
         self,
         plot: 'ChartPlotWidget',  # noqa
         digits: int = 0,

     ) -> None:
+        '''
+        Add chart to tracked set such that a cross-hair and possibly
+        curve tracking cursor can be drawn on the plot.
+
+        '''
         # add ``pg.graphicsItems.InfiniteLine``s
         # vertical and horizonal lines and a y-axis label

@@ -378,7 +406,8 @@ class Cursor(pg.GraphicsObject):

         yl = YAxisLabel(
             chart=plot,
-            parent=plot.getAxis('right'),
+            # parent=plot.getAxis('right'),
+            parent=plot.pi_overlay.get_axis(plot.plotItem, 'right'),
             digits=digits or self.digits,
             opacity=_ch_label_opac,
             bg_color=self.label_color,
@@ -393,6 +422,7 @@ class Cursor(pg.GraphicsObject):
             slot=self.mouseMoved,
             delay=_debounce_delay,
         )
+
         px_enter = pg.SignalProxy(
             plot.sig_mouse_enter,
             rateLimit=_mouse_rate_limit,
@@ -418,24 +448,39 @@ class Cursor(pg.GraphicsObject):
         # keep x-axis right below main chart
         plot_index = -1 if _xaxis_at == 'bottom' else 0

-        self.xaxis_label = XAxisLabel(
-            parent=self.plots[plot_index].getAxis('bottom'),
-            opacity=_ch_label_opac,
-            bg_color=self.label_color,
-        )
-        # place label off-screen during startup
-        self.xaxis_label.setPos(self.plots[0].mapFromView(QPointF(0, 0)))
+        # ONLY create an x-axis label for the cursor
+        # if this plot owns the 'bottom' axis.
+        # if 'bottom' in plot.plotItem.axes:
+        if plot.linked.xaxis_chart is plot:
+            xlabel = self.xaxis_label = XAxisLabel(
+                parent=self.plots[plot_index].getAxis('bottom'),
+                # parent=self.plots[plot_index].pi_overlay.get_axis(
+                #     plot.plotItem, 'bottom'
+                # ),
+
+                opacity=_ch_label_opac,
+                bg_color=self.label_color,
+            )
+            # place label off-screen during startup
+            xlabel.setPos(
+                self.plots[0].mapFromView(QPointF(0, 0))
+            )
+            xlabel.show()

     def add_curve_cursor(
         self,
         plot: 'ChartPlotWidget',  # noqa
         curve: 'PlotCurveItem',  # noqa

     ) -> LineDot:
         # if this plot contains curves add line dot "cursors" to denote
         # the current sample under the mouse
+        main_flow = plot._flows[plot.name]
+        # read out last index
+        i = main_flow.shm.array[-1]['index']
         cursor = LineDot(
             curve,
-            index=plot._arrays['ohlc'][-1]['index'],
+            index=i,
             plot=plot
         )
         plot.addItem(cursor)
@@ -459,12 +504,15 @@ class Cursor(pg.GraphicsObject):

     def mouseMoved(
         self,
-        evt: 'tuple[QMouseEvent]',  # noqa
-    ) -> None:  # noqa
-        """Update horizonal and vertical lines when mouse moves inside
+        coords: tuple[QPointF],  # noqa

+    ) -> None:
+        '''
+        Update horizonal and vertical lines when mouse moves inside
         either the main chart or any indicator subplot.
-        """
-        pos = evt[0]
+
+        '''
+        pos = coords[0]

         # find position inside active plot
         try:
@@ -483,24 +531,27 @@ class Cursor(pg.GraphicsObject):

         ix = round(x)  # since bars are centered around index

+        # px perfect...
+        line_offset = self._lw / 2
+
         # round y value to nearest tick step
         m = self._y_incr_mult
         iy = round(y * m) / m
+        vl_y = iy - line_offset

-        # px perfect...
-        line_offset = self._lw / 2
-
         # update y-range items
         if iy != last_iy:

             if self._y_label_update:
                 self.graphics[self.active_plot]['yl'].update_label(
-                    abs_pos=plot.mapFromView(QPointF(ix, iy)),
+                    # abs_pos=plot.mapFromView(QPointF(ix, iy)),
+                    abs_pos=plot.mapFromView(QPointF(ix, vl_y)),
                     value=iy
                 )

                 # only update horizontal xhair line if label is enabled
-                self.graphics[plot]['hl'].setY(iy)
+                # self.graphics[plot]['hl'].setY(iy)
+                self.graphics[plot]['hl'].setY(vl_y)

         # update all trackers
         for item in self._trackers:
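The `iy = round(y * m) / m` line above snaps the cursor's y value to the nearest tick increment by scaling with a precomputed multiplier (assuming `_y_incr_mult` is `1 / tick_step`). In isolation:

def snap_to_tick(y: float, tick_step: float) -> float:
    # scale so one tick == 1.0, round, then scale back
    m = 1 / tick_step
    return round(y * m) / m

assert snap_to_tick(4778.013, 0.25) == 4778.0
assert snap_to_tick(4778.13, 0.25) == 4778.25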
@@ -513,27 +564,37 @@ class Cursor(pg.GraphicsObject):
             # with cursor movement
             self.contents_labels.update_labels(ix)

+            vl_x = ix + line_offset
             for plot, opts in self.graphics.items():

-                # update the chart's "contents" label
-                # plot.update_contents_labels(ix)
-
                 # move the vertical line to the current "center of bar"
-                opts['vl'].setX(ix + line_offset)
+                opts['vl'].setX(vl_x)

                 # update all subscribed curve dots
                 for cursor in opts.get('cursors', ()):
                     cursor.setIndex(ix)

-                # update the label on the bottom of the crosshair
-                self.xaxis_label.update_label(
-
-                    # XXX: requires:
-                    # https://github.com/pyqtgraph/pyqtgraph/pull/1418
-                    # otherwise gobbles tons of CPU..
-
-                    # map back to abs (label-local) coordinates
-                    abs_pos=plot.mapFromView(QPointF(ix + line_offset, iy)),
-                    value=ix,
-                )
+                # Update the label on the bottom of the crosshair.
+                # TODO: make this an up-front calc that we update
+                # on axis-widget resize events instead of on every mouse
+                # update cylce.
+
+                # left axis offset width for calcuating
+                # absolute x-axis label placement.
+                left_axis_width = 0
+                if len(plot.pi_overlay.overlays):
+                    # breakpoint()
+                    lefts = plot.pi_overlay.get_axes('left')
+                    if lefts:
+                        for left in lefts:
+                            left_axis_width += left.width()
+
+                # map back to abs (label-local) coordinates
+                self.xaxis_label.update_label(
+                    abs_pos=(
+                        plot.mapFromView(QPointF(vl_x, iy)) -
+                        QPointF(left_axis_width, 0)
+                    ),
+                    value=ix,
+                )
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -18,155 +18,467 @@
 Fast, smooth, sexy curves.

 """
-from typing import Tuple
+from contextlib import contextmanager as cm
+from typing import Optional, Callable
+
+import numpy as np
 import pyqtgraph as pg
-from PyQt5 import QtCore, QtGui, QtWidgets
-from .._profile import pg_profile_enabled
+from PyQt5 import QtWidgets
+from PyQt5.QtWidgets import QGraphicsItem
+from PyQt5.QtCore import (
+    Qt,
+    QLineF,
+    QSizeF,
+    QRectF,
+    # QRect,
+    QPointF,
+)
+from PyQt5.QtGui import (
+    QPainter,
+    QPainterPath,
+)
+from .._profile import pg_profile_enabled, ms_slower_then
+from ._style import hcolor
+# from ._compression import (
+#     # ohlc_to_m4_line,
+#     ds_m4,
+# )
+from ..log import get_logger


-# TODO: got a feeling that dropping this inheritance gets us even more speedups
-class FastAppendCurve(pg.PlotCurveItem):
-
-    def __init__(self, *args, **kwargs):
+log = get_logger(__name__)
+
+
+_line_styles: dict[str, int] = {
+    'solid': Qt.PenStyle.SolidLine,
+    'dash': Qt.PenStyle.DashLine,
+    'dot': Qt.PenStyle.DotLine,
+    'dashdot': Qt.PenStyle.DashDotLine,
+}
+
+
+class Curve(pg.GraphicsObject):
+    '''
+    A faster, simpler, append friendly version of
+    ``pyqtgraph.PlotCurveItem`` built for highly customizable real-time
+    updates.
+
+    This type is a much stripped down version of a ``pyqtgraph`` style
+    "graphics object" in the sense that the internal lower level
+    graphics which are drawn in the ``.paint()`` method are actually
+    rendered outside of this class entirely and instead are assigned as
+    state (instance vars) here and then drawn during a Qt graphics
+    cycle.
+
+    The main motivation for this more modular, composed design is that
+    lower level graphics data can be rendered in different threads and
+    then read and drawn in this main thread without having to worry
+    about dealing with Qt's concurrency primitives. See
+    ``piker.ui._flows.Renderer`` for details and logic related to lower
+    level path generation and incremental update. The main differences in
+    the path generation code include:
+
+    - avoiding regeneration of the entire historical path where possible
+      and instead only updating the "new" segment(s) via a ``numpy``
+      array diff calc.
+    - here, the "last" graphics datum-segment is drawn independently
+      such that near-term (high frequency) discrete-time-sampled style
+      updates don't trigger a full path redraw.
+
+    '''
+
+    # sub-type customization methods
+    sub_br: Optional[Callable] = None
+    sub_paint: Optional[Callable] = None
+    declare_paintables: Optional[Callable] = None
+
+    def __init__(
+        self,
+        *args,
+
+        step_mode: bool = False,
+        color: str = 'default_lightest',
+        fill_color: Optional[str] = None,
+        style: str = 'solid',
+        name: Optional[str] = None,
+        use_fpath: bool = True,
+
+        **kwargs
+
+    ) -> None:
+
+        self._name = name
+
+        # brutaaalll, see comments within..
+        self.yData = None
+        self.xData = None
+
+        # self._last_cap: int = 0
+        self.path: Optional[QPainterPath] = None
+
+        # additional path used for appends which tries to avoid
+        # triggering an update/redraw of the presumably larger
+        # historical ``.path`` above.
+        self.use_fpath = use_fpath
+        self.fast_path: Optional[QPainterPath] = None

         # TODO: we can probably just dispense with the parent since
         # we're basically only using the pen setting now...
         super().__init__(*args, **kwargs)

-        self._last_line: QtCore.QLineF = None
-        self._xrange: Tuple[int, int] = self.dataBounds(ax=0)
+        # all history of curve is drawn in single px thickness
+        pen = pg.mkPen(hcolor(color))
+        pen.setStyle(_line_styles[style])
+
+        if 'dash' in style:
+            pen.setDashPattern([8, 3])
+
+        self._pen = pen
+
+        # last segment is drawn in 2px thickness for emphasis
+        # self.last_step_pen = pg.mkPen(hcolor(color), width=2)
+        self.last_step_pen = pg.mkPen(pen, width=2)
+
+        # self._last_line: Optional[QLineF] = None
+        self._last_line = QLineF()
+        self._last_w: float = 1
+
+        # flat-top style histogram-like discrete curve
+        # self._step_mode: bool = step_mode
+
+        # self._fill = True
+        self._brush = pg.functions.mkBrush(hcolor(fill_color or color))
+
+        # NOTE: this setting seems to mostly prevent redraws on mouse
+        # interaction which is a huge boon for avg interaction latency.

         # TODO: one question still remaining is if this makes trasform
         # interactions slower (such as zooming) and if so maybe if/when
         # we implement a "history" mode for the view we disable this in
         # that mode?
-        self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+        # don't enable caching by default for the case where the
+        # only thing drawn is the "last" line segment which can
+        # have a weird artifact where it won't be fully drawn to its
+        # endpoint (something we saw on trade rate curves)
+        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

-    def update_from_array(
-        self,
-        x,
-        y,
-    ) -> QtGui.QPainterPath:
-
-        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
-        flip_cache = False
-
-        # print(f"xrange: {self._xrange}")
-        istart, istop = self._xrange
-
-        prepend_length = istart - x[0]
-        append_length = x[-1] - istop
-
-        if self.path is None or prepend_length:
-            self.path = pg.functions.arrayToQPath(
-                x[:-1],
-                y[:-1],
-                connect='all'
-            )
-            profiler('generate fresh path')
-
-        # TODO: get this working - right now it's giving heck on vwap...
-        # if prepend_length:
-        #     breakpoint()
-
-        #     prepend_path = pg.functions.arrayToQPath(
-        #         x[0:prepend_length],
-        #         y[0:prepend_length],
-        #         connect='all'
-        #     )
-
-        #     # swap prepend path in "front"
-        #     old_path = self.path
-        #     self.path = prepend_path
-        #     # self.path.moveTo(new_x[0], new_y[0])
-        #     self.path.connectPath(old_path)
-
-        if append_length:
-            # print(f"append_length: {append_length}")
-            new_x = x[-append_length - 2:-1]
-            new_y = y[-append_length - 2:-1]
-            # print((new_x, new_y))
-
-            append_path = pg.functions.arrayToQPath(
-                new_x,
-                new_y,
-                connect='all'
-            )
-            # print(f"append_path br: {append_path.boundingRect()}")
-            # self.path.moveTo(new_x[0], new_y[0])
-            # self.path.connectPath(append_path)
-            self.path.connectPath(append_path)
-
-            # XXX: pretty annoying but, without this there's little
-            # artefacts on the append updates to the curve...
-            self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
-            self.prepareGeometryChange()
-            flip_cache = True
-
-        # print(f"update br: {self.path.boundingRect()}")
+        # XXX: see explanation for different caching modes:
+        # https://stackoverflow.com/a/39410081
+        # seems to only be useful if we don't re-generate the entire
+        # QPainterPath every time
+        # curve.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+
+        # don't ever use this - it's a colossal nightmare of artefacts
+        # and is disastrous for performance.
+        # curve.setCacheMode(QtWidgets.QGraphicsItem.ItemCoordinateCache)
+
+        # allow sub-type customization
+        declare = self.declare_paintables
+        if declare:
+            declare()
+
+    # TODO: probably stick this in a new parent
+    # type which will contain our own version of
+    # what ``PlotCurveItem`` had in terms of base
+    # functionality? A `FlowGraphic` maybe?
+    def x_uppx(self) -> int:
+
+        px_vecs = self.pixelVectors()[0]
+        if px_vecs:
+            xs_in_px = px_vecs.x()
+            return round(xs_in_px)
+        else:
+            return 0
+
+    def px_width(self) -> float:
+
+        vb = self.getViewBox()
+        if not vb:
+            return 0
+
+        vr = self.viewRect()
+        l, r = int(vr.left()), int(vr.right())
+
+        start, stop = self._xrange
+        lbar = max(l, start)
+        rbar = min(r, stop)
+
+        return vb.mapViewToDevice(
+            QLineF(lbar, 0, rbar, 0)
+        ).length()

         # XXX: lol brutal, the internals of `CurvePoint` (inherited by
         # our `LineDot`) required ``.getData()`` to work..
-        self.xData = x
-        self.yData = y
-
-        self._xrange = x[0], x[-1]
-        self._last_line = QtCore.QLineF(x[-2], y[-2], x[-1], y[-1])
-
-        # trigger redraw of path
-        # do update before reverting to cache mode
-        self.prepareGeometryChange()
-        self.update()
-
-        if flip_cache:
-            self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+    def getData(self):
+        return self.xData, self.yData
+
+    def clear(self):
+        '''
+        Clear internal graphics making object ready for full re-draw.
+
+        '''
+        # NOTE: original code from ``pg.PlotCurveItem``
+        self.xData = None
+        self.yData = None
+
+        # XXX: previously, if not trying to leverage `.reserve()` allocs
+        # then you might as well create a new one..
+        # self.path = None
+
+        # path reservation aware non-mem de-alloc cleaning
+        if self.path:
+            self.path.clear()
+
+        if self.fast_path:
+            # self.fast_path.clear()
+            self.fast_path = None
+
+    @cm
+    def reset_cache(self) -> None:
+        self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
+        yield
+        self.setCacheMode(QGraphicsItem.DeviceCoordinateCache)

     def boundingRect(self):
+        '''
+        Compute and then cache our rect.
+        '''
         if self.path is None:
-            return QtGui.QPainterPath().boundingRect()
+            return QPainterPath().boundingRect()
         else:
             # dynamically override this method after initial
             # path is created to avoid requiring the above None check
-            self.boundingRect = self._br
-            return self._br()
+            self.boundingRect = self._path_br
+            return self._path_br()

-    def _br(self):
-        """Post init ``.boundingRect()```.
+    def _path_br(self):
+        '''
+        Post init ``.boundingRect()```.

-        """
+        '''
+        # hb = self.path.boundingRect()
         hb = self.path.controlPointRect()
         hb_size = hb.size()

+        fp = self.fast_path
+        if fp:
+            fhb = fp.controlPointRect()
+            hb_size = fhb.size() + hb_size
+
         # print(f'hb_size: {hb_size}')

-        w = hb_size.width() + 1
-        h = hb_size.height() + 1
+        # if self._last_step_rect:
+        #     hb_size += self._last_step_rect.size()

-        br = QtCore.QRectF(
+        # if self._line:
+        #     br = self._last_step_rect.bottomRight()
+
+        # tl = QPointF(
+        #     # self._vr[0],
+        #     # hb.topLeft().y(),
+        #     # 0,
+        #     # hb_size.height() + 1
+        # )
+
+        # br = self._last_step_rect.bottomRight()
+
+        w = hb_size.width()
+        h = hb_size.height()
+
+        sbr = self.sub_br
+        if sbr:
+            w, h = self.sub_br(w, h)
+        else:
+            # assume plain line graphic and use
+            # default unit step in each direction.
+
+            # only on a plane line do we include
+            # and extra index step's worth of width
+            # since in the step case the end of the curve
+            # actually terminates earlier so we don't need
+            # this for the last step.
+            w += self._last_w
+            # ll = self._last_line
+            h += 1  # ll.y2() - ll.y1()
+
+        # br = QPointF(
+        #     self._vr[-1],
+        #     # tl.x() + w,
+        #     tl.y() + h,
+        # )
+
+        br = QRectF(

             # top left
-            QtCore.QPointF(hb.topLeft()),
+            # hb.topLeft()
+            # tl,
+            QPointF(hb.topLeft()),

+            # br,
             # total size
-            QtCore.QSizeF(w, h)
+            # QSizeF(hb_size)
+            # hb_size,
+            QSizeF(w, h)
         )
         # print(f'bounding rect: {br}')
         return br

     def paint(
         self,
-        p: QtGui.QPainter,
+        p: QPainter,
         opt: QtWidgets.QStyleOptionGraphicsItem,
         w: QtWidgets.QWidget

     ) -> None:

-        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
-        # p.setRenderHint(p.Antialiasing, True)
+        profiler = pg.debug.Profiler(
+            msg=f'Curve.paint(): `{self._name}`',
+            disabled=not pg_profile_enabled(),
+            ms_threshold=ms_slower_then,
+        )

-        p.setPen(self.opts['pen'])
+        sub_paint = self.sub_paint
+        if sub_paint:
+            sub_paint(p, profiler)
+
+        p.setPen(self.last_step_pen)
         p.drawLine(self._last_line)
         profiler('.drawLine()')
+        p.setPen(self._pen)

-        p.drawPath(self.path)
-        profiler('.drawPath()')
+        path = self.path
+        # cap = path.capacity()
+        # if cap != self._last_cap:
+        #     print(f'NEW CAPACITY: {self._last_cap} -> {cap}')
+        #     self._last_cap = cap
+
+        if path:
+            p.drawPath(path)
+            profiler(f'.drawPath(path): {path.capacity()}')
+
+        fp = self.fast_path
+        if fp:
+            p.drawPath(fp)
+            profiler('.drawPath(fast_path)')
+
+        # TODO: try out new work from `pyqtgraph` main which should
+        # repair horrid perf (pretty sure i did and it was still
+        # horrible?):
+        # https://github.com/pyqtgraph/pyqtgraph/pull/2032
+        # if self._fill:
+        #     brush = self.opts['brush']
+        #     p.fillPath(self.path, brush)
+
+    def draw_last_datum(
+        self,
+        path: QPainterPath,
+        src_data: np.ndarray,
+        render_data: np.ndarray,
+        reset: bool,
+        array_key: str,
+
+    ) -> None:
+        # default line draw last call
+        # with self.reset_cache():
+        x = render_data['index']
+        y = render_data[array_key]
+
+        # draw the "current" step graphic segment so it
+        # lines up with the "middle" of the current
+        # (OHLC) sample.
+        self._last_line = QLineF(
+            x[-2], y[-2],
+            x[-1], y[-1],
+        )
+
+        return x, y
+
+
+# TODO: this should probably be a "downsampled" curve type
+# that draws a bar-style (but for the px column) last graphics
+# element such that the current datum in view can be shown
+# (via it's max / min) even when highly zoomed out.
+class FlattenedOHLC(Curve):
+
+    def draw_last_datum(
+        self,
+        path: QPainterPath,
+        src_data: np.ndarray,
+        render_data: np.ndarray,
+        reset: bool,
+        array_key: str,
+
+    ) -> None:
+        lasts = src_data[-2:]
+        x = lasts['index']
+        y = lasts['close']
+
+        # draw the "current" step graphic segment so it
+        # lines up with the "middle" of the current
+        # (OHLC) sample.
+        self._last_line = QLineF(
+            x[-2], y[-2],
+            x[-1], y[-1]
+        )
+        return x, y
+
+
+class StepCurve(Curve):
+
+    def declare_paintables(
+        self,
+    ) -> None:
+        self._last_step_rect = QRectF()
+
+    def draw_last_datum(
+        self,
+        path: QPainterPath,
+        src_data: np.ndarray,
+        render_data: np.ndarray,
+        reset: bool,
+        array_key: str,
+
+        w: float = 0.5,
+
+    ) -> None:
+
+        # TODO: remove this and instead place all step curve
+        # updating into pre-path data render callbacks.
+        # full input data
+        x = src_data['index']
+        y = src_data[array_key]
+
+        x_last = x[-1]
+        y_last = y[-1]
+
+        # lol, commenting this makes step curves
+        # all "black" for me :eyeroll:..
+        self._last_line = QLineF(
+            x_last - w, 0,
+            x_last + w, 0,
+        )
+        self._last_step_rect = QRectF(
+            x_last - w, 0,
+            x_last + w, y_last,
+        )
+        return x, y
+
+    def sub_paint(
+        self,
+        p: QPainter,
+        profiler: pg.debug.Profiler,
+
+    ) -> None:
+        # p.drawLines(*tuple(filter(bool, self._last_step_lines)))
+        # p.drawRect(self._last_step_rect)
+        p.fillRect(self._last_step_rect, self._brush)
+        profiler('.fillRect()')
+
+    def sub_br(
+        self,
+        path_w: float,
+        path_h: float,
+
+    ) -> (float, float):
+        # passthrough
+        return path_w, path_h
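The new `reset_cache()` context manager above codifies a common Qt pattern: `DeviceCoordinateCache` makes repaints cheap during mouse interaction, but it caches stale pixels across path mutations, so the cache is dropped around a geometry change and restored afterwards. A minimal standalone sketch of the same pattern (hypothetical helper, not piker code):

from contextlib import contextmanager
from PyQt5.QtWidgets import QGraphicsItem

@contextmanager
def uncached(item: QGraphicsItem):
    # disable pixel caching while the underlying geometry mutates..
    item.setCacheMode(QGraphicsItem.NoCache)
    try:
        yield item
    finally:
        # ..then restore cheap cached repaints for mouse interaction
        item.setCacheMode(QGraphicsItem.DeviceCoordinateCache)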
piker/ui/_display.py (1264 changed lines)
File diff suppressed because it is too large
@@ -342,7 +342,8 @@ class SelectRect(QtGui.QGraphicsRectItem):
         ixmn, ixmx = round(xmn), round(xmx)
         nbars = ixmx - ixmn + 1

-        data = self._chart._arrays['ohlc'][ixmn:ixmx]
+        chart = self._chart
+        data = chart._flows[chart.name].shm.array[ixmn:ixmx]

         if len(data):
             std = data['close'].std()
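With the `._flows` refactor the selection-rect stats read from the shared-mem array; `data['close'].std()` is just numpy's population (ddof=0) standard deviation over one field of a structured array slice, e.g.:

import numpy as np

data = np.array(
    [(4778.0,), (4780.5,), (4779.25,)],
    dtype=[('close', 'f8')],
)
std = data['close'].std()  # population standard deviation of the column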
@@ -26,7 +26,9 @@ import trio
 from PyQt5 import QtCore
 from PyQt5.QtCore import QEvent, pyqtBoundSignal
 from PyQt5.QtWidgets import QWidget
-from PyQt5.QtWidgets import QGraphicsSceneMouseEvent as gs_mouse
+from PyQt5.QtWidgets import (
+    QGraphicsSceneMouseEvent as gs_mouse,
+)


 MOUSE_EVENTS = {

@@ -129,6 +131,8 @@ class EventRelay(QtCore.QObject):
         # TODO: is there a global setting for this?
         if ev.isAutoRepeat() and self._filter_auto_repeats:
             ev.ignore()
+            # filter out this event and stop it's processing
+            # https://doc.qt.io/qt-5/qobject.html#installEventFilter
             return True

         # NOTE: the event object instance coming out

@@ -152,9 +156,6 @@ class EventRelay(QtCore.QObject):

         # **do not** filter out this event
         # and instead forward to the source widget
-        return False
-
-        # filter out this event
         # https://doc.qt.io/qt-5/qobject.html#installEventFilter
         return False
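The return-value comments added above are the whole contract of `QObject.installEventFilter()`: a filter returning True swallows the event, False forwards it to the watched object. A minimal sketch filtering key auto-repeats (hypothetical class, stock PyQt5 only):

from PyQt5 import QtCore

class NoAutoRepeat(QtCore.QObject):
    def eventFilter(self, src, ev) -> bool:
        if (
            ev.type() == QtCore.QEvent.KeyPress
            and ev.isAutoRepeat()
        ):
            ev.ignore()
            # True -> stop processing, the widget never sees it
            return True
        # False -> forward to the source widget as usual
        return False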
@@ -49,10 +49,6 @@ from . import _style
 log = get_logger(__name__)

 # pyqtgraph global config
-# might as well enable this for now?
-pg.useOpenGL = True
-pg.enableExperimental = True
-
 # engage core tweaks that give us better response
 # latency then the average pg user
 _do_overrides()

@@ -61,7 +57,9 @@ _do_overrides()
 # XXX: pretty sure none of this shit works on linux as per:
 # https://bugreports.qt.io/browse/QTBUG-53022
 # it seems to work on windows.. no idea wtf is up.
+is_windows = False
 if platform.system() == "Windows":
+    is_windows = True

 # Proper high DPI scaling is available in Qt >= 5.6.0. This attibute
 # must be set before creating the application

@@ -182,6 +180,8 @@ def run_qtractor(

     window.main_widget = main_widget
     window.setCentralWidget(instance)
+    if is_windows:
+        window.configure_to_desktop()

     # actually render to screen
     window.show()
@@ -0,0 +1,83 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""
+Feed status and controls widget(s) for embedding in a UI-pane.
+
+"""
+
+from __future__ import annotations
+from textwrap import dedent
+from typing import TYPE_CHECKING
+
+# from PyQt5.QtCore import Qt
+
+from ._style import _font, _font_small
+# from ..calc import humanize
+from ._label import FormatLabel
+
+if TYPE_CHECKING:
+    from ._chart import ChartPlotWidget
+    from ..data.feed import Feed
+    from ._forms import FieldsForm
+
+
+def mk_feed_label(
+    form: FieldsForm,
+    feed: Feed,
+    chart: ChartPlotWidget,
+
+) -> FormatLabel:
+    '''
+    Generate a label from feed meta-data to be displayed
+    in a UI sidepane.
+
+    TODO: eventually buttons for changing settings over
+    a feed control protocol.
+
+    '''
+    status = feed.status
+    assert status
+
+    msg = dedent("""
+        actor: **{actor_name}**\n
+        |_ @**{host}:{port}**\n
+    """)
+
+    for key, val in status.items():
+        if key in ('host', 'port', 'actor_name'):
+            continue
+        msg += f'\n|_ {key}: **{{{key}}}**\n'
+
+    feed_label = FormatLabel(
+        fmt_str=msg,
+        # |_ streams: **{symbols}**\n
+        font=_font.font,
+        font_size=_font_small.px_size,
+        font_color='default_lightest',
+    )
+
+    # form.vbox.setAlignment(feed_label, Qt.AlignBottom)
+    # form.vbox.setAlignment(Qt.AlignBottom)
+    _ = chart.height() - (
+        form.height() +
+        form.fill_bar.height()
+        # feed_label.height()
+    )
+
+    feed_label.format(**feed.status)
+
+    return feed_label

File diff suppressed because it is too large
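`mk_feed_label()` builds a `str.format()` template from the feed's status dict and renders it once at the end with `feed_label.format(**feed.status)`. The same two-phase pattern in plain Python, with hypothetical status values:

from textwrap import dedent

status = {
    'actor_name': 'brokerd.ib',
    'host': '127.0.0.1',
    'port': 4002,
    'hist_shm': 'pikerd.ib.hist',
}

msg = dedent("""
    actor: **{actor_name}**\n
    |_ @**{host}:{port}**\n
""")
for key in status:
    if key in ('host', 'port', 'actor_name'):
        continue
    # double braces survive the f-string and the later .format() call
    msg += f'\n|_ {key}: **{{{key}}}**\n'

print(msg.format(**status))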
@@ -21,6 +21,7 @@ Text entry "forms" widgets (mostly for configuration and UI user input).
 from __future__ import annotations
 from contextlib import asynccontextmanager
 from functools import partial
+from math import floor
 from typing import (
     Optional, Any, Callable, Awaitable
 )

@@ -48,7 +49,7 @@ from ._style import hcolor, _font, _font_small, DpiAwareFont
 from ._label import FormatLabel


-class FontAndChartAwareLineEdit(QLineEdit):
+class Edit(QLineEdit):

     def __init__(

@@ -105,7 +106,7 @@ class FontAndChartAwareLineEdit(QLineEdit):
         # TODO: somehow this math ain't right?
         chars_w_pxs = dpi_font.boundingRect('0'*self._chars).width()
         scale = round(dpi_font.scale())
-        psh.setWidth(chars_w_pxs * scale)
+        psh.setWidth(int(chars_w_pxs * scale))
         return psh

     def set_width_in_chars(

@@ -174,7 +175,6 @@ class Selection(QComboBox):
     def __init__(
         self,
         parent=None,
-
     ) -> None:

         self._items: dict[str, int] = {}

@@ -200,7 +200,6 @@

     def set_style(
         self,
-
         color: str,
         font_size: int,

@@ -217,9 +216,10 @@
     def resize(
         self,
         char: str = 'W',

     ) -> None:
         br = _font.boundingRect(str(char))
-        _, h = br.width(), br.height()
+        _, h = br.width(), int(br.height())

         # TODO: something better then this monkey patch
         view = self.view()

@@ -238,9 +238,11 @@
         keys: list[str],

     ) -> None:
-        '''Write keys to the selection verbatim.
+        '''
+        Write keys to the selection verbatim.

         All other items are cleared beforehand.

         '''
         self.clear()
         self._items.clear()

@@ -329,7 +331,7 @@ class FieldsForm(QWidget):
         self.form.setSpacing(0)
         self.form.setHorizontalSpacing(0)

-        self.vbox.addLayout(self.form, stretch=1/3)
+        self.vbox.addLayout(self.form, stretch=3)

         self.labels: dict[str, QLabel] = {}
         self.fields: dict[str, QWidget] = {}

@@ -369,13 +371,14 @@ class FieldsForm(QWidget):
         key: str,
         label_name: str,
         value: str,
+        readonly: bool = False,

-    ) -> FontAndChartAwareLineEdit:
+    ) -> Edit:

         # TODO: maybe a distint layout per "field" item?
         label = self.add_field_label(label_name)

-        edit = FontAndChartAwareLineEdit(
+        edit = Edit(
             parent=self,
             # width_in_chars=6,
         )

@@ -386,6 +389,7 @@ class FieldsForm(QWidget):
             }}
             """
         )
+        edit.setReadOnly(readonly)
         edit.setText(str(value))
         self.form.addRow(label, edit)

@@ -478,13 +482,15 @@ def mk_form(
     for key, conf in fields_schema.items():
         wtype = conf['type']
         label = str(conf.get('label', key))
+        kwargs = conf.get('kwargs', {})

         # plain (line) edit field
         if wtype == 'edit':
             w = form.add_edit_field(
                 key,
                 label,
-                conf['default_value']
+                conf['default_value'],
+                **kwargs,
             )

         # drop-down selection

@@ -493,7 +499,8 @@ def mk_form(
             w = form.add_select_field(
                 key,
                 label,
-                values
+                values,
+                **kwargs,
             )

         w._key = key

@@ -531,11 +538,12 @@ async def open_form_input_handling(


 class FillStatusBar(QProgressBar):
-    '''A status bar for fills up to a position limit.
+    '''
+    A status bar for fills up to a position limit.

     '''
     border_px: int = 2
-    slot_margin_px: int = 2
+    slot_margin_px: int = 1

     def __init__(
         self,

@@ -546,12 +554,16 @@ class FillStatusBar(QProgressBar):

     ) -> None:
         super().__init__(parent=parent)
-        self.approx_h = approx_height_px
+
+        self.approx_h = int(round(approx_height_px))
+        self.setMinimumHeight(self.approx_h)
+        self.setMaximumHeight(self.approx_h)
+
         self.font_size = font_size

         self.setFormat('')  # label format
-        self.setMinimumWidth(width_px)
-        self.setMaximumWidth(width_px)
+        self.setMinimumWidth(int(width_px))
+        self.setMaximumWidth(int(width_px))

     def set_slots(
         self,

@@ -560,17 +572,12 @@ class FillStatusBar(QProgressBar):

     ) -> None:

-        approx_h = self.approx_h
-        # TODO: compute "used height" thus far and mostly fill the rest
-        tot_slot_h, r = divmod(
-            approx_h,
-            slots,
-        )
-        clipped = slots * tot_slot_h + 2*self.border_px
-        self.setMaximumHeight(clipped)
-        slot_height_px = tot_slot_h - 2*self.slot_margin_px
-
         self.setOrientation(Qt.Vertical)
+        h = self.height()
+
+        # TODO: compute "used height" thus far and mostly fill the rest
+        tot_slot_h, r = divmod(h, slots)
+
         self.setStyleSheet(
             f"""
             QProgressBar {{

@@ -585,21 +592,28 @@ class FillStatusBar(QProgressBar):
             border: {self.border_px}px solid {hcolor('default_light')};
             border-radius: 2px;
             }}

             QProgressBar::chunk {{

             background-color: {hcolor('default_spotlight')};
             color: {hcolor('bracket')};

             border-radius: 2px;

-            margin: {self.slot_margin_px}px;
-            height: {slot_height_px}px;

             }}
             """
         )

+        # to set a discrete "block" per slot...
+        # XXX: couldn't get the discrete math to work here such
+        # that it was always correctly showing a discretized value
+        # up to the limit; not sure if it's the ``.setRange()``
+        # / ``.setValue()`` api or not but i was able to get something
+        # close screwing with the divmod above above but after so large
+        # a value it would always be less chunks then the correct
+        # value..
+        # margin: {self.slot_margin_px}px;
+        # height: {slot_height_px}px;

         # margin-bottom: {slot_margin_px*2}px;
         # margin-top: {slot_margin_px*2}px;
         # color: #19232D;

@@ -648,11 +662,22 @@ def mk_fill_status_bar(
         font_size=bar_label_font_size,
         font_color='gunmetal',
     )
+    # size according to dpi scaled fonted contents to avoid
+    # resizes on magnitude changes (eg. 9 -> 10 %)
+    min_w = int(_font.boundingRect('1000.0M% pnl').width())
+    left_label.setMinimumWidth(min_w)
+    left_label.resize(
+        min_w,
+        left_label.size().height(),
+    )
+
+    bar_labels_lhs.addSpacing(int(5/8 * bar_h))

-    bar_labels_lhs.addSpacing(5/8 * bar_h)
     bar_labels_lhs.addWidget(
         left_label,
-        alignment=Qt.AlignLeft | Qt.AlignTop,
+        # XXX: doesn't seem to actually push up against
+        # the status bar?
+        alignment=Qt.AlignRight | Qt.AlignTop,
     )

     # this hbox is added as a layout by the paner maker/caller

@@ -717,7 +742,7 @@ def mk_order_pane_layout(

 ) -> FieldsForm:

-    font_size: int = _font.px_size - 1
+    font_size: int = _font.px_size - 2

     # TODO: maybe just allocate the whole fields form here
     # and expect an async ctx entry?

@@ -725,12 +750,12 @@ def mk_order_pane_layout(
         parent=parent,
         fields_schema={
             'account': {
-                'label': '**account**:',
+                'label': '**accnt**:',
                 'type': 'select',
                 'default_value': ['paper'],
             },
             'size_unit': {
-                'label': '**allocate**:',
+                'label': '**alloc**:',
                 'type': 'select',
                 'default_value': [
                     '$ size',

@@ -778,7 +803,7 @@ def mk_order_pane_layout(
     form.top_label = top_label

     # add pp fill bar + spacing
-    vbox.addLayout(hbox, stretch=1/3)
+    vbox.addLayout(hbox, stretch=3)

     # TODO: handle resize events and appropriately scale this
     # to the sidepane height?
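The `stretch=1/3` to `stretch=3` fixes above are more than cosmetic: `QBoxLayout.addLayout()` takes an int stretch *factor* (a relative weight among siblings), and a float like `1/3` is either rejected with a TypeError or truncated to 0 depending on the PyQt5 release, which disables the intended weighting either way. Roughly:

from PyQt5.QtWidgets import QHBoxLayout, QVBoxLayout

vbox = QVBoxLayout()
top, bottom = QHBoxLayout(), QHBoxLayout()
# weights are relative: top gets 3 parts for every 1 part of bottom
vbox.addLayout(top, stretch=3)
vbox.addLayout(bottom, stretch=1)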
@ -0,0 +1,970 @@
|
||||||
|
# piker: trading gear for hackers
|
||||||
|
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
|
||||||
|
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
'''
|
||||||
|
FSP UI and graphics components.
|
||||||
|
|
||||||
|
Financial signal processing cluster and real-time graphics management.
|
||||||
|
|
||||||
|
'''
|
||||||
|
from contextlib import asynccontextmanager as acm
|
||||||
|
from functools import partial
|
||||||
|
import inspect
|
||||||
|
from itertools import cycle
|
||||||
|
from typing import Optional, AsyncGenerator, Any
|
||||||
|
|
||||||
|
import numpy as np
|
||||||
|
from pydantic import create_model
|
||||||
|
import tractor
|
||||||
|
import pyqtgraph as pg
|
||||||
|
import trio
|
||||||
|
from trio_typing import TaskStatus
|
||||||
|
|
||||||
|
from ._axes import PriceAxis
|
||||||
|
from .._cacheables import maybe_open_context
|
||||||
|
from ..calc import humanize
|
||||||
|
from ..data._sharedmem import (
|
||||||
|
ShmArray,
|
||||||
|
_Token,
|
||||||
|
try_read,
|
||||||
|
)
|
||||||
|
from ._chart import (
|
||||||
|
ChartPlotWidget,
|
||||||
|
LinkedSplits,
|
||||||
|
)
|
||||||
|
from ._forms import (
|
||||||
|
FieldsForm,
|
||||||
|
mk_form,
|
||||||
|
open_form_input_handling,
|
||||||
|
)
|
||||||
|
from ..fsp._api import maybe_mk_fsp_shm, Fsp
|
||||||
|
from ..fsp import cascade
|
||||||
|
from ..fsp._volume import (
|
||||||
|
tina_vwap,
|
||||||
|
dolla_vlm,
|
||||||
|
flow_rates,
|
||||||
|
)
|
||||||
|
from ..log import get_logger
|
||||||
|
|
||||||
|
log = get_logger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def has_vlm(ohlcv: ShmArray) -> bool:
|
||||||
|
# make sure that the instrument supports volume history
|
||||||
|
# (sometimes this is not the case for some commodities and
|
||||||
|
# derivatives)
|
||||||
|
vlm = ohlcv.array['volume']
|
||||||
|
return not bool(np.all(np.isin(vlm, -1)) or np.all(np.isnan(vlm)))
|
||||||
|
|
||||||
|
|
||||||
|
def update_fsp_chart(
|
||||||
|
chart: ChartPlotWidget,
|
||||||
|
flow,
|
||||||
|
graphics_name: str,
|
||||||
|
array_key: Optional[str],
|
||||||
|
**kwargs,
|
||||||
|
|
||||||
|
) -> None:
|
||||||
|
|
||||||
|
shm = flow.shm
|
||||||
|
if not shm:
|
||||||
|
return
|
||||||
|
|
||||||
|
array = shm.array
|
||||||
|
last_row = try_read(array)
|
||||||
|
|
||||||
|
# guard against unreadable case
|
||||||
|
if not last_row:
|
||||||
|
log.warning(f'Read-race on shm array: {graphics_name}@{shm.token}')
|
||||||
|
return
|
||||||
|
|
||||||
|
# update graphics
|
||||||
|
# NOTE: this does a length check internally which allows it
|
||||||
|
# staying above the last row check below..
|
||||||
|
chart.update_graphics_from_flow(
|
||||||
|
graphics_name,
|
||||||
|
array_key=array_key or graphics_name,
|
||||||
|
**kwargs,
|
||||||
|
)
|
||||||
|
|
||||||
|
# XXX: re: ``array_key``: fsp func names must be unique meaning we
|
||||||
|
# can't have duplicates of the underlying data even if multiple
|
||||||
|
# sub-charts reference it under different 'named charts'.
|
||||||
|
|
||||||
|
# read from last calculated value and update any label
|
||||||
|
last_val_sticky = chart._ysticks.get(graphics_name)
|
||||||
|
if last_val_sticky:
|
||||||
|
last = last_row[array_key]
|
||||||
|
last_val_sticky.update_from_data(-1, last)
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def open_fsp_sidepane(
|
||||||
|
linked: LinkedSplits,
|
||||||
|
conf: dict[str, dict[str, str]],
|
||||||
|
|
||||||
|
) -> FieldsForm:
|
||||||
|
|
||||||
|
schema = {}
|
||||||
|
|
||||||
|
assert len(conf) == 1 # for now
|
||||||
|
|
||||||
|
# add (single) selection widget
|
||||||
|
for name, config in conf.items():
|
||||||
|
schema[name] = {
|
||||||
|
'label': '**fsp**:',
|
||||||
|
'type': 'select',
|
||||||
|
'default_value': [name],
|
||||||
|
}
|
||||||
|
|
||||||
|
# add parameters for selection "options"
|
||||||
|
params = config.get('params', {})
|
||||||
|
for name, config in params.items():
|
||||||
|
|
||||||
|
default = config['default_value']
|
||||||
|
kwargs = config.get('widget_kwargs', {})
|
||||||
|
|
||||||
|
# add to ORM schema
|
||||||
|
schema.update({
|
||||||
|
name: {
|
||||||
|
'label': f'**{name}**:',
|
||||||
|
'type': 'edit',
|
||||||
|
'default_value': default,
|
||||||
|
'kwargs': kwargs,
|
||||||
|
},
|
||||||
|
})
|
||||||
|
|
||||||
|
sidepane: FieldsForm = mk_form(
|
||||||
|
parent=linked.godwidget,
|
||||||
|
fields_schema=schema,
|
||||||
|
)
|
||||||
|
|
||||||
|
# https://pydantic-docs.helpmanual.io/usage/models/#dynamic-model-creation
|
||||||
|
FspConfig = create_model(
|
||||||
|
'FspConfig',
|
||||||
|
name=name,
|
||||||
|
**params,
|
||||||
|
)
|
||||||
|
sidepane.model = FspConfig()
|
||||||
|
|
||||||
|
# just a logger for now until we get fsp configs up and running.
|
||||||
|
async def settings_change(
|
||||||
|
key: str,
|
||||||
|
value: str
|
||||||
|
|
||||||
|
) -> bool:
|
||||||
|
print(f'{key}: {value}')
|
||||||
|
return True
|
||||||
|
|
||||||
|
# TODO:
|
||||||
|
async with (
|
||||||
|
open_form_input_handling(
|
||||||
|
sidepane,
|
||||||
|
focus_next=linked.godwidget,
|
||||||
|
on_value_change=settings_change,
|
||||||
|
)
|
||||||
|
):
|
||||||
|
yield sidepane
|
||||||
|
|
||||||
|
|
||||||
|
@acm
|
||||||
|
async def open_fsp_actor_cluster(
|
||||||
|
names: list[str] = ['fsp_0', 'fsp_1'],
|
||||||
|
|
||||||
|
) -> AsyncGenerator[int, dict[str, tractor.Portal]]:
|
||||||
|
|
||||||
|
from tractor._clustering import open_actor_cluster
|
||||||
|
|
||||||
|
# profiler = pg.debug.Profiler(
|
||||||
|
# delayed=False,
|
||||||
|
# disabled=False
|
||||||
|
# )
|
||||||
|
async with open_actor_cluster(
|
||||||
|
count=2,
|
||||||
|
names=names,
|
||||||
|
modules=['piker.fsp._engine'],
|
||||||
|
|
||||||
|
) as cluster_map:
|
||||||
|
# profiler('started fsp cluster')
|
||||||
|
yield cluster_map
|
||||||
|
|
||||||
|
|
||||||
|
async def run_fsp_ui(

    linkedsplits: LinkedSplits,
    shm: ShmArray,
    started: trio.Event,
    target: Fsp,
    conf: dict[str, dict],
    loglevel: str,
    # profiler: pg.debug.Profiler,
    # _quote_throttle_rate: int = 58,

) -> None:
    '''
    Task for UI spawning around a ``LinkedSplits`` chart for fsp
    related graphics/UX management.

    This is normally spawned/called once for each entry in the fsp
    config.

    '''
    name = target.name
    # profiler(f'started UI task for fsp: {name}')

    async with (
        # side UI for parameters/controls
        open_fsp_sidepane(
            linkedsplits,
            {name: conf},
        ) as sidepane,
    ):
        await started.wait()
        # profiler(f'fsp:{name} attached to fsp ctx-stream')

        overlay_with = conf.get('overlay', False)
        if overlay_with:
            if overlay_with == 'ohlc':
                chart = linkedsplits.chart
            else:
                chart = linkedsplits.subplots[overlay_with]

            chart.draw_curve(
                name=name,
                shm=shm,
                overlay=True,
                color='default_light',
                array_key=name,
                **conf.get('chart_kwargs', {})
            )

        else:
            # create a new sub-chart widget for this fsp
            chart = linkedsplits.add_plot(
                name=name,
                shm=shm,

                array_key=name,
                sidepane=sidepane,

                # curve by default
                ohlc=False,

                # settings passed down to ``ChartPlotWidget``
                **conf.get('chart_kwargs', {})
            )

            # should **not** be the same sub-chart widget
            assert chart.name != linkedsplits.chart.name

        array_key = name

        # profiler(f'fsp:{name} chart created')

        # first UI update, usually from shm pushed history
        update_fsp_chart(
            chart,
            chart._flows[array_key],
            name,
            array_key=array_key,
        )

        chart.linked.focus()

        # TODO: figure out if we can roll our own `FillToThreshold` to
        # get brush filled polygons for OS/OB conditions.
        # ``pg.FillBetweenItems`` seems to be one technique using
        # generic fills between curve types while ``PlotCurveItem`` has
        # logic inside ``.paint()`` for ``self.opts['fillLevel']`` which
        # might be the best solution?

        # graphics = chart.update_from_array(chart.name, array[name])
        # graphics.curve.setBrush(50, 50, 200, 100)
        # graphics.curve.setFillLevel(50)

        # if func_name == 'rsi':
        #     from ._lines import level_line
        #     # add moveable over-[sold/bought] lines
        #     # and labels only for the 70/30 lines
        #     level_line(chart, 20)
        #     level_line(chart, 30, orient_v='top')
        #     level_line(chart, 70, orient_v='bottom')
        #     level_line(chart, 80, orient_v='top')

        chart.view._set_yrange()
        # done()  # status updates

        # profiler(f'fsp:{func_name} starting update loop')
        # profiler.finish()

        # update chart graphics
        # last = time.time()

        # XXX: this currently doesn't loop since
        # the FSP engine does **not** push updates atm
        # since we do graphics update in the main loop
        # in ``._display``.
        # async for value in stream:
        #     print(value)

        #     # chart isn't actively shown so just skip render cycle
        #     if chart.linked.isHidden():
        #         continue

        #     else:
        #         now = time.time()
        #         period = now - last

        #         if period <= 1/_quote_throttle_rate:
        #             # faster then display refresh rate
        #             print(f'fsp too fast: {1/period}')
        #             continue

        #         # run synchronous update
        #         update_fsp_chart(
        #             chart,
        #             shm,
        #             display_name,
        #             array_key=func_name,
        #         )

        #         # set time of last graphics update
        #         last = time.time()

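The commented-out loop above sketches a frame-rate throttle for pushed updates. A self-contained rendition of that throttle logic, assuming the 58 Hz cap hinted at by ``_quote_throttle_rate``:

import time

_quote_throttle_rate = 58  # display refresh cap in Hz (assumed)

class RenderThrottle:
    def __init__(self) -> None:
        self.last = time.time()

    def should_render(self) -> bool:
        # skip updates arriving faster than the display refresh rate
        now = time.time()
        if (now - self.last) <= 1 / _quote_throttle_rate:
            return False
        self.last = now
        return True
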
class FspAdmin:
    '''
    Client API for orchestrating FSP actors and displaying
    real-time graphics output.

    '''
    def __init__(
        self,
        tn: trio.Nursery,
        cluster: dict[str, tractor.Portal],
        linked: LinkedSplits,
        src_shm: ShmArray,

    ) -> None:
        self.tn = tn
        self.cluster = cluster
        self.linked = linked
        self._rr_next_actor = cycle(cluster.items())
        self._registry: dict[
            tuple,
            tuple[tractor.MsgStream, ShmArray]
        ] = {}
        self._flow_registry: dict[_Token, str] = {}
        self.src_shm = src_shm

    def rr_next_portal(self) -> tractor.Portal:
        name, portal = next(self._rr_next_actor)
        return portal

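``rr_next_portal()`` is plain round-robin worker selection over the cluster mapping via ``itertools.cycle``; a runnable sketch with stand-in portal values:

from itertools import cycle

cluster = {'fsp_0': '<portal-0>', 'fsp_1': '<portal-1>'}  # stand-ins
rr_next_actor = cycle(cluster.items())

for _ in range(4):
    name, portal = next(rr_next_actor)
    print(name, portal)  # alternates fsp_0, fsp_1, fsp_0, ...
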
    async def open_chain(
        self,

        portal: tractor.Portal,
        complete: trio.Event,
        started: trio.Event,
        fqsn: str,
        dst_shm: ShmArray,
        conf: dict,
        target: Fsp,
        loglevel: str,

    ) -> None:
        '''
        Task which opens a remote FSP endpoint in the managed
        cluster and sleeps until signalled to exit.

        '''
        ns_path = str(target.ns_path)
        async with (
            portal.open_context(

                # chaining entrypoint
                cascade,

                # data feed key
                fqsn=fqsn,

                # mems
                src_shm_token=self.src_shm.token,
                dst_shm_token=dst_shm.token,

                # target
                ns_path=ns_path,

                loglevel=loglevel,
                zero_on_step=conf.get('zero_on_step', False),
                shm_registry=[
                    (token.as_msg(), fsp_name, dst_token.as_msg())
                    for (token, fsp_name), dst_token
                    in self._flow_registry.items()
                ],

            ) as (ctx, last_index),
            ctx.open_stream() as stream,
        ):

            # register output data
            self._registry[
                (fqsn, ns_path)
            ] = (
                stream,
                dst_shm,
                complete
            )

            started.set()

            # wait for graceful shutdown signal
            async with stream.subscribe() as stream:
                async for msg in stream:
                    info = msg.get('fsp_update')
                    if info:
                        # if the chart isn't hidden try to update
                        # the data on screen.
                        if not self.linked.isHidden():
                            log.debug(f'Re-syncing graphics for fsp: {ns_path}')
                            self.linked.graphics_cycle(
                                trigger_all=True,
                                prepend_update_index=info['first'],
                            )
                    else:
                        log.info(f'recved unexpected fsp engine msg: {msg}')

            await complete.wait()

    async def start_engine_task(
        self,

        target: Fsp,
        conf: dict[str, dict[str, Any]],

        worker_name: Optional[str] = None,
        loglevel: str = 'info',

    ) -> (ShmArray, trio.Event):

        fqsn = self.linked.symbol.front_fqsn()

        # allocate an output shm array
        key, dst_shm, opened = maybe_mk_fsp_shm(
            fqsn,
            target=target,
            readonly=True,
        )
        self._flow_registry[
            (self.src_shm._token, target.name)
        ] = dst_shm._token

        # if not opened:
        #     raise RuntimeError(
        #         f'Already started FSP `{fqsn}:{func_name}`'
        #     )

        portal = self.cluster.get(worker_name) or self.rr_next_portal()
        complete = trio.Event()
        started = trio.Event()
        self.tn.start_soon(
            self.open_chain,
            portal,
            complete,
            started,
            fqsn,
            dst_shm,
            conf,
            target,
            loglevel,
        )

        return dst_shm, started

    async def open_fsp_chart(
        self,

        target: Fsp,

        conf: dict,  # yeah probably dumb..
        loglevel: str = 'error',

    ) -> (trio.Event, ChartPlotWidget):

        shm, started = await self.start_engine_task(
            target,
            conf,
            loglevel,
        )

        # init async
        self.tn.start_soon(
            partial(
                run_fsp_ui,

                self.linked,
                shm,
                started,
                target,

                conf=conf,
                loglevel=loglevel,
            )
        )
        return started

@acm
async def open_fsp_admin(
    linked: LinkedSplits,
    src_shm: ShmArray,
    **kwargs,

) -> AsyncGenerator[dict, dict[str, tractor.Portal]]:

    async with (
        maybe_open_context(
            # for now make a cluster per client?
            acm_func=open_fsp_actor_cluster,
            kwargs=kwargs,
        ) as (cache_hit, cluster_map),

        trio.open_nursery() as tn,
    ):
        if cache_hit:
            log.info('re-using existing fsp cluster')

        admin = FspAdmin(
            tn,
            cluster_map,
            linked,
            src_shm,
        )
        try:
            yield admin
        finally:
            # terminate all tasks via signals
            for key, entry in admin._registry.items():
                _, _, event = entry
                event.set()

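The teardown in the ``finally:`` block above works because every registered chain task parks on a ``trio.Event`` which the admin sets on exit. A runnable sketch of that pattern, with made-up worker names:

import trio

async def main() -> None:
    registry: dict[str, trio.Event] = {
        'fsp_0': trio.Event(),
        'fsp_1': trio.Event(),
    }

    async def chain(name: str, complete: trio.Event) -> None:
        # stand-in for ``FspAdmin.open_chain()``'s final wait
        await complete.wait()
        print(f'{name}: graceful shutdown')

    async with trio.open_nursery() as n:
        for name, ev in registry.items():
            n.start_soon(chain, name, ev)
        # ... do work, then signal all chains to exit:
        for ev in registry.values():
            ev.set()

trio.run(main)
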
async def open_vlm_displays(

    linked: LinkedSplits,
    ohlcv: ShmArray,
    dvlm: bool = True,

    task_status: TaskStatus[ChartPlotWidget] = trio.TASK_STATUS_IGNORED,

) -> ChartPlotWidget:
    '''
    Volume subchart displays.

    Since "volume" is often included directly alongside OHLCV price
    data, we don't really need a separate FSP-actor + shm array for it
    since it's likely already directly adjacent to OHLC samples from the
    data provider.

    Further, only if volume data is detected (it sometimes isn't provided,
    e.g. forex, certain commodities markets) will volume dependent FSPs
    be spawned here.

    '''
    sig = inspect.signature(flow_rates.func)
    params = sig.parameters

    async with (
        open_fsp_sidepane(
            linked, {
                'flows': {

                    # TODO: add support for dynamically changing these
                    'params': {
                        u'\u03BC' + '_type': {
                            'default_value': str(params['mean_type'].default),
                        },
                        'period': {
                            'default_value': str(params['period'].default),
                            # make widget un-editable for now.
                            'widget_kwargs': {'readonly': True},
                        },
                    },
                }
            },
        ) as sidepane,
        open_fsp_admin(linked, ohlcv) as admin,
    ):
        # TODO: support updates
        # period_field = sidepane.fields['period']
        # period_field.setText(
        #     str(period_param.default)
        # )

        # built-in vlm which we plot ASAP since it's
        # usually data provided directly with OHLC history.
        shm = ohlcv
        chart = linked.add_plot(
            name='volume',
            shm=shm,

            array_key='volume',
            sidepane=sidepane,

            # curve by default
            ohlc=False,

            # Draw vertical bars from zero.
            # we do this internally ourselves since
            # the curve item internals are pretty convoluted.
            style='step',
        )

        # force 0 to always be in view
        def multi_maxmin(
            names: list[str],

        ) -> tuple[float, float]:

            mx = 0
            for name in names:

                mxmn = chart.maxmin(name=name)
                if mxmn:
                    ymax = mxmn[1]
                    if ymax > mx:
                        mx = ymax

            return 0, mx

        chart.view.maxmin = partial(multi_maxmin, names=['volume'])

        # TODO: fix the x-axis label issue where if you put
        # the axis on the left it's totally not lined up...
        # show volume units value on LHS (for dinkus)
        # chart.hideAxis('right')
        # chart.showAxis('left')

        # send back new chart to caller
        task_status.started(chart)

        # should **not** be the same sub-chart widget
        assert chart.name != linked.chart.name

        # sticky only on sub-charts atm
        last_val_sticky = chart._ysticks[chart.name]

        # read from last calculated value
        value = shm.array['volume'][-1]

        last_val_sticky.update_from_data(-1, value)

        vlm_curve = chart.update_graphics_from_flow(
            'volume',
            # shm.array,
        )

        # size view to data once at outset
        chart.view._set_yrange()

        # add axis title
        axis = chart.getAxis('right')
        axis.set_title(' vlm')

        if dvlm:

            tasks_ready = []
            # spawn and overlay $ vlm on the same subchart
            dvlm_shm, started = await admin.start_engine_task(
                dolla_vlm,

                {  # fsp engine conf
                    'func_name': 'dolla_vlm',
                    'zero_on_step': True,
                    'params': {
                        'price_func': {
                            'default_value': 'chl3',
                        },
                    },
                },
                # loglevel,
            )
            tasks_ready.append(started)

            # FIXME: we should error on starting the same fsp right
            # since it might collide with existing shm.. or wait we
            # had this before??
            # dolla_vlm,

            tasks_ready.append(started)
            # profiler(f'created shm for fsp actor: {display_name}')

            # wait for all engine tasks to startup
            async with trio.open_nursery() as n:
                for event in tasks_ready:
                    n.start_soon(event.wait)

            # dolla vlm overlay
            # XXX: the main chart already contains a vlm "units" axis
            # so here we add an overlay with a y-range in
            # $ liquidity-value units (normally a fiat like USD).
            dvlm_pi = chart.overlay_plotitem(
                'dolla_vlm',
                index=0,  # place axis on inside (nearest to chart)
                axis_title=' $vlm',
                axis_side='right',
                axis_kwargs={
                    'typical_max_str': ' 100.0 M ',
                    'formatter': partial(
                        humanize,
                        digits=2,
                    ),
                },
            )

            # all to be overlayed curve names
            fields = [
                'dolla_vlm',
                'dark_vlm',
            ]
            # dvlm_rate_fields = [
            #     'dvlm_rate',
            #     'dark_dvlm_rate',
            # ]
            trade_rate_fields = [
                'trade_rate',
                'dark_trade_rate',
            ]

            group_mxmn = partial(
                multi_maxmin,
                # keep both regular and dark vlm in view
                names=fields,
                # names=fields + dvlm_rate_fields,
            )

            # add custom auto range handler
            dvlm_pi.vb._maxmin = group_mxmn

            # use slightly less light (than bracket) gray
            # for volume from "main exchange" and a more "bluey"
            # gray for "dark" vlm.
            vlm_color = 'i3'
            dark_vlm_color = 'charcoal'

            # add dvlm (step) curves to common view
            def chart_curves(
                names: list[str],
                pi: pg.PlotItem,
                shm: ShmArray,
                step_mode: bool = False,
                style: str = 'solid',

            ) -> None:
                for name in names:
                    if 'dark' in name:
                        color = dark_vlm_color
                    elif 'rate' in name:
                        color = vlm_color
                    else:
                        color = 'bracket'

                    curve, _ = chart.draw_curve(
                        name=name,
                        shm=shm,
                        array_key=name,
                        overlay=pi,
                        color=color,
                        step_mode=step_mode,
                        style=style,
                        pi=pi,
                    )

                    # TODO: we need a better API to do this..
                    # specially store ref to shm for lookup in display loop
                    # since only a placeholder of `None` is entered in
                    # ``.draw_curve()``.
                    flow = chart._flows[name]
                    assert flow.plot is pi

            chart_curves(
                fields,
                dvlm_pi,
                dvlm_shm,
                step_mode=True,
            )

            # spawn flow rates fsp **ONLY AFTER** the 'dolla_vlm' fsp is
            # up since this one depends on it.

            fr_shm, started = await admin.start_engine_task(
                flow_rates,
                {  # fsp engine conf
                    'func_name': 'flow_rates',
                    'zero_on_step': False,
                },
                # loglevel,
            )
            await started.wait()

            # chart_curves(
            #     dvlm_rate_fields,
            #     dvlm_pi,
            #     fr_shm,
            # )

            # TODO: is there a way to "sync" the dual axes such that only
            # one curve is needed?
            # hide the original vlm curve since the $vlm one is now
            # displayed and the curves are effectively the same minus
            # liquidity events (well at least on low OHLC periods - 1s).
            vlm_curve.hide()
            chart.removeItem(vlm_curve)
            vflow = chart._flows['volume']
            vflow.render = False

            # avoid range sorting on volume once disabled
            chart.view.disable_auto_yrange()

            # Trade rate overlay
            # XXX: requires an additional overlay for
            # a trades-per-period (time) y-range.
            tr_pi = chart.overlay_plotitem(
                'trade_rates',

                # TODO: dynamically update period (and thus this axis?)
                # title from user input.
                axis_title='clears',

                axis_side='left',
                axis_kwargs={
                    'typical_max_str': ' 10.0 M ',
                    'formatter': partial(
                        humanize,
                        digits=2,
                    ),
                    'text_color': vlm_color,
                },

            )
            # add custom auto range handler
            tr_pi.vb.maxmin = partial(
                multi_maxmin,
                # keep both regular and dark vlm in view
                names=trade_rate_fields,
            )

            chart_curves(
                trade_rate_fields,
                tr_pi,
                fr_shm,
                # step_mode=True,

                # dashed line to represent "individual trades" being
                # more "granular" B)
                style='dash',
            )

            for pi in (
                dvlm_pi,
                tr_pi,
            ):
                for name, axis_info in pi.axes.items():
                    # lol this sux XD
                    axis = axis_info['item']
                    if isinstance(axis, PriceAxis):
                        axis.size_to_values()

        # built-in vlm fsps
        for target, conf in {
            # tina_vwap: {
            #     'overlay': 'ohlc',  # overlays with OHLCV (main) chart
            #     'anchor': 'session',
            # },
        }.items():
            started = await admin.open_fsp_chart(
                target,
                conf,
            )

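The ``multi_maxmin()`` helper above is what pins the volume baseline: it always returns 0 as the low so the bars sit on the chart floor. A pure-python rendition using made-up per-curve max values:

def multi_maxmin(maxes: dict[str, float], names: list[str]) -> tuple[float, float]:
    # hard-coded 0 lower bound keeps the baseline in view
    mx = 0.0
    for name in names:
        ymax = maxes.get(name, 0.0)
        if ymax > mx:
            mx = ymax
    return 0.0, mx

print(multi_maxmin(
    {'dolla_vlm': 1.2e6, 'dark_vlm': 3.4e5},
    ['dolla_vlm', 'dark_vlm'],
))  # -> (0.0, 1200000.0)
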
async def start_fsp_displays(

    linked: LinkedSplits,
    ohlcv: ShmArray,
    group_status_key: str,
    loglevel: str,

) -> None:
    '''
    Create fsp charts from a config input attached to a local actor
    compute cluster.

    Pass target entrypoint and historical data via ``ShmArray``.

    '''
    linked.focus()

    # TODO: eventually we'll support some kind of n-compose syntax
    fsp_conf = {
        # 'rsi': {
        #     'func_name': 'rsi',  # literal python func ref lookup name

        #     # map of parameters to place on the fsp sidepane widget
        #     # which should map to dynamic inputs available to the
        #     # fsp function at runtime.
        #     'params': {
        #         'period': {
        #             'default_value': 14,
        #             'widget_kwargs': {'readonly': True},
        #         },
        #     },

        #     # ``ChartPlotWidget`` options passthrough
        #     'chart_kwargs': {
        #         'static_yrange': (0, 100),
        #     },
        # },
    }
    profiler = pg.debug.Profiler(
        delayed=False,
        disabled=False
    )

    async with (

        # NOTE: this admin internally opens an actor cluster
        open_fsp_admin(linked, ohlcv) as admin,
    ):
        statuses = []
        for target, conf in fsp_conf.items():
            started = await admin.open_fsp_chart(
                target,
                conf,
            )
            done = linked.window().status_bar.open_status(
                f'loading fsp, {target}..',
                group_key=group_status_key,
            )
            statuses.append((started, done))

        for fsp_loaded, status_cb in statuses:
            await fsp_loaded.wait()
            profiler(f'attached to fsp portal: {target}')
            status_cb()

    # blocks on nursery until all fsp actors complete
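The load/status handshake at the end of ``start_fsp_displays()`` is just "collect (event, callback) pairs, await each load, fire its completion callback". A compact runnable sketch with hypothetical fsp names:

import trio

async def main() -> None:
    statuses: list[tuple[trio.Event, object]] = []
    for name in ('fsp_a', 'fsp_b'):  # made-up names
        started = trio.Event()
        started.set()  # pretend the engine came up instantly
        done = lambda name=name: print(f'{name} loaded')
        statuses.append((started, done))

    for fsp_loaded, status_cb in statuses:
        await fsp_loaded.wait()
        status_cb()

trio.run(main)
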
@@ -18,6 +18,7 @@
 Chart view box primitives

 """
+from __future__ import annotations
 from contextlib import asynccontextmanager
 import time
 from typing import Optional, Callable

@@ -32,7 +33,8 @@ import numpy as np
 import trio

 from ..log import get_logger
-from ._style import _min_points_to_show
+from .._profile import pg_profile_enabled, ms_slower_then
+# from ._style import _min_points_to_show
 from ._editors import SelectRect
 from . import _event

@@ -155,6 +157,7 @@ async def handle_viewmode_kb_inputs(
         # View modes
         if key == Qt.Key_R:

+            # TODO: set this for all subplots
             # edge triggered default view activation
             view.chart.default_view()

@@ -316,6 +319,7 @@ async def handle_viewmode_mouse(
     ):
         # when in order mode, submit execution
         # msg.event.accept()
+        # breakpoint()
         view.order_mode.submit_order()

@@ -332,19 +336,44 @@ class ChartView(ViewBox):
     '''
     mode_name: str = 'view'

+    # "relay events" for making overlaid views work.
+    # NOTE: these MUST be defined here (and can't be monkey patched
+    # on later) due to signal construction requiring refs to be
+    # in place during the run of meta-class machinery.
+    mouseDragEventRelay = QtCore.Signal(object, object, object)
+    wheelEventRelay = QtCore.Signal(object, object, object)
+
+    event_relay_source: 'Optional[ViewBox]' = None
+    relays: dict[str, QtCore.Signal] = {}
+
     def __init__(
         self,

         name: str,

         parent: pg.PlotItem = None,
+        static_yrange: Optional[tuple[float, float]] = None,
         **kwargs,

     ):
-        super().__init__(parent=parent, **kwargs)
+        super().__init__(
+            parent=parent,
+            name=name,
+            # TODO: look into the default view padding
+            # support that might replace somem of our
+            # ``ChartPlotWidget._set_yrange()`
+            # defaultPadding=0.,
+            **kwargs
+        )
+        # for "known y-range style"
+        self._static_yrange = static_yrange
+        self._maxmin = None
+
         # disable vertical scrolling
-        self.setMouseEnabled(x=True, y=False)
+        self.setMouseEnabled(
+            x=True,
+            y=True,
+        )
+
         self.linkedsplits = None
         self._chart: 'ChartPlotWidget' = None  # noqa

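The class-level ``QtCore.Signal`` relay attributes deserve a note: Qt signals only become usable bound signals when the Qt meta-class processes the class body, which is why the comment above insists they can't be monkey-patched on later. A tiny PyQt5 sketch of the same constraint:

from PyQt5 import QtCore

class Relay(QtCore.QObject):
    # MUST be a class attribute; assigning a signal on an instance
    # after construction does not produce a working bound signal.
    ping = QtCore.pyqtSignal(object)

r = Relay()
r.ping.connect(lambda payload: print('got', payload))
r.ping.emit({'msg': 'hello'})
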
@@ -357,6 +386,34 @@ class ChartView(ViewBox):
         self.order_mode: bool = False

         self.setFocusPolicy(QtCore.Qt.StrongFocus)
+        self._ic = None
+
+    def start_ic(
+        self,
+    ) -> None:
+        '''
+        Signal the beginning of a click-drag interaction
+        to any interested task waiters.
+
+        '''
+        if self._ic is None:
+            self.chart.pause_all_feeds()
+            self._ic = trio.Event()
+
+    def signal_ic(
+        self,
+        *args,
+
+    ) -> None:
+        '''
+        Signal the end of a click-drag interaction
+        to any waiters.
+
+        '''
+        if self._ic:
+            self._ic.set()
+            self._ic = None
+            self.chart.resume_all_feeds()
+
     @asynccontextmanager
     async def open_async_input_handler(

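The ``start_ic()``/``signal_ic()`` pair added above implements a pause/resume handshake over a ``trio.Event``: the drag start pauses feeds and arms the event, the drag finish fires it so waiters resume. A standalone sketch of that handshake:

import trio

async def main() -> None:
    ic = trio.Event()

    async def display_loop() -> None:
        print('feed paused, waiting for drag to finish..')
        await ic.wait()
        print('drag done, feed resumed')

    async with trio.open_nursery() as n:
        n.start_soon(display_loop)
        await trio.sleep(0.1)  # simulate the click-drag duration
        ic.set()               # i.e. ``signal_ic()``

trio.run(main)
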
@@ -391,9 +448,25 @@ class ChartView(ViewBox):
     def chart(self, chart: 'ChartPlotWidget') -> None:  # type: ignore # noqa
         self._chart = chart
         self.select_box.chart = chart
+        if self._maxmin is None:
+            self._maxmin = chart.maxmin
+
+    @property
+    def maxmin(self) -> Callable:
+        return self._maxmin
+
+    @maxmin.setter
+    def maxmin(self, callback: Callable) -> None:
+        self._maxmin = callback

-    def wheelEvent(self, ev, axis=None):
-        '''Override "center-point" location for scrolling.
+    def wheelEvent(
+        self,
+        ev,
+        axis=None,
+        relayed_from: ChartView = None,
+    ):
+        '''
+        Override "center-point" location for scrolling.
+
         This is an override of the ``ViewBox`` method simply changing
         the center of the zoom to be the y-axis.

@@ -411,20 +484,48 @@

         # don't zoom more then the min points setting
         l, lbar, rbar, r = chart.bars_range()
-        vl = r - l
-
-        if ev.delta() > 0 and vl <= _min_points_to_show:
-            log.debug("Max zoom bruh...")
-            return
-
-        if ev.delta() < 0 and vl >= len(chart._arrays['ohlc']) + 666:
-            log.debug("Min zoom bruh...")
-            return
+        # vl = r - l
+
+        # if ev.delta() > 0 and vl <= _min_points_to_show:
+        #     log.debug("Max zoom bruh...")
+        #     return
+
+        # if (
+        #     ev.delta() < 0
+        #     and vl >= len(chart._flows[chart.name].shm.array) + 666
+        # ):
+        #     log.debug("Min zoom bruh...")
+        #     return

         # actual scaling factor
         s = 1.015 ** (ev.delta() * -1 / 20)  # self.state['wheelScaleFactor'])
         s = [(None if m is False else s) for m in mask]
+
+        if (
+            # zoom happened on axis
+            axis == 1
+
+            # if already in axis zoom mode then keep it
+            or self.chart._static_yrange == 'axis'
+        ):
+            self.chart._static_yrange = 'axis'
+            self.setLimits(yMin=None, yMax=None)
+
+            # print(scale_y)
+            # pos = ev.pos()
+            # lastPos = ev.lastPos()
+            # dif = pos - lastPos
+            # dif = dif * -1
+            center = Point(
+                fn.invertQTransform(
+                    self.childGroup.transform()
+                ).map(ev.pos())
+            )
+            # scale_y = 1.3 ** (center.y() * -1 / 20)
+            self.scaleBy(s, center)
+
+        else:
+
         # center = pg.Point(
         #     fn.invertQTransform(self.childGroup.transform()).map(ev.pos())
         # )

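A quick numeric check of the wheel scaling factor kept above: one standard wheel "notch" is a delta of 120, so a single notch scales the view by roughly 8.5% in either direction.

for delta in (120, -120, 240):
    s = 1.015 ** (delta * -1 / 20)
    print(delta, round(s, 4))
# 120 -> ~0.9145 (zoom in), -120 -> ~1.0934 (zoom out), 240 -> ~0.8364
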
@@ -449,7 +550,7 @@

         end_of_l1 = pg.Point(
             round(
-                chart._vb.mapToView(
+                chart.cv.mapToView(
                     pg.Point(r_axis_x - chart._max_l1_line_len)
                     # QPointF(chart._max_l1_line_len, 0)
                 ).x()

@@ -457,7 +558,6 @@
         )  # .x()

         # self.state['viewRange'][0][1] = end_of_l1
-
         # focal = pg.Point((last_bar.x() + end_of_l1)/2)

         focal = min(

@@ -469,23 +569,42 @@

         self._resetTarget()
         self.scaleBy(s, focal)
-        ev.accept()
+
+        # XXX: the order of the next 2 lines i'm pretty sure
+        # matters, we want the resize to trigger before the graphics
+        # update, but i gotta feelin that because this one is signal
+        # based (and thus not necessarily sync invoked right away)
+        # that calling the resize method manually might work better.
         self.sigRangeChangedManually.emit(mask)
+
+        # XXX: without this is seems as though sometimes
+        # when zooming in from far out (and maybe vice versa?)
+        # the signal isn't being fired enough since if you pan
+        # just after you'll see further downsampling code run
+        # (pretty noticeable on the OHLC ds curve) but with this
+        # that never seems to happen? Only question is how much this
+        # "double work" is causing latency when these missing event
+        # fires don't happen?
+        self.maybe_downsample_graphics()
+
+        ev.accept()

     def mouseDragEvent(
         self,
         ev,
         axis: Optional[int] = None,
+        relayed_from: ChartView = None,
+
     ) -> None:
-        # if axis is specified, event will only affect that axis.
-        ev.accept()  # we accept all buttons
-        button = ev.button()
-
         pos = ev.pos()
         lastPos = ev.lastPos()
         dif = pos - lastPos
         dif = dif * -1
+
+        # NOTE: if axis is specified, event will only affect that axis.
+        button = ev.button()
+
         # Ignore axes if mouse is disabled
         mouseEnabled = np.array(self.state['mouseEnabled'], dtype=np.float)
         mask = mouseEnabled.copy()

@@ -493,21 +612,28 @@
             mask[1-axis] = 0.0

         # Scale or translate based on mouse button
-        if button & (QtCore.Qt.LeftButton | QtCore.Qt.MidButton):
+        if button & (
+            QtCore.Qt.LeftButton | QtCore.Qt.MidButton
+        ):
             # zoom y-axis ONLY when click-n-drag on it
-            if axis == 1:
-                # set a static y range special value on chart widget to
-                # prevent sizing to data in view.
-                self.chart._static_yrange = 'axis'
-
-                scale_y = 1.3 ** (dif.y() * -1 / 20)
-                self.setLimits(yMin=None, yMax=None)
-
-                # print(scale_y)
-                self.scaleBy((0, scale_y))
-
-        if self.state['mouseMode'] == ViewBox.RectMode:
+            # if axis == 1:
+            #     # set a static y range special value on chart widget to
+            #     # prevent sizing to data in view.
+            #     self.chart._static_yrange = 'axis'
+
+            #     scale_y = 1.3 ** (dif.y() * -1 / 20)
+            #     self.setLimits(yMin=None, yMax=None)
+
+            #     # print(scale_y)
+            #     self.scaleBy((0, scale_y))
+
+            # SELECTION MODE
+            if (
+                self.state['mouseMode'] == ViewBox.RectMode
+                and axis is None
+            ):
+                # XXX: WHY
+                ev.accept()
+
             down_pos = ev.buttonDownPos()

@@ -516,23 +642,40 @@

                 self.select_box.mouse_drag_released(down_pos, pos)

-                # ax = QtCore.QRectF(down_pos, pos)
-                # ax = self.childGroup.mapRectFromParent(ax)
-                # print(ax)
+                ax = QtCore.QRectF(down_pos, pos)
+                ax = self.childGroup.mapRectFromParent(ax)

                 # this is the zoom transform cmd
-                # self.showAxRect(ax)
-
-                # self.axHistoryPointer += 1
-                # self.axHistory = self.axHistory[
-                # :self.axHistoryPointer] + [ax]
+                self.showAxRect(ax)
+
+                # axis history tracking
+                self.axHistoryPointer += 1
+                self.axHistory = self.axHistory[
+                    :self.axHistoryPointer] + [ax]
+
             else:
+                print('drag finish?')
                 self.select_box.set_pos(down_pos, pos)

                 # update shape of scale box
                 # self.updateScaleBox(ev.buttonDownPos(), ev.pos())
+                self.updateScaleBox(
+                    down_pos,
+                    ev.pos(),
+                )
+
+        # PANNING MODE
         else:
-            # default bevavior: click to pan view
+            # XXX: WHY
+            ev.accept()
+
+            self.start_ic()
+            # if self._ic is None:
+            #     self.chart.pause_all_feeds()
+            #     self._ic = trio.Event()
+
+            if axis == 1:
+                self.chart._static_yrange = 'axis'
+
             tr = self.childGroup.transform()
             tr = fn.invertQTransform(tr)

@@ -548,9 +691,14 @@

             self.sigRangeChangedManually.emit(self.state['mouseEnabled'])

-        elif button & QtCore.Qt.RightButton:
+            if ev.isFinish():
+                self.signal_ic()
+                # self._ic.set()
+                # self._ic = None
+                # self.chart.resume_all_feeds()

-            # right click zoom to center behaviour
+        # WEIRD "RIGHT-CLICK CENTER ZOOM" MODE
+        elif button & QtCore.Qt.RightButton:
+
             if self.state['aspectLocked'] is not False:
                 mask[0] = 0

@@ -571,6 +719,9 @@
             self.scaleBy(x=x, y=y, center=center)
             self.sigRangeChangedManually.emit(self.state['mouseEnabled'])

+        # XXX: WHY
+        ev.accept()
+
     # def mouseClickEvent(self, event: QtCore.QEvent) -> None:
     #     '''This routine is rerouted to an async handler.
     #     '''

@@ -585,3 +736,212 @@
         '''This routine is rerouted to an async handler.
         '''
         pass
+
+    def _set_yrange(
+        self,
+        *,
+
+        yrange: Optional[tuple[float, float]] = None,
+        range_margin: float = 0.06,
+        bars_range: Optional[tuple[int, int, int, int]] = None,
+
+        # flag to prevent triggering sibling charts from the same linked
+        # set from recursion errors.
+        autoscale_linked_plots: bool = False,
+        name: Optional[str] = None,
+
+    ) -> None:
+        '''
+        Set the viewable y-range based on embedded data.
+
+        This adds auto-scaling like zoom on the scroll wheel such
+        that data always fits nicely inside the current view of the
+        data set.
+
+        '''
+        name = self.name
+        # print(f'YRANGE ON {name}')
+        profiler = pg.debug.Profiler(
+            msg=f'`ChartView._set_yrange()`: `{name}`',
+            disabled=not pg_profile_enabled(),
+            ms_threshold=ms_slower_then,
+            delayed=True,
+        )
+        set_range = True
+        chart = self._chart
+
+        # view has been set in 'axis' mode
+        # meaning it can be panned and zoomed
+        # arbitrarily on the y-axis:
+        # - disable autoranging
+        # - remove any y range limits
+        if chart._static_yrange == 'axis':
+            set_range = False
+            self.setLimits(yMin=None, yMax=None)
+
+        # static y-range has been set likely by
+        # a specialized FSP configuration.
+        elif chart._static_yrange is not None:
+            ylow, yhigh = chart._static_yrange
+
+        # range passed in by caller, usually a
+        # maxmin detection algos inside the
+        # display loop for re-draw efficiency.
+        elif yrange is not None:
+            ylow, yhigh = yrange
+
+        if set_range:
+
+            # XXX: only compute the mxmn range
+            # if none is provided as input!
+            if not yrange:
+                # flow = chart._flows[name]
+                yrange = self._maxmin()
+
+            if yrange is None:
+                log.warning(f'No yrange provided for {name}!?')
+                print(f"WTF NO YRANGE {name}")
+                return
+
+            ylow, yhigh = yrange
+
+            profiler(f'callback ._maxmin(): {yrange}')
+
+            # view margins: stay within a % of the "true range"
+            diff = yhigh - ylow
+            ylow = ylow - (diff * range_margin)
+            yhigh = yhigh + (diff * range_margin)
+
+            # XXX: this often needs to be unset
+            # to get different view modes to operate
+            # correctly!
+            self.setLimits(
+                yMin=ylow,
+                yMax=yhigh,
+            )
+            self.setYRange(ylow, yhigh)
+            profiler(f'set limits: {(ylow, yhigh)}')
+
+        profiler.finish()
+
+    def enable_auto_yrange(
+        self,
+        src_vb: Optional[ChartView] = None,
+
+    ) -> None:
+        '''
+        Assign callback for rescaling y-axis automatically
+        based on data contents and ``ViewBox`` state.
+
+        '''
+        if src_vb is None:
+            src_vb = self
+
+        # splitter(s) resizing
+        src_vb.sigResized.connect(self._set_yrange)
+
+        # TODO: a smarter way to avoid calling this needlessly?
+        # 2 things i can think of:
+        # - register downsample-able graphics specially and only
+        #   iterate those.
+        # - only register this when certain downsampleable graphics are
+        #   "added to scene".
+        src_vb.sigRangeChangedManually.connect(
+            self.maybe_downsample_graphics
+        )
+
+        # mouse wheel doesn't emit XRangeChanged
+        src_vb.sigRangeChangedManually.connect(self._set_yrange)
+
+        # src_vb.sigXRangeChanged.connect(self._set_yrange)
+        # src_vb.sigXRangeChanged.connect(
+        #     self.maybe_downsample_graphics
+        # )
+
+    def disable_auto_yrange(self) -> None:
+
+        self.sigResized.disconnect(
+            self._set_yrange,
+        )
+        self.sigRangeChangedManually.disconnect(
+            self.maybe_downsample_graphics
+        )
+        self.sigRangeChangedManually.disconnect(
+            self._set_yrange,
+        )
+
+        # self.sigXRangeChanged.disconnect(self._set_yrange)
+        # self.sigXRangeChanged.disconnect(
+        #     self.maybe_downsample_graphics
+        # )
+
+    def x_uppx(self) -> float:
+        '''
+        Return the "number of x units" within a single
+        pixel currently being displayed for relevant
+        graphics items which are our children.
+
+        '''
+        graphics = [f.graphics for f in self._chart._flows.values()]
+        if not graphics:
+            return 0
+
+        for graphic in graphics:
+            xvec = graphic.pixelVectors()[0]
+            if xvec:
+                return xvec.x()
+        else:
+            return 0
+
+    def maybe_downsample_graphics(
+        self,
+        autoscale_overlays: bool = True,
+    ):
+
+        profiler = pg.debug.Profiler(
+            msg=f'ChartView.maybe_downsample_graphics() for {self.name}',
+            disabled=not pg_profile_enabled(),
+
+            # XXX: important to avoid not seeing underlying
+            # ``.update_graphics_from_flow()`` nested profiling likely
+            # due to the way delaying works and garbage collection of
+            # the profiler in the delegated method calls.
+            ms_threshold=6,
+            # ms_threshold=ms_slower_then,
+        )
+
+        # TODO: a faster single-loop-iterator way of doing this XD
+        chart = self._chart
+        linked = self.linkedsplits
+        plots = linked.subplots | {chart.name: chart}
+        for chart_name, chart in plots.items():
+            for name, flow in chart._flows.items():
+
+                if (
+                    not flow.render
+
+                    # XXX: super important to be aware of this.
+                    # or not flow.graphics.isVisible()
+                ):
+                    continue
+
+                # pass in no array which will read and render from the last
+                # passed array (normally provided by the display loop.)
+                chart.update_graphics_from_flow(
+                    name,
+                    use_vr=True,
+                )
+
+                # for each overlay on this chart auto-scale the
+                # y-range to max-min values.
+                if autoscale_overlays:
+                    overlay = chart.pi_overlay
+                    if overlay:
+                        for pi in overlay.overlays:
+                            pi.vb._set_yrange(
+                                # TODO: get the range once up front...
+                                # bars_range=br,
+                            )
+                        profiler('autoscaled linked plots')
+
+                profiler(f'<{chart_name}>.update_graphics_from_flow({name})')

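A worked instance of the 6% ``range_margin`` padding applied in ``_set_yrange()`` above: the view keeps 6% of the data's true range as empty space on each side.

range_margin = 0.06
ylow, yhigh = 100.0, 110.0
diff = yhigh - ylow                    # 10.0
ylow = ylow - (diff * range_margin)    # 99.4
yhigh = yhigh + (diff * range_margin)  # 110.6
print(ylow, yhigh)
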
@@ -199,7 +199,14 @@ class LevelLabel(YAxisLabel):
         elif self._orient_v == 'top':
             lp, rp = rect.bottomLeft(), rect.bottomRight()

-        p.drawLine(lp.x(), lp.y(), rp.x(), rp.y())
+        p.drawLine(
+            *map(int, [
+                lp.x(),
+                lp.y(),
+                rp.x(),
+                rp.y(),
+            ])
+        )

     def highlight(self, pen) -> None:
         self._pen = pen

@@ -34,7 +34,7 @@ from ._style import (


 class Label:
-    """
+    '''
     A plain ol' "scene label" using an underlying ``QGraphicsTextItem``.

     After hacking for many days on multiple "label" systems inside

@@ -50,10 +50,8 @@ class Label:
     small, re-usable label components that can actually be used to build
     production grade UIs...

-    """
-
+    '''
     def __init__(
-
         self,
         view: pg.ViewBox,
         fmt_str: str,

@@ -63,6 +61,7 @@ class Label:
         font_size: str = 'small',
         opacity: float = 1,
         fields: dict = {},
+        parent: pg.GraphicsObject = None,
         update_on_range_change: bool = True,

     ) -> None:

@@ -71,11 +70,13 @@
         self._fmt_str = fmt_str
         self._view_xy = QPointF(0, 0)

-        self.scene_anchor: Optional[Callable[..., QPointF]] = None
+        self.scene_anchor: Optional[
+            Callable[..., QPointF]
+        ] = None

         self._x_offset = x_offset

-        txt = self.txt = QtWidgets.QGraphicsTextItem()
+        txt = self.txt = QtWidgets.QGraphicsTextItem(parent=parent)
         txt.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)

         vb.scene().addItem(txt)

@@ -86,7 +87,6 @@ class Label:
         )
         dpi_font.configure_to_dpi()
         txt.setFont(dpi_font.font)
-
         txt.setOpacity(opacity)

         # register viewbox callbacks

@@ -109,7 +109,7 @@ class Label:
         # self.setTextInteractionFlags(QtGui.Qt.TextEditorInteraction)

     @property
-    def color(self):
+    def color(self) -> str:
         return self._hcolor

     @color.setter

@@ -118,9 +118,10 @@
         self._hcolor = color

     def update(self) -> None:
-        '''Update this label either by invoking its
-        user defined anchoring function, or by positioning
-        to the last recorded data view coordinates.
+        '''
+        Update this label either by invoking its user defined anchoring
+        function, or by positioning to the last recorded data view
+        coordinates.
+
         '''
         # move label in scene coords to desired position

@@ -234,7 +235,8 @@


 class FormatLabel(QLabel):
-    '''Kinda similar to above but using the widget apis.
+    '''
+    Kinda similar to above but using the widget apis.

     '''
     def __init__(

@@ -273,8 +275,8 @@ class FormatLabel(QLabel):
             QSizePolicy.Expanding,
             QSizePolicy.Expanding,
         )
-        self.setAlignment(Qt.AlignVCenter
-                          | Qt.AlignLeft
-        )
+        self.setAlignment(
+            Qt.AlignVCenter | Qt.AlignLeft
+        )
         self.setText(self.fmt_str)

@@ -20,7 +20,7 @@ Lines for orders, alerts, L2.
 """
 from functools import partial
 from math import floor
-from typing import Tuple, Optional, List, Callable
+from typing import Optional, Callable

 import pyqtgraph as pg
 from pyqtgraph import Point, functions as fn

@@ -29,10 +29,8 @@ from PyQt5.QtCore import QPointF

 from ._annotate import qgo_draw_markers, LevelMarker
 from ._anchors import (
-    marker_right_points,
     vbr_left,
     right_axis,
-    # pp_tight_and_right, # wanna keep it straight in the long run
     gpath_pin,
 )
 from ..calc import humanize

@@ -104,8 +102,8 @@ class LevelLine(pg.InfiniteLine):

         # list of labels anchored at one of the 2 line endpoints
         # inside the viewbox
-        self._labels: List[Label] = []
-        self._markers: List[(int, Label)] = []
+        self._labels: list[Label] = []
+        self._markers: list[(int, Label)] = []

         # whenever this line is moved trigger label updates
         self.sigPositionChanged.connect(self.on_pos_change)

@@ -124,7 +122,7 @@
         self._y_incr_mult = 1 / chart.linked.symbol.tick_size
         self._right_end_sc: float = 0

-    def txt_offsets(self) -> Tuple[int, int]:
+    def txt_offsets(self) -> tuple[int, int]:
         return 0, 0

     @property

@@ -315,17 +313,6 @@
         # TODO: enter labels edit mode
         print(f'double click {ev}')

-    def right_point(
-        self,
-    ) -> float:
-
-        chart = self._chart
-        l1_len = chart._max_l1_line_len
-        ryaxis = chart.getAxis('right')
-        up_to_l1_sc = ryaxis.pos().x() - l1_len
-
-        return up_to_l1_sc
-
     def paint(
         self,

@@ -334,17 +321,18 @@
         w: QtWidgets.QWidget

     ) -> None:
-        """Core paint which we override (yet again)
+        '''
+        Core paint which we override (yet again)
         from pg..

-        """
+        '''
         p.setRenderHint(p.Antialiasing)

         # these are in viewbox coords
         vb_left, vb_right = self._endPoints
         vb = self.getViewBox()

-        line_end, marker_right, r_axis_x = marker_right_points(self._chart)
+        line_end, marker_right, r_axis_x = self._chart.marker_right_points()

         if self.show_markers and self.markers:

@@ -410,7 +398,7 @@
     def scene_endpoint(self) -> QPointF:

         if not self._right_end_sc:
-            line_end, _, _ = marker_right_points(self._chart)
+            line_end, _, _ = self._chart.marker_right_points()
             self._right_end_sc = line_end - 10

         return QPointF(self._right_end_sc, self.scene_y())

@@ -421,23 +409,23 @@

     ) -> QtWidgets.QGraphicsPathItem:

+        self._marker = path
+        self._marker.setPen(self.currentPen)
+        self._marker.setBrush(fn.mkBrush(self.currentPen.color()))
         # add path to scene
         self.getViewBox().scene().addItem(path)

-        self._marker = path
-
-        rsc = self.right_point()
-
-        self._marker.setPen(self.currentPen)
-        self._marker.setBrush(fn.mkBrush(self.currentPen.color()))
+        # place to just-left of L1 labels
+        rsc = self._chart.pre_l1_xs()[0]

         path.setPos(QPointF(rsc, self.scene_y()))

         return path

     def hoverEvent(self, ev):
-        """Mouse hover callback.
+        '''
+        Mouse hover callback.

-        """
+        '''
         cur = self._chart.linked.cursor

         # hovered

@@ -613,7 +601,8 @@ def order_line(
     **line_kwargs,

 ) -> LevelLine:
-    '''Convenience routine to add a line graphic representing an order
+    '''
+    Convenience routine to add a line graphic representing an order
     execution submitted to the EMS via the chart's "order mode".

     '''

@@ -688,7 +677,6 @@ def order_line(

         return f'{account}: '

-
     label.fields = {
         'size': size,
         'size_digits': 0,

@@ -17,40 +17,40 @@
 Super fast OHLC sampling graphics types.

 """
-from typing import List, Optional, Tuple
+from __future__ import annotations
+from typing import (
+    Optional,
+    TYPE_CHECKING,
+)

 import numpy as np
 import pyqtgraph as pg
-from numba import njit, float64, int64  # , optional
 from PyQt5 import QtCore, QtGui, QtWidgets
 from PyQt5.QtCore import QLineF, QPointF
-# from numba import types as ntypes
-# from ..data._source import numba_ohlc_dtype
+from PyQt5.QtGui import QPainterPath

-from .._profile import pg_profile_enabled
+from .._profile import pg_profile_enabled, ms_slower_then
 from ._style import hcolor
+from ..log import get_logger

+if TYPE_CHECKING:
+    from ._chart import LinkedSplits


-def _mk_lines_array(
-    data: List,
-    size: int,
-    elements_step: int = 6,
-) -> np.ndarray:
-    """Create an ndarray to hold lines graphics info.
-
-    """
-    return np.zeros_like(
-        data,
-        shape=(int(size), elements_step),
-        dtype=object,
-    )
+log = get_logger(__name__)


-def lines_from_ohlc(
+def bar_from_ohlc_row(
     row: np.ndarray,
-    w: float
-) -> Tuple[QLineF]:
+    # 0.5 is no overlap between arms, 1.0 is full overlap
+    w: float = 0.43,
+
+) -> tuple[QLineF]:
+    '''
+    Generate the minimal ``QLineF`` lines to construct a single
+    OHLC "bar" for use in the "last datum" of a series.
+
+    '''
     open, high, low, close, index = row[
         ['open', 'high', 'low', 'close', 'index']]
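The body of ``bar_from_ohlc_row()`` is elided between hunks; the sketch below re-derives the likely three-line bar geometry from the 6-vertex layout used by the numba ``path_arrays_from_ohlc()`` routine elsewhere in this diff, so treat the exact arm placement as an inference rather than the committed code:

    import numpy as np
    from PyQt5.QtCore import QLineF

    def bar_lines_sketch(row: np.ndarray, w: float = 0.43) -> list:
        # pull scalar floats out of the structured row
        i = float(row['index'])
        o_, h, l, c_ = (
            float(row[k]) for k in ('open', 'high', 'low', 'close')
        )
        hl = QLineF(i, l, i, h)           # vertical high/low "body"
        o = QLineF(i - w, o_, i, o_)      # left "open" arm
        c = QLineF(i, c_, i + w, c_)      # right "close" arm
        return [hl, o, c]

    # one synthetic structured OHLC datum
    dtype = np.dtype([
        ('index', 'i8'), ('open', 'f8'), ('high', 'f8'),
        ('low', 'f8'), ('close', 'f8'),
    ])
    row = np.array([(0, 100., 105., 99., 104.)], dtype=dtype)[0]
    hl, o, c = bar_lines_sketch(row)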
@@ -81,302 +81,37 @@ def lines_from_ohlc(
     return [hl, o, c]


-@njit(
-    # TODO: for now need to construct this manually for readonly arrays, see
-    # https://github.com/numba/numba/issues/4511
-    # ntypes.Tuple((float64[:], float64[:], float64[:]))(
-    #     numba_ohlc_dtype[::1],  # contiguous
-    #     int64,
-    #     optional(float64),
-    # ),
-    nogil=True
-)
-def path_arrays_from_ohlc(
-    data: np.ndarray,
-    start: int64,
-    bar_gap: float64 = 0.43,
-) -> np.ndarray:
-    """Generate an array of lines objects from input ohlc data.
-
-    """
-    size = int(data.shape[0] * 6)
-
-    x = np.zeros(
-        # data,
-        shape=size,
-        dtype=float64,
-    )
-    y, c = x.copy(), x.copy()
-
-    # TODO: report bug for assert @
-    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
-    for i, q in enumerate(data[start:], start):
-
-        # TODO: ask numba why this doesn't work..
-        # open, high, low, close, index = q[
-        #     ['open', 'high', 'low', 'close', 'index']]
-
-        open = q['open']
-        high = q['high']
-        low = q['low']
-        close = q['close']
-        index = float64(q['index'])
-
-        istart = i * 6
-        istop = istart + 6
-
-        # x,y detail the 6 points which connect all vertexes of a ohlc bar
-        x[istart:istop] = (
-            index - bar_gap,
-            index,
-            index,
-            index,
-            index,
-            index + bar_gap,
-        )
-        y[istart:istop] = (
-            open,
-            open,
-            low,
-            high,
-            close,
-            close,
-        )
-
-        # specifies that the first edge is never connected to the
-        # prior bars last edge thus providing a small "gap"/"space"
-        # between bars determined by ``bar_gap``.
-        c[istart:istop] = (0, 1, 1, 1, 1, 1)
-
-    return x, y, c
-
-
-def gen_qpath(
-    data,
-    start,  # XXX: do we need this?
-    w,
-) -> QtGui.QPainterPath:
-
-    profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
-
-    x, y, c = path_arrays_from_ohlc(data, start, bar_gap=w)
-    profiler("generate stream with numba")
-
-    # TODO: numba the internals of this!
-    path = pg.functions.arrayToQPath(x, y, connect=c)
-    profiler("generate path with arrayToQPath")
-
-    return path
-
-
 class BarItems(pg.GraphicsObject):
-    """Price range bars graphics rendered from a OHLC sequence.
-    """
-    sigPlotChanged = QtCore.pyqtSignal(object)
-
-    # 0.5 is no overlap between arms, 1.0 is full overlap
-    w: float = 0.43
-
+    '''
+    "Price range" bars graphics rendered from a OHLC sampled sequence.
+
+    '''
     def __init__(
         self,
-        # scene: 'QGraphicsScene',  # noqa
+        linked: LinkedSplits,
         plotitem: 'pg.PlotItem',  # noqa
         pen_color: str = 'bracket',
+        last_bar_color: str = 'bracket',
+
+        name: Optional[str] = None,
+
     ) -> None:
         super().__init__()
+        self.linked = linked
-        # XXX: for the mega-lulz increasing width here increases draw latency...
-        # so probably don't do it until we figure that out.
+        # XXX: for the mega-lulz increasing width here increases draw
+        # latency... so probably don't do it until we figure that out.
+        self._color = pen_color
         self.bars_pen = pg.mkPen(hcolor(pen_color), width=1)
+        self.last_bar_pen = pg.mkPen(hcolor(last_bar_color), width=2)
+        self._name = name

-        # NOTE: this prevents redraws on mouse interaction which is
-        # a huge boon for avg interaction latency.
-
-        # TODO: one question still remaining is if this makes trasform
-        # interactions slower (such as zooming) and if so maybe if/when
-        # we implement a "history" mode for the view we disable this in
-        # that mode?
         self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
+        self.path = QPainterPath()
+        self._last_bar_lines: Optional[tuple[QLineF, ...]] = None

-        # not sure if this is actually impoving anything but figured it
-        # was worth a shot:
-        # self.path.reserve(int(100e3 * 6))
+    def x_uppx(self) -> int:
+        # we expect the downsample curve report this.
+        return 0

-        self.path = QtGui.QPainterPath()
-
-        self._pi = plotitem
-
-        self._xrange: Tuple[int, int]
-        self._yrange: Tuple[float, float]
-
-        # TODO: don't render the full backing array each time
-        # self._path_data = None
-        self._last_bar_lines: Optional[Tuple[QLineF, ...]] = None
-
-        # track the current length of drawable lines within the larger array
-        self.start_index: int = 0
-        self.stop_index: int = 0
-
-    def draw_from_data(
-        self,
-        data: np.ndarray,
-        start: int = 0,
-    ) -> QtGui.QPainterPath:
-        """Draw OHLC datum graphics from a ``np.ndarray``.
-
-        This routine is usually only called to draw the initial history.
-        """
-        hist, last = data[:-1], data[-1]
-
-        self.path = gen_qpath(hist, start, self.w)
-
-        # save graphics for later reference and keep track
-        # of current internal "last index"
-        # self.start_index = len(data)
-        index = data['index']
-        self._xrange = (index[0], index[-1])
-        self._yrange = (
-            np.nanmax(data['high']),
-            np.nanmin(data['low']),
-        )
-
-        # up to last to avoid double draw of last bar
-        self._last_bar_lines = lines_from_ohlc(last, self.w)
-
-        # trigger render
-        # https://doc.qt.io/qt-5/qgraphicsitem.html#update
-        self.update()
-
-        return self.path
-
-    def update_from_array(
-        self,
-        array: np.ndarray,
-        just_history=False,
-    ) -> None:
-        """Update the last datum's bar graphic from input data array.
-
-        This routine should be interface compatible with
-        ``pg.PlotCurveItem.setData()``. Normally this method in
-        ``pyqtgraph`` seems to update all the data passed to the
-        graphics object, and then update/rerender, but here we're
-        assuming the prior graphics havent changed (OHLC history rarely
-        does) so this "should" be simpler and faster.
-
-        This routine should be made (transitively) as fast as possible.
-        """
-        # index = self.start_index
-        istart, istop = self._xrange
-
-        index = array['index']
-        first_index, last_index = index[0], index[-1]
-
-        # length = len(array)
-        prepend_length = istart - first_index
-        append_length = last_index - istop
-
-        flip_cache = False
-
-        # TODO: allow mapping only a range of lines thus
-        # only drawing as many bars as exactly specified.
-        if prepend_length:
-
-            # new history was added and we need to render a new path
-            new_bars = array[:prepend_length]
-            prepend_path = gen_qpath(new_bars, 0, self.w)
-
-            # XXX: SOMETHING IS MAYBE FISHY HERE what with the old_path
-            # y value not matching the first value from
-            # array[prepend_length + 1] ???
-
-            # update path
-            old_path = self.path
-            self.path = prepend_path
-            self.path.addPath(old_path)
-
-            # trigger redraw despite caching
-            self.prepareGeometryChange()
-
-        if append_length:
-            # generate new lines objects for updatable "current bar"
-            self._last_bar_lines = lines_from_ohlc(array[-1], self.w)
-
-            # generate new graphics to match provided array
-            # path appending logic:
-            # we need to get the previous "current bar(s)" for the time step
-            # and convert it to a sub-path to append to the historical set
-            # new_bars = array[istop - 1:istop + append_length - 1]
-            new_bars = array[-append_length - 1:-1]
-            append_path = gen_qpath(new_bars, 0, self.w)
-            self.path.moveTo(float(istop - self.w), float(new_bars[0]['open']))
-            self.path.addPath(append_path)
-
-            # trigger redraw despite caching
-            self.prepareGeometryChange()
-            self.setCacheMode(QtWidgets.QGraphicsItem.NoCache)
-            flip_cache = True
-
-        self._xrange = first_index, last_index
-
-        # last bar update
-        i, o, h, l, last, v = array[-1][
-            ['index', 'open', 'high', 'low', 'close', 'volume']
-        ]
-        # assert i == self.start_index - 1
-        # assert i == last_index
-        body, larm, rarm = self._last_bar_lines
-
-        # XXX: is there a faster way to modify this?
-        rarm.setLine(rarm.x1(), last, rarm.x2(), last)
-
-        # writer is responsible for changing open on "first" volume of bar
-        larm.setLine(larm.x1(), o, larm.x2(), o)
-
-        if l != h:  # noqa
-
-            if body is None:
-                body = self._last_bar_lines[0] = QLineF(i, l, i, h)
-            else:
-                # update body
-                body.setLine(i, l, i, h)
-
-            # XXX: pretty sure this is causing an issue where the bar has
-            # a large upward move right before the next sample and the body
-            # is getting set to None since the next bar is flat but the shm
-            # array index update wasn't read by the time this code runs. Iow
-            # we're doing this removal of the body for a bar index that is
-            # now out of date / from some previous sample. It's weird
-            # though because i've seen it do this to bars i - 3 back?
-
-        self.update()
-
-        if flip_cache:
-            self.setCacheMode(QtWidgets.QGraphicsItem.DeviceCoordinateCache)
-
-    def paint(
-        self,
-        p: QtGui.QPainter,
-        opt: QtWidgets.QStyleOptionGraphicsItem,
-        w: QtWidgets.QWidget
-    ) -> None:
-
-        profiler = pg.debug.Profiler(disabled=not pg_profile_enabled())
-
-        # p.setCompositionMode(0)
-        p.setPen(self.bars_pen)
-
-        # TODO: one thing we could try here is pictures being drawn of
-        # a fixed count of bars such that based on the viewbox indices we
-        # only draw the "rounded up" number of "pictures worth" of bars
-        # as is necesarry for what's in "view". Not sure if this will
-        # lead to any perf gains other then when zoomed in to less bars
-        # in view.
-        p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
-        profiler('draw last bar')
-
-        p.drawPath(self.path)
-        profiler('draw history path')
-
     def boundingRect(self):
         # Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
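The removed ``update_from_array()`` above handled history backfill by swapping paths, since ``QPainterPath`` has no prepend operation: build a fresh path starting from the new history, then append the old one. A standalone sketch of that trick:

    from PyQt5.QtGui import QPainterPath

    def prepend_history(
        old: QPainterPath,
        new_history: QPainterPath,
    ) -> QPainterPath:
        # copy-construct from the newly backfilled history so its
        # segments come first, then append the existing path
        combined = QPainterPath(new_history)
        combined.addPath(old)
        return combined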
@@ -395,12 +130,17 @@ class BarItems(pg.GraphicsObject):
         # apparently this a lot faster says the docs?
         # https://doc.qt.io/qt-5/qpainterpath.html#controlPointRect
         hb = self.path.controlPointRect()
-        hb_tl, hb_br = hb.topLeft(), hb.bottomRight()
+        hb_tl, hb_br = (
+            hb.topLeft(),
+            hb.bottomRight(),
+        )

         # need to include last bar height or BR will be off
         mx_y = hb_br.y()
         mn_y = hb_tl.y()

+        last_lines = self._last_bar_lines
+        if last_lines:
             body_line = self._last_bar_lines[0]
             if body_line:
                 mx_y = max(mx_y, max(body_line.y1(), body_line.y2()))

@@ -421,3 +161,90 @@ class BarItems(pg.GraphicsObject):
             )
         )

+    def paint(
+        self,
+        p: QtGui.QPainter,
+        opt: QtWidgets.QStyleOptionGraphicsItem,
+        w: QtWidgets.QWidget
+
+    ) -> None:
+
+        profiler = pg.debug.Profiler(
+            disabled=not pg_profile_enabled(),
+            ms_threshold=ms_slower_then,
+        )
+
+        # p.setCompositionMode(0)
+
+        # TODO: one thing we could try here is pictures being drawn of
+        # a fixed count of bars such that based on the viewbox indices we
+        # only draw the "rounded up" number of "pictures worth" of bars
+        # as is necesarry for what's in "view". Not sure if this will
+        # lead to any perf gains other then when zoomed in to less bars
+        # in view.
+        p.setPen(self.last_bar_pen)
+        if self._last_bar_lines:
+            p.drawLines(*tuple(filter(bool, self._last_bar_lines)))
+            profiler('draw last bar')
+
+        p.setPen(self.bars_pen)
+        p.drawPath(self.path)
+        profiler(f'draw history path: {self.path.capacity()}')
+
+    def draw_last_datum(
+        self,
+        path: QPainterPath,
+        src_data: np.ndarray,
+        render_data: np.ndarray,
+        reset: bool,
+        array_key: str,
+
+        fields: list[str] = [
+            'index',
+            'open',
+            'high',
+            'low',
+            'close',
+        ],
+
+    ) -> None:
+
+        # relevant fields
+        ohlc = src_data[fields]
+        last_row = ohlc[-1:]
+
+        # individual values
+        last_row = i, o, h, l, last = ohlc[-1]
+
+        # generate new lines objects for updatable "current bar"
+        self._last_bar_lines = bar_from_ohlc_row(last_row)
+
+        # assert i == graphics.start_index - 1
+        # assert i == last_index
+        body, larm, rarm = self._last_bar_lines
+
+        # XXX: is there a faster way to modify this?
+        rarm.setLine(rarm.x1(), last, rarm.x2(), last)
+
+        # writer is responsible for changing open on "first" volume of bar
+        larm.setLine(larm.x1(), o, larm.x2(), o)
+
+        if l != h:  # noqa
+
+            if body is None:
+                body = self._last_bar_lines[0] = QLineF(i, l, i, h)
+            else:
+                # update body
+                body.setLine(i, l, i, h)
+
+            # XXX: pretty sure this is causing an issue where the
+            # bar has a large upward move right before the next
+            # sample and the body is getting set to None since the
+            # next bar is flat but the shm array index update wasn't
+            # read by the time this code runs. Iow we're doing this
+            # removal of the body for a bar index that is now out of
+            # date / from some previous sample. It's weird though
+            # because i've seen it do this to bars i - 3 back?
+
+        return ohlc['index'], ohlc['close']
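For reference, the field-subset indexing used by ``draw_last_datum()`` can be tried in isolation; this sketch uses a synthetic dtype in place of the real shm array:

    import numpy as np

    # pull a sub-structured view and then the last row's scalars,
    # mirroring the ``src_data[fields]`` / ``ohlc[-1]`` pattern above
    fields = ['index', 'open', 'high', 'low', 'close']
    dtype = np.dtype([(f, 'f8') for f in fields])
    src = np.zeros(3, dtype=dtype)

    ohlc = src[fields]        # structured view of just these fields
    i, o, h, l, c = ohlc[-1]  # unpack the last row's scalar values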
@@ -36,7 +36,7 @@ from PyQt5.QtWidgets import (

 from ._forms import (
     # FontScaledDelegate,
-    FontAndChartAwareLineEdit,
+    Edit,
 )


@@ -97,7 +97,7 @@ class Selection(Field[DataType], Generic[DataType]):
 class Edit(Field[DataType], Generic[DataType]):
     '''An edit field which takes a number.
     '''
-    widget_factory = FontAndChartAwareLineEdit
+    widget_factory = Edit


 class AllocatorPane(BaseModel):
@@ -0,0 +1,648 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+'''
+Charting overlay helpers.
+
+'''
+from typing import Callable, Optional
+
+from pyqtgraph.Qt.QtCore import (
+    # QObject,
+    # Signal,
+    Qt,
+    # QEvent,
+)
+
+from pyqtgraph.graphicsItems.AxisItem import AxisItem
+from pyqtgraph.graphicsItems.ViewBox import ViewBox
+from pyqtgraph.graphicsItems.GraphicsWidget import GraphicsWidget
+from pyqtgraph.graphicsItems.PlotItem.PlotItem import PlotItem
+from pyqtgraph.Qt.QtCore import QObject, Signal, QEvent
+from pyqtgraph.Qt.QtWidgets import QGraphicsGridLayout, QGraphicsLinearLayout
+
+from ._interaction import ChartView
+
+__all__ = ["PlotItemOverlay"]
+
+
+# Define the layout "position" indices as to be passed
+# to a ``QtWidgets.QGraphicsGridlayout.addItem()`` call:
+# https://doc.qt.io/qt-5/qgraphicsgridlayout.html#addItem
+# This was pulled from the internals of ``PlotItem.setAxisItem()``.
+_axes_layout_indices: dict[str] = {
+    # row incremented axes
+    'top': (1, 1),
+    'bottom': (3, 1),
+
+    # view is @ (2, 1)
+
+    # column incremented axes
+    'left': (2, 0),
+    'right': (2, 2),
+}
+# NOTE: To clarify this indexing, ``PlotItem.__init__()`` makes a grid
+# with dimensions 4x3 and puts the ``ViewBox`` at postiion (2, 1) (aka
+# row=2, col=1) in the grid layout since row (0, 1) is reserved for
+# a title label and row 1 is for any potential "top" axis. Column 1
+# is the "middle" (since 3 columns) and is where the plot/vb is placed.
+
+
+class ComposedGridLayout:
+    '''
+    List-like interface to managing a sequence of overlayed
+    ``PlotItem``s in the form:
+
+    |    |     |    |    | top0     |    |    |     |    |
+    |    |     |    |    | top1     |    |    |     |    |
+    |    |     |    |    | ...      |    |    |     |    |
+    |    |     |    |    | topN     |    |    |     |    |
+    | lN | ... | l1 | l0 | ViewBox  | r0 | r1 | ... | rN |
+    |    |     |    |    | bottom0  |    |    |     |    |
+    |    |     |    |    | bottom1  |    |    |     |    |
+    |    |     |    |    | ...      |    |    |     |    |
+    |    |     |    |    | bottomN  |    |    |     |    |
+
+    Where the index ``i`` in the sequence specifies the index
+    ``<axis_name>i`` in the layout.
+
+    The ``item: PlotItem`` passed to the constructor's grid layout is
+    used verbatim as the "main plot" who's view box is give precedence
+    for input handling. The main plot's axes are removed from it's
+    layout and placed in the surrounding exterior layouts to allow for
+    re-ordering if desired.
+
+    '''
+    def __init__(
+        self,
+        item: PlotItem,
+        grid: QGraphicsGridLayout,
+        reverse: bool = False,  # insert items to the "center"
+
+    ) -> None:
+        self.items: list[PlotItem] = []
+        # self.grid = grid
+        self.reverse = reverse
+
+        # TODO: use a ``bidict`` here?
+        self._pi2axes: dict[
+            int,
+            dict[str, AxisItem],
+        ] = {}
+
+        # TODO: better name?
+        # construct surrounding layouts for placing outer axes and
+        # their legends and title labels.
+        self.sides: dict[
+            str,
+            tuple[QGraphicsLinearLayout, list[AxisItem]]
+        ] = {}
+
+        for name, pos in _axes_layout_indices.items():
+            layout = QGraphicsLinearLayout()
+            self.sides[name] = (layout, [])
+
+            layout.setContentsMargins(0, 0, 0, 0)
+            layout.setSpacing(0)
+
+            if name in ('top', 'bottom'):
+                orient = Qt.Vertical
+            elif name in ('left', 'right'):
+                orient = Qt.Horizontal
+
+            layout.setOrientation(orient)
+
+        self.insert(0, item)
+
+        # insert surrounding linear layouts into the parent pi's layout
+        # such that additional axes can be appended arbitrarily without
+        # having to expand or resize the parent's grid layout.
+        for name, (linlayout, axes) in self.sides.items():
+
+            # TODO: do we need this?
+            # axis should have been removed during insert above
+            index = _axes_layout_indices[name]
+            axis = item.layout.itemAt(*index)
+            if axis and axis.isVisible():
+                assert linlayout.itemAt(0) is axis
+
+            # item.layout.removeItem(axis)
+            item.layout.addItem(linlayout, *index)
+            layout = item.layout.itemAt(*index)
+            assert layout is linlayout
+
+    def _register_item(
+        self,
+        index: int,
+        plotitem: PlotItem,
+
+    ) -> None:
+        for name, axis_info in plotitem.axes.items():
+            axis = axis_info['item']
+            # register this plot's (maybe re-placed) axes for lookup.
+            # print(f'inserting {name}:{axis} to index {index}')
+            self._pi2axes.setdefault(name, {})[index] = axis
+
+        # enter plot into list for index tracking
+        self.items.insert(index, plotitem)
+
+    def insert(
+        self,
+        index: int,
+        plotitem: PlotItem,
+
+    ) -> (int, int):
+        '''
+        Place item at index by inserting all axes into the grid
+        at list-order appropriate position.
+
+        '''
+        if index < 0:
+            raise ValueError('`insert()` only supports an index >= 0')
+
+        # add plot's axes in sequence to the embedded linear layouts
+        # for each "side" thus avoiding graphics collisions.
+        for name, axis_info in plotitem.axes.copy().items():
+            linlayout, axes = self.sides[name]
+            axis = axis_info['item']
+
+            if axis in axes:
+                # TODO: re-order using ``.pop()`` ?
+                ValueError(f'{axis} is already in {name} layout!?')
+
+            # linking sanity
+            axis_view = axis.linkedView()
+            assert axis_view is plotitem.vb
+
+            if (
+                not axis.isVisible()
+
+                # XXX: we never skip moving the axes for the *first*
+                # plotitem inserted (even if not shown) since we need to
+                # move all the hidden axes into linear sub-layouts for
+                # that "central" plot in the overlay. Also if we don't
+                # do it there's weird geomoetry calc offsets that make
+                # view coords slightly off somehow .. smh
+                and not len(self.items) == 0
+            ):
+                continue
+
+            # XXX: Remove old axis? No, turns out we don't need this?
+            # DON'T unlink it since we the original ``ViewBox``
+            # to still drive it B)
+            # popped = plotitem.removeAxis(name, unlink=False)
+            # assert axis is popped
+
+            # invert insert index for layouts which are
+            # not-left-to-right, top-to-bottom insert oriented
+            insert_index = index
+            if name in ('top', 'left'):
+                insert_index = min(len(axes) - index, 0)
+                assert insert_index >= 0
+
+            linlayout.insertItem(insert_index, axis)
+            axes.insert(index, axis)
+
+        self._register_item(index, plotitem)
+
+        return index
+
+    def append(
+        self,
+        item: PlotItem,
+
+    ) -> (int, int):
+        '''
+        Append item's axes at indexes which put its axes "outside"
+        previously overlayed entries.
+
+        '''
+        # for left and bottom axes we have to first remove
+        # items and re-insert to maintain a list-order.
+        return self.insert(len(self.items), item)
+
+    def get_axis(
+        self,
+        plot: PlotItem,
+        name: str,
+
+    ) -> Optional[AxisItem]:
+        '''
+        Retrieve the named axis for overlayed ``plot`` or ``None``
+        if axis for that name is not shown.
+
+        '''
+        index = self.items.index(plot)
+        named = self._pi2axes[name]
+        return named.get(index)
+
+    def pop(
+        self,
+        item: PlotItem,
+
+    ) -> PlotItem:
+        '''
+        Remove item and restack all axes in list-order.
+
+        '''
+        raise NotImplementedError
+
+
+# Unimplemented features TODO:
+# - 'A' (autobtn) should relay to all views
+# - context menu single handler + relay?
+# - layout unwind and re-pack for 'left' and 'top' axes
+# - add labels to layout if detected in source ``PlotItem``
+
+# UX nice-to-have TODO:
+# - optional "focussed" view box support for view boxes
+#   that have custom input handlers (eg. you might want to
+#   scale the view to some "focussed" data view and have overlayed
+#   viewboxes only respond to relayed events.)
+# - figure out how to deal with menu raise events for multi-viewboxes.
+#   (we might want to add a different menu which specs the name of the
+#   view box currently being handled?
+# - allow selection of a particular view box by interacting with its
+#   axis?
+
+
+# TODO: we might want to enabled some kind of manual flag to disable
+# this method wrapping during type creation? As example a user could
+# definitively decide **not** to enable broadcasting support by
+# setting something like ``ViewBox.disable_relays = True``?
+def mk_relay_method(
+
+    signame: str,
+    slot: Callable[
+        [ViewBox,
+         'QEvent',
+         Optional[AxisItem]],
+        None,
+    ],
+
+) -> Callable[
+    [
+        ViewBox,
+        # lol, there isn't really a generic type thanks
+        # to the rewrite of Qt's event system XD
+        'QEvent',
+
+        'Optional[AxisItem]',
+        'Optional[ViewBox]',  # the ``relayed_from`` arg we provide
+    ],
+    None,
+]:
+
+    def maybe_broadcast(
+        vb: 'ViewBox',
+        ev: 'QEvent',
+        axis: 'Optional[int]' = None,
+        relayed_from: 'ViewBox' = None,
+
+    ) -> None:
+        '''
+        (soon to be) Decorator which makes an event handler
+        "broadcastable" to overlayed ``GraphicsWidget``s.
+
+        Adds relay signals based on the decorated handler's name
+        and conducts a signal broadcast of the relay signal if there
+        are consumers registered.
+
+        '''
+        # When no relay source has been set just bypass all
+        # the broadcast machinery.
+        if vb.event_relay_source is None:
+            ev.accept()
+            return slot(
+                vb,
+                ev,
+                axis=axis,
+            )
+
+        if relayed_from:
+            assert axis is None
+
+            # this is a relayed event and should be ignored (so it does not
+            # halt/short circuit the graphicscene loop). Further the
+            # surrounding handler for this signal must be allowed to execute
+            # and get processed by **this consumer**.
+            # print(f'{vb.name} rx relayed from {relayed_from.name}')
+            ev.ignore()
+
+            return slot(
+                vb,
+                ev,
+                axis=axis,
+            )
+
+        if axis is not None:
+            # print(f'{vb.name} handling axis event:\n{str(ev)}')
+            ev.accept()
+            return slot(
+                vb,
+                ev,
+                axis=axis,
+            )
+
+        elif (
+            relayed_from is None
+            and vb.event_relay_source is vb  # we are the broadcaster
+            and axis is None
+        ):
+            # Broadcast case: this is a source event which will be
+            # relayed to attached consumers and accepted after all
+            # consumers complete their own handling followed by this
+            # routine's processing. Sequence is,
+            # - pre-relay to all consumers *first* - ``.emit()`` blocks
+            #   until all downstream relay handlers have run.
+            # - run the source handler for **this** event and accept
+            #   the event
+
+            # Access the "bound signal" that is created
+            # on the widget type as part of instantiation.
+            signal = getattr(vb, signame)
+            # print(f'{vb.name} emitting {signame}')
+
+            # TODO/NOTE: we could also just bypass a "relay" signal
+            # entirely and instead call the handlers manually in
+            # a loop? This probably is a lot simpler and also doesn't
+            # have any downside, and allows not touching target widget
+            # internals.
+            signal.emit(
+                ev,
+                axis,
+                # passing this demarks a broadcasted/relayed event
+                vb,
+            )
+            # accept event so no more relays are fired.
+            ev.accept()
+
+            # call underlying wrapped method with an extra
+            # ``relayed_from`` value to denote that this is a relayed
+            # event handling case.
+            return slot(
+                vb,
+                ev,
+                axis=axis,
+            )
+
+    return maybe_broadcast
+
+
+# XXX: :( can't define signals **after** class compile time
+# so this is not really useful.
+# def mk_relay_signal(
+#     func,
+#     name: str = None,
+
+# ) -> Signal:
+#     (
+#         args,
+#         varargs,
+#         varkw,
+#         defaults,
+#         kwonlyargs,
+#         kwonlydefaults,
+#         annotations
+#     ) = inspect.getfullargspec(func)
+
+#     # XXX: generate a relay signal with 1 extra
+#     # argument for a ``relayed_from`` kwarg. Since
+#     # ``'self'`` is already ignored by signals we just need
+#     # to count the arguments since we're adding only 1 (and
+#     # ``args`` will capture that).
+#     numargs = len(args + list(defaults))
+#     signal = Signal(*tuple(numargs * [object]))
+#     signame = name or func.__name__ + 'Relay'
+#     return signame, signal
+
+
+def enable_relays(
+    widget: GraphicsWidget,
+    handler_names: list[str],
+
+) -> list[Signal]:
+    '''
+    Method override helper which enables relay of a particular
+    ``Signal`` from some chosen broadcaster widget to a set of
+    consumer widgets which should operate their event handlers normally
+    but instead of signals "relayed" from the broadcaster.
+
+    Mostly useful for overlaying widgets that handle user input
+    that you want to overlay graphically. The target ``widget`` type must
+    define ``QtCore.Signal``s each with a `'Relay'` suffix for each
+    name provided in ``handler_names: list[str]``.
+
+    '''
+    signals = []
+    for name in handler_names:
+        handler = getattr(widget, name)
+        signame = name + 'Relay'
+        # ensure the target widget defines a relay signal
+        relay = getattr(widget, signame)
+        widget.relays[signame] = name
+        signals.append(relay)
+        method = mk_relay_method(signame, handler)
+        setattr(widget, name, method)
+
+    return signals
+
+
+enable_relays(
+    ChartView,
+    ['wheelEvent', 'mouseDragEvent']
+)
+
+
+class PlotItemOverlay:
+    '''
+    A composite for managing overlaid ``PlotItem`` instances such that
+    you can make multiple graphics appear on the same graph with
+    separate (non-colliding) axes apply ``ViewBox`` signal broadcasting
+    such that all overlaid items respond to input simultaneously.
+
+    '''
+    def __init__(
+        self,
+        root_plotitem: PlotItem
+
+    ) -> None:
+
+        self.root_plotitem: PlotItem = root_plotitem
+
+        vb = root_plotitem.vb
+        vb.event_relay_source = vb  # TODO: maybe change name?
+        vb.setZValue(1000)  # XXX: critical for scene layering/relaying
+
+        self.overlays: list[PlotItem] = []
+        self.layout = ComposedGridLayout(
+            root_plotitem,
+            root_plotitem.layout,
+        )
+        self._relays: dict[str, Signal] = {}
+
+    def add_plotitem(
+        self,
+        plotitem: PlotItem,
+        index: Optional[int] = None,
+
+        # TODO: we could also put the ``ViewBox.XAxis``
+        # style enum here?
+        # (0,),  # link x
+        # (1,),  # link y
+        # (0, 1),  # link both
+        link_axes: tuple[int] = (),
+
+    ) -> None:
+
+        index = index or len(self.overlays)
+        root = self.root_plotitem
+        # layout: QGraphicsGridLayout = root.layout
+        self.overlays.insert(index, plotitem)
+        vb: ViewBox = plotitem.vb
+
+        # mark this consumer overlay as ready to expect relayed events
+        # from the root plotitem.
+        vb.event_relay_source = root.vb
+
+        # TODO: some sane way to allow menu event broadcast XD
+        # vb.setMenuEnabled(False)
+
+        # TODO: inside the `maybe_broadcast()` (soon to be) decorator
+        # we need have checks that consumers have been attached to
+        # these relay signals.
+        if link_axes != (0, 1):
+
+            # wire up relay signals
+            for relay_signal_name, handler_name in vb.relays.items():
+                # print(handler_name)
+                # XXX: Signal class attrs are bound after instantiation
+                # of the defining type, so we need to access that bound
+                # version here.
+                signal = getattr(root.vb, relay_signal_name)
+                handler = getattr(vb, handler_name)
+                signal.connect(handler)
+
+        # link dim-axes to root if requested by user.
+        # TODO: solve more-then-wanted scaled panning on click drag
+        # which seems to be due to broadcast. So we probably need to
+        # disable broadcast when axes are linked in a particular
+        # dimension?
+        for dim in link_axes:
+            # link x and y axes to new view box such that the top level
+            # viewbox propagates to the root (and whatever other
+            # plotitem overlays that have been added).
+            vb.linkView(dim, root.vb)
+
+        # make overlaid viewbox impossible to focus since the top
+        # level should handle all input and relay to overlays.
+        # NOTE: this was solved with the `setZValue()` above!
+
+        # TODO: we will probably want to add a "focus" api such that
+        # a new "top level" ``PlotItem`` can be selected dynamically
+        # (and presumably the axes dynamically sorted to match).
+        vb.setFlag(
+            vb.GraphicsItemFlag.ItemIsFocusable,
+            False
+        )
+        vb.setFocusPolicy(Qt.NoFocus)
+
+        # append-compose into the layout all axes from this plot
+        self.layout.insert(index, plotitem)
+
+        plotitem.setGeometry(root.vb.sceneBoundingRect())
+
+        def size_to_viewbox(vb: 'ViewBox'):
+            plotitem.setGeometry(vb.sceneBoundingRect())
+
+        root.vb.sigResized.connect(size_to_viewbox)
+
+        # ensure the overlayed view is redrawn on each cycle
+        root.scene().sigPrepareForPaint.connect(vb.prepareForPaint)
+
+        # focus state sanity
+        vb.clearFocus()
+        assert not vb.focusWidget()
+        root.vb.setFocus()
+        assert root.vb.focusWidget()
+
+    # XXX: do we need this? Why would you build then destroy?
+    def remove_plotitem(self, plotItem: PlotItem) -> None:
+        '''
+        Remove this ``PlotItem`` from the overlayed set making not shown
+        and unable to accept input.
+
+        '''
+        ...
+
+    # TODO: i think this would be super hot B)
+    def focus_item(self, plotitem: PlotItem) -> PlotItem:
+        '''
+        Apply focus to a contained PlotItem thus making it the "top level"
+        item in the overlay able to accept peripheral's input from the user
+        and responsible for zoom and panning control via its ``ViewBox``.
+
+        '''
+        ...
+
+    def get_axis(
+        self,
+        plot: PlotItem,
+        name: str,
+
+    ) -> AxisItem:
+        '''
+        Retrieve the named axis for overlayed ``plot``.
+
+        '''
+        return self.layout.get_axis(plot, name)
+
+    def get_axes(
+        self,
+        name: str,
+
+    ) -> list[AxisItem]:
+        '''
+        Retrieve all axes for all plots with ``name: str``.
+
+        If a particular overlay doesn't have a displayed named axis
+        then it is not delivered in the returned ``list``.
+
+        '''
+        axes = []
+        for plot in self.overlays:
+            axis = self.layout.get_axis(plot, name)
+            if axis:
+                axes.append(axis)
+
+        return axes
+
+    # TODO: i guess we need this if you want to detach existing plots
+    # dynamically? XXX: untested as of now.
+    def _disconnect_all(
+        self,
+        plotitem: PlotItem,
+    ) -> list[Signal]:
+        '''
+        Disconnects all signals related to this widget for the given chart.
+
+        '''
+        disconnected = []
+        for pi, sig in self._relays.items():
+            QObject.disconnect(sig)
+            disconnected.append(sig)
+
+        return disconnected
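The relay machinery above depends on piker's ``ChartView`` defining matching ``*Relay`` signals; stripped of the Qt-event details it reduces to "emit to consumers first, then run the local handler". A self-contained sketch of just that pattern (all names here are illustrative, not from the module above):

    from PyQt5.QtCore import QObject, pyqtSignal

    class RelayBox(QObject):
        # relay signal carrying (event, axis, relayed_from)
        wheelEventRelay = pyqtSignal(object, object, object)

        def __init__(self, name: str):
            super().__init__()
            self.name = name

        def wheelEvent(self, ev, axis=None, relayed_from=None):
            if relayed_from is None:
                # source event: notify consumers *first* (``.emit()``
                # blocks until all connected handlers have run), then
                # handle locally
                self.wheelEventRelay.emit(ev, axis, self)
            print(f'{self.name} handling wheel event')

    src, consumer = RelayBox('src'), RelayBox('consumer')
    src.wheelEventRelay.connect(consumer.wheelEvent)
    src.wheelEvent(object())  # both boxes run their handlers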
@@ -0,0 +1,236 @@
+# piker: trading gear for hackers
+# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+"""
+Super fast ``QPainterPath`` generation related operator routines.
+
+"""
+from __future__ import annotations
+from typing import (
+    # Optional,
+    TYPE_CHECKING,
+)
+
+import numpy as np
+from numpy.lib import recfunctions as rfn
+from numba import njit, float64, int64  # , optional
+# import pyqtgraph as pg
+from PyQt5 import QtGui
+# from PyQt5.QtCore import QLineF, QPointF
+
+from ..data._sharedmem import (
+    ShmArray,
+)
+# from .._profile import pg_profile_enabled, ms_slower_then
+from ._compression import (
+    ds_m4,
+)
+
+if TYPE_CHECKING:
+    from ._flows import Renderer
+
+
+def xy_downsample(
+    x,
+    y,
+    uppx,
+
+    x_spacer: float = 0.5,
+
+) -> tuple[np.ndarray, np.ndarray]:
+
+    # downsample whenever more then 1 pixels per datum can be shown.
+    # always refresh data bounds until we get diffing
+    # working properly, see above..
+    bins, x, y = ds_m4(
+        x,
+        y,
+        uppx,
+    )
+
+    # flatten output to 1d arrays suitable for path-graphics generation.
+    x = np.broadcast_to(x[:, None], y.shape)
+    x = (x + np.array(
+        [-x_spacer, 0, 0, x_spacer]
+    )).flatten()
+    y = y.flatten()
+
+    return x, y
+
+
+@njit(
+    # TODO: for now need to construct this manually for readonly arrays, see
+    # https://github.com/numba/numba/issues/4511
+    # ntypes.tuple((float64[:], float64[:], float64[:]))(
+    #     numba_ohlc_dtype[::1],  # contiguous
+    #     int64,
+    #     optional(float64),
+    # ),
+    nogil=True
+)
+def path_arrays_from_ohlc(
+    data: np.ndarray,
+    start: int64,
+    bar_gap: float64 = 0.43,
+
+) -> np.ndarray:
+    '''
+    Generate an array of lines objects from input ohlc data.
+
+    '''
+    size = int(data.shape[0] * 6)
+
+    x = np.zeros(
+        # data,
+        shape=size,
+        dtype=float64,
+    )
+    y, c = x.copy(), x.copy()
+
+    # TODO: report bug for assert @
+    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
+    for i, q in enumerate(data[start:], start):
+
+        # TODO: ask numba why this doesn't work..
+        # open, high, low, close, index = q[
+        #     ['open', 'high', 'low', 'close', 'index']]
+
+        open = q['open']
+        high = q['high']
+        low = q['low']
+        close = q['close']
+        index = float64(q['index'])
+
+        istart = i * 6
+        istop = istart + 6
+
+        # x,y detail the 6 points which connect all vertexes of a ohlc bar
+        x[istart:istop] = (
+            index - bar_gap,
+            index,
+            index,
+            index,
+            index,
+            index + bar_gap,
+        )
+        y[istart:istop] = (
+            open,
+            open,
+            low,
+            high,
+            close,
+            close,
+        )
+
+        # specifies that the first edge is never connected to the
+        # prior bars last edge thus providing a small "gap"/"space"
+        # between bars determined by ``bar_gap``.
+        c[istart:istop] = (1, 1, 1, 1, 1, 0)
+
+    return x, y, c
+
+
+def gen_ohlc_qpath(
+    r: Renderer,
+    data: np.ndarray,
+    array_key: str,  # we ignore this
+    vr: tuple[int, int],
+
+    start: int = 0,  # XXX: do we need this?
+    # 0.5 is no overlap between arms, 1.0 is full overlap
+    w: float = 0.43,
+
+) -> QtGui.QPainterPath:
+    '''
+    More or less direct proxy to ``path_arrays_from_ohlc()``
+    but with closed in kwargs for line spacing.
+
+    '''
+    x, y, c = path_arrays_from_ohlc(
+        data,
+        start,
+        bar_gap=w,
+    )
+    return x, y, c
+
+
+def ohlc_to_line(
+    ohlc_shm: ShmArray,
+    data_field: str,
+    fields: list[str] = ['open', 'high', 'low', 'close']
+
+) -> tuple[
+    np.ndarray,
+    np.ndarray,
+]:
+    '''
+    Convert an input struct-array holding OHLC samples into a pair of
+    flattened x, y arrays with the same size (datums wise) as the source
+    data.
+
+    '''
+    y_out = ohlc_shm.ustruct(fields)
+    first = ohlc_shm._first.value
+    last = ohlc_shm._last.value
+
+    # write pushed data to flattened copy
+    y_out[first:last] = rfn.structured_to_unstructured(
+        ohlc_shm.array[fields]
+    )
+
+    # generate an flat-interpolated x-domain
+    x_out = (
+        np.broadcast_to(
+            ohlc_shm._array['index'][:, None],
+            (
+                ohlc_shm._array.size,
+                # 4,  # only ohlc
+                y_out.shape[1],
+            ),
+        ) + np.array([-0.5, 0, 0, 0.5])
+    )
+    assert y_out.any()
+
+    return (
+        x_out,
+        y_out,
+    )
+
+
+def to_step_format(
+    shm: ShmArray,
+    data_field: str,
+    index_field: str = 'index',
+
+) -> tuple[int, np.ndarray, np.ndarray]:
+    '''
+    Convert an input 1d shm array to a "step array" format
+    for use by path graphics generation.
+
+    '''
+    i = shm._array['index'].copy()
+    out = shm._array[data_field].copy()
+
+    x_out = np.broadcast_to(
+        i[:, None],
+        (i.size, 2),
+    ) + np.array([-0.5, 0.5])
+
+    y_out = np.empty((len(out), 2), dtype=out.dtype)
+    y_out[:] = out[:, np.newaxis]
+
+    # start y at origin level
+    y_out[0, 0] = 0
+    return x_out, y_out
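The (x, y, c) triple produced by ``path_arrays_from_ohlc()`` is meant to be fed to pyqtgraph's ``arrayToQPath()``, where a zero in the connect array breaks the pen between consecutive points. A single-bar sketch (run under a Qt runtime if your platform requires one):

    import numpy as np
    import pyqtgraph as pg

    # 6 vertices for one bar at index 0
    # (open=100, high=105, low=99, close=104)
    gap = 0.43
    x = np.array([0 - gap, 0, 0, 0, 0, 0 + gap])
    y = np.array([100., 100., 99., 105., 104., 104.])
    # the trailing 0 keeps consecutive bars visually separated
    c = np.array([1, 1, 1, 1, 1, 0], dtype=np.uint8)

    path = pg.functions.arrayToQPath(x, y, connect=c)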
@@ -54,6 +54,7 @@ async def update_pnl_from_feed(

     feed: Feed,
     order_mode: OrderMode,  # noqa
+    tracker: PositionTracker,

 ) -> None:
     '''Real-time display the current pp's PnL in the appropriate label.

@@ -70,13 +71,14 @@ async def update_pnl_from_feed(
     log.info(f'Starting pnl display for {pp.alloc.account}')

     if live.size < 0:
-        types = ('ask', 'last', 'last', 'utrade')
+        types = ('ask', 'last', 'last', 'dark_trade')

     elif live.size > 0:
-        types = ('bid', 'last', 'last', 'utrade')
+        types = ('bid', 'last', 'last', 'dark_trade')

     else:
-        raise RuntimeError('No pp?!?!')
+        log.info(f'No position (yet) for {tracker.alloc.account}@{key}')
+        return

     # real-time update pnl on the status pane
     try:

@@ -117,7 +119,8 @@ async def update_pnl_from_feed(

 @dataclass
 class SettingsPane:
-    '''Composite set of widgets plus an allocator model for configuring
+    '''
+    Composite set of widgets plus an allocator model for configuring
     order entry sizes and position limits per tradable instrument.

     '''

@@ -149,10 +152,11 @@ class SettingsPane:
         key: str,

     ) -> None:
-        '''Called on any order pane drop down selection change.
+        '''
+        Called on any order pane drop down selection change.

         '''
-        log.info(f'selection input: {text}')
+        log.info(f'selection input {key}:{text}')
         self.on_ui_settings_change(key, text)

     def on_ui_settings_change(

@@ -162,7 +166,8 @@ class SettingsPane:
         value: str,

     ) -> bool:
-        '''Called on any order pane edit field value change.
+        '''
+        Called on any order pane edit field value change.

         '''
         mode = self.order_mode

@@ -209,30 +214,51 @@ class SettingsPane:

         # WRITE any settings to current pp's allocator
         try:
+            if key == 'size_unit':
+                # implicit re-write of value if input
+                # is the "text name" of the units.
+                # yah yah, i know this is badd..
+                alloc.size_unit = value
+            else:
             value = puterize(value)
             if key == 'limit':
+                pp = mode.current_pp.live_pp
+
                 if size_unit == 'currency':
+                    dsize = pp.dsize
+                    if dsize > value:
+                        log.error(
+                            f'limit must > then current pp: {dsize}'
+                        )
+                        raise ValueError
+
                     alloc.currency_limit = value

                 else:
+                    size = pp.size
+                    if size > value:
+                        log.error(
+                            f'limit must > then current pp: {size}'
+                        )
+                        raise ValueError
+
                     alloc.units_limit = value

             elif key == 'slots':
+                if value <= 0:
+                    raise ValueError('slots must be > 0')
                 alloc.slots = int(value)

-            elif key == 'size_unit':
-                # TODO: if there's a limit size unit change re-compute
-                # the current settings in the new units
-                alloc.size_unit = value
-
             else:
-                raise ValueError(f'Unknown setting {key}')
+                log.error(f'Unknown setting {key}')
+                raise ValueError

             log.info(f'settings change: {key}: {value}')

         except ValueError:
             log.error(f'Invalid value for `{key}`: {value}')

-        # READ out settings and update UI
+        # READ out settings and update the status UI / settings widgets
         suffix = {'currency': ' $', 'units': ' u'}[size_unit]
         limit = alloc.limit()

@@ -259,6 +285,9 @@ class SettingsPane:
         self.form.fields['slots'].setText(str(alloc.slots))
         self.form.fields['limit'].setText(str(limit))

+        # update of level marker size label based on any new settings
+        tracker.update_from_pp()
+
         # TODO: maybe return a diff of settings so if we can an error we
         # can have general input handling code to report it through the
         # UI in some way?

@@ -339,6 +368,7 @@ class SettingsPane:
             update_pnl_from_feed,
             feed,
             mode,
+            tracker,
         )

         # immediately display in status label

@@ -356,7 +386,8 @@ def position_line(
     marker: Optional[LevelMarker] = None,

 ) -> LevelLine:
-    '''Convenience routine to create a line graphic representing a "pp"
+    '''
+    Convenience routine to create a line graphic representing a "pp"
     aka the acro for a,
     "{piker, private, personal, puny, <place your p-word here>} position".

@@ -410,7 +441,8 @@ def position_line(


 class PositionTracker:
-    '''Track and display real-time positions for a single symbol
+    '''
+    Track and display real-time positions for a single symbol
     over multiple accounts on a single chart.

     Graphically composed of a level line and marker as well as labels

@@ -490,7 +522,8 @@ class PositionTracker:

     @property
     def pane(self) -> FieldsForm:
-        '''Return handle to pp side pane form.
+        '''
+        Return handle to pp side pane form.

         '''
         return self.chart.linked.godwidget.pp_pane

@@ -500,7 +533,8 @@ class PositionTracker:
         marker: LevelMarker

     ) -> None:
-        '''Update all labels.
+        '''
+        Update all labels.

         Meant to be called from the maker ``.paint()``
         for immediate, lag free label draws.
|
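The settings hunks above tighten `SettingsPane.on_ui_settings_change`: the `slots` input is now validated before assignment, and an unknown key logs an error before raising so the `except ValueError` arm reports all bad input uniformly. A minimal runnable sketch of that validate-then-apply pattern follows; the `Alloc` stub and logger wiring are stand-ins, not piker's real allocator API.

    # Sketch of the validate-then-apply settings flow; `Alloc` and the
    # logger are illustrative stand-ins, not piker's real types.
    import logging

    log = logging.getLogger('settings-sketch')

    class Alloc:
        slots: int = 4
        size_unit: str = 'currency'

    def on_setting(alloc: Alloc, key: str, value: str) -> bool:
        try:
            if key == 'slots':
                slots = int(value)
                if slots <= 0:
                    raise ValueError('slots must be > 0')
                alloc.slots = slots

            elif key == 'size_unit':
                alloc.size_unit = value

            else:
                log.error(f'Unknown setting {key}')
                raise ValueError

            log.info(f'settings change: {key}: {value}')
            return True

        except ValueError:
            log.error(f'Invalid value for `{key}`: {value}')
            return False
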
@@ -49,7 +49,6 @@ from PyQt5 import QtCore
 from PyQt5 import QtWidgets
 from PyQt5.QtCore import (
     Qt,
-    # QSize,
     QModelIndex,
     QItemSelectionModel,
 )
@@ -72,7 +71,7 @@ from ._style import (
     _font,
     hcolor,
 )
-from ._forms import FontAndChartAwareLineEdit, FontScaledDelegate
+from ._forms import Edit, FontScaledDelegate


 log = get_logger(__name__)
@@ -126,6 +125,10 @@ class CompleterView(QTreeView):
         # self.setSizeAdjustPolicy(QAbstractScrollArea.AdjustIgnored)

         # ux settings
+        self.setSizePolicy(
+            QtWidgets.QSizePolicy.Expanding,
+            QtWidgets.QSizePolicy.Expanding,
+        )
         self.setItemsExpandable(True)
         self.setExpandsOnDoubleClick(False)
         self.setAnimated(False)
@@ -153,23 +156,58 @@ class CompleterView(QTreeView):

         self.setStyleSheet(f"font: {size}px")

-    def resize(self):
+    # def resizeEvent(self, event: 'QEvent') -> None:
+    #     event.accept()
+    #     super().resizeEvent(event)
+
+    def on_resize(self) -> None:
+        '''
+        Resize relay event from god.
+
+        '''
+        self.resize_to_results()
+
+    def resize_to_results(self):
         model = self.model()
         cols = model.columnCount()
+        # rows = model.rowCount()

+        col_w_tot = 0
         for i in range(cols):
             self.resizeColumnToContents(i)
+            col_w_tot += self.columnWidth(i)

-        # inclusive of search bar and header "rows" in pixel terms
-        rows = 100
-        # max_rows = 8 # 6 + search and headers
-        row_px = self.rowHeight(self.currentIndex())
-        # print(f'font_h: {font_h}\n px_height: {px_height}')
+        win = self.window()
+        win_h = win.height()
+        edit_h = self.parent().bar.height()
+        sb_h = win.statusBar().height()

         # TODO: probably make this more general / less hacky
-        self.setMinimumSize(self.width(), rows * row_px)
-        self.setMaximumSize(self.width() + 10, rows * row_px)
-        self.setFixedWidth(333)
+        # we should figure out the exact number of rows to allow
+        # inclusive of search bar and header "rows", in pixel terms.
+        # Eventually when we have an "info" widget below the results we
+        # will want space for it and likely terminating the results-view
+        # space **exactly on a row** would be ideal.
+        # if row_px > 0:
+        #     rows = ceil(window_h / row_px) - 4
+        # else:
+        #     rows = 16
+        # self.setFixedHeight(rows * row_px)
+        # self.resize(self.width(), rows * row_px)
+
+        # NOTE: if the heigh set here is **too large** then the resize
+        # event will perpetually trigger as the window causes some kind
+        # of recompute of callbacks.. so we have to ensure it's limited.
+        h = win_h - (edit_h + 1.666*sb_h)
+        assert h > 0
+        self.setFixedHeight(round(h))
+
+        # size to width of longest result seen thus far
+        # TODO: should we always dynamically scale to longest result?
+        if self.width() < col_w_tot:
+            self.setFixedWidth(col_w_tot)
+
+        self.update()

     def is_selecting_d1(self) -> bool:
         cidx = self.selectionModel().currentIndex()
@@ -218,7 +256,8 @@ class CompleterView(QTreeView):
         idx: QModelIndex,

     ) -> QStandardItem:
-        '''Select and return the item at index ``idx``.
+        '''
+        Select and return the item at index ``idx``.

         '''
         sel = self.selectionModel()
@@ -233,7 +272,8 @@ class CompleterView(QTreeView):
         return model.itemFromIndex(idx)

     def select_first(self) -> QStandardItem:
-        '''Select the first depth >= 2 entry from the completer tree and
+        '''
+        Select the first depth >= 2 entry from the completer tree and
         return it's item.

         '''
@@ -296,7 +336,8 @@ class CompleterView(QTreeView):
         section: str,

     ) -> Optional[QModelIndex]:
-        '''Find the *first* depth = 1 section matching ``section`` in
+        '''
+        Find the *first* depth = 1 section matching ``section`` in
         the tree and return its index.

         '''
@@ -334,7 +375,7 @@ class CompleterView(QTreeView):
             else:
                 model.setItem(idx.row(), 1, QStandardItem())

-            self.resize()
+            self.resize_to_results()

             return idx
         else:
@@ -347,7 +388,8 @@ class CompleterView(QTreeView):
         clear_all: bool = False,

     ) -> None:
-        '''Set result-rows for depth = 1 tree section ``section``.
+        '''
+        Set result-rows for depth = 1 tree section ``section``.

         '''
         model = self.model()
@@ -404,10 +446,10 @@ class CompleterView(QTreeView):

     def show_matches(self) -> None:
         self.show()
-        self.resize()
+        self.resize_to_results()


-class SearchBar(FontAndChartAwareLineEdit):
+class SearchBar(Edit):

     mode_name: str = 'search'

@@ -424,6 +466,7 @@ class SearchBar(FontAndChartAwareLineEdit):
         self.godwidget = godwidget
         super().__init__(parent, **kwargs)
         self.view: CompleterView = view
+        godwidget._widgets[view.mode_name] = view

     def show(self) -> None:
         super().show()
@@ -438,7 +481,8 @@ class SearchBar(FontAndChartAwareLineEdit):


 class SearchWidget(QtWidgets.QWidget):
-    '''Composed widget of ``SearchBar`` + ``CompleterView``.
+    '''
+    Composed widget of ``SearchBar`` + ``CompleterView``.

     Includes helper methods for item management in the sub-widgets.

@@ -457,7 +501,7 @@ class SearchWidget(QtWidgets.QWidget):
         # size it as we specify
         self.setSizePolicy(
             QtWidgets.QSizePolicy.Fixed,
-            QtWidgets.QSizePolicy.Fixed,
+            QtWidgets.QSizePolicy.Expanding,
         )

         self.godwidget = godwidget

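The `CompleterView.resize_to_results()` rework above drops the old fixed row-count sizing for a height pinned to the main window: full window height minus the search `Edit` bar and a status-bar fudge factor, which sidesteps the perpetual resize-event loop the NOTE warns about. A sketch of just that computation, with the 1.666 fudge constant copied verbatim from the hunk rather than derived:

    # Sketch of the new fixed-height rule; `win_h`, `edit_h` and `sb_h`
    # mirror the hunk's window, search-bar and status-bar heights.
    def results_height(win_h: int, edit_h: int, sb_h: int) -> int:
        h = win_h - (edit_h + 1.666 * sb_h)
        assert h > 0  # a non-positive height would re-trigger resizes
        return round(h)
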
@@ -14,14 +14,16 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
+'''
 Qt UI styling.
-"""
+
+'''
 from typing import Optional, Dict
 import math

 import pyqtgraph as pg
 from PyQt5 import QtCore, QtGui
+from PyQt5.QtCore import Qt, QCoreApplication
 from qdarkstyle import DarkPalette

 from ..log import get_logger
@@ -110,7 +112,7 @@ class DpiAwareFont:

         mx_dpi = max(pdpi, ldpi)
         mn_dpi = min(pdpi, ldpi)
-        scale = round(ldpi/pdpi)
+        scale = round(ldpi/pdpi, ndigits=2)

         if mx_dpi <= 97:  # for low dpi use larger font sizes
             inches = _font_sizes['lo'][self._font_size]
@@ -120,18 +122,44 @@ class DpiAwareFont:

         dpi = mn_dpi

+        mult = 1.0
+
+        # No implicit DPI scaling was done by the DE so let's engage
+        # some hackery ad-hoc scaling shiat.
         # dpi is likely somewhat scaled down so use slightly larger font size
-        if scale > 1 and self._font_size:
-            # TODO: this denominator should probably be determined from
+        if scale >= 1.1 and self._font_size:
+
+            # no idea why
+            if 1.2 <= scale:
+                mult = 1.0375
+
+            if scale >= 1.5:
+                mult = 1.375
+
+            # TODO: this multiplier should probably be determined from
             # relative aspect ratios or something?
-            inches = inches * (1 / scale) * (1 + 6/16)
-            dpi = mx_dpi
+            inches *= mult
+
+        # XXX: if additionally we detect a known DE scaling factor we
+        # also scale *up* our font size on top of the existing
+        # heuristical (aka no clue why it works) scaling from the block
+        # above XD
+        if (
+            hasattr(Qt, 'AA_EnableHighDpiScaling')
+            and QCoreApplication.testAttribute(Qt.AA_EnableHighDpiScaling)
+        ):
+            inches *= round(scale)
+
+        # TODO: we might want to fiddle with incrementing font size by
+        # +1 for the edge cases above. it seems doing it via scaling is
+        # always going to hit that error in range mapping from inches:
+        # float to px size: int.
         self._font_inches = inches

         font_size = math.floor(inches * dpi)

         log.debug(
-            f"\nscreen:{screen.name()} with pDPI: {pdpi}, lDPI: {ldpi}"
+            f"screen:{screen.name()}\n"
+            f"pDPI: {pdpi}, lDPI: {ldpi}, scale: {scale}\n"
             f"\nOur best guess font size is {font_size}\n"
         )
         # apply the size
@@ -175,8 +203,6 @@ _xaxis_at = 'bottom'
 # charting config
 CHART_MARGINS = (0, 0, 2, 2)
 _min_points_to_show = 6
-_bars_to_left_in_follow_mode = int(61*6)
-_bars_from_right_in_follow_mode = round(0.16 * _bars_to_left_in_follow_mode)
 _tina_mode = False


@@ -205,19 +231,26 @@ def hcolor(name: str) -> str:
         'svags': '#0a0e14',

         # fifty shades
+        'original': '#a9a9a9',
         'gray': '#808080',  # like the kick
         'grayer': '#4c4c4c',
         'grayest': '#3f3f3f',
-        'i3': '#494D4F',
-        'jet': '#343434',
         'cadet': '#91A3B0',
         'marengo': '#91A3B0',
-        'charcoal': '#36454F',
         'gunmetal': '#91A3B0',
         'battleship': '#848482',
-        'davies': '#555555',
+
+        # bluish
+        'charcoal': '#36454F',
+
+        # default bars
         'bracket': '#666666',  # like the logo
-        'original': '#a9a9a9',
+
+        # work well for filled polygons which want a 'bracket' feel
+        # going light to dark
+        'davies': '#555555',
+        'i3': '#494D4F',
+        'jet': '#343434',

         # from ``qdarkstyle`` palette
         'default_darkest': DarkPalette.COLOR_BACKGROUND_1,

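The `DpiAwareFont` change above switches `scale` to a two-digit float and picks a font multiplier from admittedly magic thresholds (the hunk's own comment is "no idea why"). The heuristic in isolation, with the `self._font_size` gate from the hunk elided for brevity:

    # The scale-to-multiplier heuristic on its own; thresholds are the
    # hunk's magic numbers, not derived values.
    def font_scale_mult(pdpi: float, ldpi: float) -> float:
        scale = round(ldpi / pdpi, ndigits=2)
        mult = 1.0
        if scale >= 1.1:
            if 1.2 <= scale:
                mult = 1.0375
            if scale >= 1.5:
                mult = 1.375
        return mult

    assert font_scale_mult(96, 96) == 1.0
    assert font_scale_mult(96, 144) == 1.375  # scale == 1.5
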
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -25,7 +25,7 @@ from typing import Callable, Optional, Union
 import uuid

 from pyqtgraph import QtGui
-from PyQt5 import QtCore, QtWidgets
+from PyQt5 import QtCore
 from PyQt5.QtWidgets import QLabel, QStatusBar

 from ..log import get_logger
@@ -55,7 +55,8 @@ class MultiStatus:
         group_key: Optional[Union[bool, str]] = False,

     ) -> Union[Callable[..., None], str]:
-        '''Add a status to the status bar and return a close callback which
+        '''
+        Add a status to the status bar and return a close callback which
         when called will remove the status ``msg``.

         '''
@@ -137,7 +138,8 @@ class MultiStatus:
         return ret

     def render(self) -> None:
-        '''Display all open statuses to bar.
+        '''
+        Display all open statuses to bar.

         '''
         if self.statuses:
@@ -151,8 +153,7 @@ class MainWindow(QtGui.QMainWindow):
     # XXX: for tiling wms this should scale
     # with the alloted window size.
     # TODO: detect for tiling and if untrue set some size?
-    # size = (300, 500)
-    size = (0, 0)
+    size = (300, 500)

     title = 'piker chart (ur symbol is loading bby)'

@@ -163,6 +164,7 @@ class MainWindow(QtGui.QMainWindow):

         self._status_bar: QStatusBar = None
         self._status_label: QLabel = None
+        self._size: Optional[tuple[int, int]] = None

     @property
     def mode_label(self) -> QtGui.QLabel:
@@ -267,6 +269,29 @@ class MainWindow(QtGui.QMainWindow):
         assert screen, "Wow Qt is dumb as shit and has no screen..."
         return screen

+    def configure_to_desktop(
+        self,
+        size: Optional[tuple[int, int]] = None,
+
+    ) -> None:
+        '''
+        Explicitly size the window dimensions (for stacked window
+        managers).
+
+        For tina systems (like windoze) try to do a sane window size on
+        startup.
+
+        '''
+        # https://stackoverflow.com/a/18975846
+        if not size and not self._size:
+            app = QtGui.QApplication.instance()
+            geo = self.current_screen().geometry()
+            h, w = geo.height(), geo.width()
+            # use approx 1/3 of the area of the screen by default
+            self._size = round(w * .666), round(h * .666)
+
+        self.resize(*size or self._size)


 # singleton app per actor
 _qt_win: QtGui.QMainWindow = None

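The new `configure_to_desktop()` above caches a default of roughly two thirds of the screen's width and height when no explicit size is given or cached; note its in-code comment says "approx 1/3 of the area" though 0.666 squared is closer to 4/9. The rule in isolation:

    # The default-size computation from `configure_to_desktop()`.
    def default_size(screen_w: int, screen_h: int) -> tuple[int, int]:
        return round(screen_w * .666), round(screen_h * .666)

    assert default_size(1920, 1080) == (1279, 719)
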
@@ -122,7 +122,8 @@ def optschain(config, symbol, date, rate, test):
 @cli.command()
 @click.option(
     '--profile',
-    is_flag=True,
+    '-p',
+    default=None,
     help='Enable pyqtgraph profiling'
 )
 @click.option(
@@ -133,9 +134,16 @@ def optschain(config, symbol, date, rate, test):
 @click.argument('symbol', required=True)
 @click.pass_obj
 def chart(config, symbol, profile, pdb):
-    """Start a real-time chartng UI
-    """
+    '''
+    Start a real-time chartng UI

+    '''
+    # eg. ``--profile 3`` reports profiling for anything slower then 3 ms.
+    if profile is not None:
         from .. import _profile
+        _profile._pg_profile = True
+        _profile.ms_slower_then = float(profile)

     from ._app import _main

     if '.' not in symbol:
@@ -145,8 +153,6 @@ def chart(config, symbol, profile, pdb):
         ))
         return

-    # toggle to enable profiling
-    _profile._pg_profile = profile

     # global opts
     brokernames = config['brokers']

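The CLI hunk turns `--profile` from a boolean flag into an optional millisecond threshold, so `piker chart -p 3 ...` would report pyqtgraph profiling for anything slower than 3ms while the default `None` leaves profiling off. A self-contained sketch of the new option shape (the command body is illustrative, not piker's):

    # Sketch of a value-taking `-p/--profile` option where None means
    # "profiling off"; mirrors the decorator change above.
    import click

    @click.command()
    @click.option(
        '--profile',
        '-p',
        default=None,
        help='Enable pyqtgraph profiling',
    )
    def chart(profile):
        if profile is not None:
            ms_slower_then = float(profile)
            click.echo(f'profiling anything slower than {ms_slower_then}ms')
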
@@ -22,6 +22,7 @@ from contextlib import asynccontextmanager
 from dataclasses import dataclass, field
 from functools import partial
 from pprint import pformat
+import platform
 import time
 from typing import Optional, Dict, Callable, Any
 import uuid
@@ -29,6 +30,7 @@ import uuid
 from pydantic import BaseModel
 import tractor
 import trio
+from PyQt5.QtCore import Qt

 from .. import config
 from ..clearing._client import open_ems, OrderBook
@@ -36,6 +38,7 @@ from ..clearing._allocate import (
     mk_allocator,
     Position,
 )
+from ._style import _font
 from ..data._source import Symbol
 from ..data.feed import Feed
 from ..log import get_logger
@@ -45,9 +48,10 @@ from ._position import (
     PositionTracker,
     SettingsPane,
 )
-from ._label import FormatLabel
+from ._forms import FieldsForm
+# from ._label import FormatLabel
 from ._window import MultiStatus
-from ..clearing._messages import Order
+from ..clearing._messages import Order, BrokerdPosition
 from ._forms import open_form_input_handling


@@ -106,7 +110,8 @@ def on_level_change_update_next_order_info(

 @dataclass
 class OrderMode:
-    '''Major UX mode for placing orders on a chart view providing so
+    '''
+    Major UX mode for placing orders on a chart view providing so
     called, "chart trading".

     This is the other "main" mode that pairs with "view mode" (when
@@ -266,13 +271,14 @@ class OrderMode:

         '''
         staged = self._staged_order
-        symbol = staged.symbol
+        symbol: Symbol = staged.symbol
         oid = str(uuid.uuid4())

         # format order data for ems
+        fqsn = symbol.front_fqsn()
         order = staged.copy(
             update={
-                'symbol': symbol.key,
+                'symbol': fqsn,
                 'oid': oid,
             }
         )
@@ -429,13 +435,19 @@ class OrderMode:

         # TODO: make this not trash.
         # XXX: linux only for now
+        if platform.system() == "Windows":
+            return
+
         result = await trio.run_process(
             [
                 'notify-send',
                 '-u', 'normal',
-                '-t', '10000',
+                '-t', '1616',
                 'piker',
-                f'alert: {msg}',
+
+                # TODO: add in standard fill/exec info that maybe we
+                # pack in a broker independent way?
+                f'{msg["resp"]}: {msg["trigger_price"]}',
             ],
         )
         log.runtime(result)
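The alert path above now bails early on Windows and shells out to `notify-send` with a terse `resp: trigger_price` summary instead of dumping the whole message. Condensed into a standalone coroutine (assumes a Linux desktop with `notify-send` on the PATH):

    # The notification flow reduced to one coroutine; message fields
    # mirror the hunk above.
    import platform
    import trio

    async def notify_alert(msg: dict) -> None:
        if platform.system() == "Windows":
            return

        await trio.run_process([
            'notify-send',
            '-u', 'normal',
            '-t', '1616',  # display timeout in ms
            'piker',
            f'{msg["resp"]}: {msg["trigger_price"]}',
        ])
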
@@ -511,8 +523,7 @@ async def open_order_mode(

     feed: Feed,
     chart: 'ChartPlotWidget',  # noqa
-    symbol: Symbol,
-    brokername: str,
+    fqsn: str,
     started: trio.Event,

 ) -> None:
@@ -529,12 +540,16 @@ async def open_order_mode(

     book: OrderBook
     trades_stream: tractor.MsgStream
-    position_msgs: dict
+
+    # The keys in this dict **must** be in set our set of "normalized"
+    # symbol names (i.e. the same names you'd get back in search
+    # results) in order for position msgs to correctly trigger the
+    # display of a position indicator on screen.
+    position_msgs: dict[str, list[BrokerdPosition]]

     # spawn EMS actor-service
     async with (
-        open_ems(brokername, symbol) as (
+        open_ems(fqsn) as (
             book,
             trades_stream,
             position_msgs,
@@ -543,8 +558,7 @@ async def open_order_mode(
         trio.open_nursery() as tn,

     ):
-        log.info(f'Opening order mode for {brokername}.{symbol.key}')
+        log.info(f'Opening order mode for {fqsn}')

         view = chart.view

         # annotations editors
@@ -553,7 +567,7 @@ async def open_order_mode(

         # symbol id
         symbol = chart.linked.symbol
-        symkey = symbol.key
+        symkey = symbol.front_fqsn()

         # map of per-provider account keys to position tracker instances
         trackers: dict[str, PositionTracker] = {}
@@ -563,7 +577,9 @@ async def open_order_mode(
             providers=symbol.brokers
         )

-        # use only loaded accounts according to brokerd
+        # XXX: ``brokerd`` delivers a set of account names that it allows
+        # use of but the user also can define the accounts they'd like
+        # to use, in order, in their `brokers.toml` file.
         accounts = {}
         for name in brokerd_accounts:
             # ensure name is in ``brokers.toml``
@@ -571,7 +587,10 @@ async def open_order_mode(

         # first account listed is the one we select at startup
         # (aka order based selection).
-        pp_account = next(iter(accounts.keys())) if accounts else 'paper'
+        pp_account = next(
+            # choose first account based on line order from `brokers.toml`.
+            iter(accounts.keys())
+        ) if accounts else 'paper'

         # NOTE: requires the backend exactly specifies
         # the expected symbol key in its positions msg.
@@ -592,7 +611,7 @@ async def open_order_mode(
             log.info(f'Loading pp for {symkey}:\n{pformat(msg)}')
             startup_pp.update_from_msg(msg)

-            # allocator
+            # allocator config
             alloc = mk_allocator(
                 symbol=symbol,
                 account=account_name,
@@ -617,67 +636,27 @@ async def open_order_mode(
             # alloc?
             pp_tracker.update_from_pp()

+            # on existing position, show pp tracking graphics
             if pp_tracker.startup_pp.size != 0:
-                # if no position, don't show pp tracking graphics
                 pp_tracker.show()
                 pp_tracker.hide_info()

         # setup order mode sidepane widgets
-        form = chart.sidepane
-        vbox = form.vbox
+        form: FieldsForm = chart.sidepane
+        form.vbox.setSpacing(
+            int((1 + 5/8)*_font.px_size)
-        from textwrap import dedent
-
-        from PyQt5.QtCore import Qt
-
-        from ._style import _font, _font_small
-        from ..calc import humanize
-
-        feed_label = FormatLabel(
-            fmt_str=dedent("""
-                actor: **{actor_name}**\n
-                |_ @**{host}:{port}**\n
-                |_ throttle_hz: **{throttle_rate}**\n
-                |_ streams: **{symbols}**\n
-                |_ shm: **{shm}**\n
-            """),
-            font=_font.font,
-            font_size=_font_small.px_size,
-            font_color='default_lightest',
         )

+        from ._feedstatus import mk_feed_label
+
+        feed_label = mk_feed_label(
+            form,
+            feed,
+            chart,
+        )
+
+        # XXX: we set this because?
         form.feed_label = feed_label

-        # add feed info label to top
-        vbox.insertWidget(
-            0,
-            feed_label,
-            alignment=Qt.AlignBottom,
-        )
-        # vbox.setAlignment(feed_label, Qt.AlignBottom)
-        # vbox.setAlignment(Qt.AlignBottom)
-        blank_h = chart.height() - (
-            form.height() +
-            form.fill_bar.height()
-            # feed_label.height()
-        )
-        vbox.setSpacing((1 + 5/8)*_font.px_size)
-
-        # fill in brokerd feed info
-        host, port = feed.portal.channel.raddr
-        if host == '127.0.0.1':
-            host = 'localhost'
-        mpshm = feed.shm._shm
-        shmstr = f'{humanize(mpshm.size)}'
-        form.feed_label.format(
-            actor_name=feed.portal.channel.uid[0],
-            host=host,
-            port=port,
-            symbols=len(feed.symbols),
-            shm=shmstr,
-            throttle_rate=feed.throttle_rate,
-        )

         order_pane = SettingsPane(
             form=form,
             # XXX: ugh, so hideous...
@@ -688,6 +667,11 @@ async def open_order_mode(
         )
         order_pane.set_accounts(list(trackers.keys()))

+        form.vbox.addWidget(
+            feed_label,
+            alignment=Qt.AlignBottom,
+        )
+
         # update pp icons
         for name, tracker in trackers.items():
             order_pane.update_account_icons({name: tracker.live_pp})
@@ -798,14 +782,25 @@ async def process_trades_and_update_ui(
             'position',
         ):
             sym = mode.chart.linked.symbol
-            if msg['symbol'].lower() in sym.key:
+            pp_msg_symbol = msg['symbol'].lower()
+            fqsn = sym.front_fqsn()
+            broker, key = sym.front_feed()
+            # print(
+            #     f'pp msg symbol: {pp_msg_symbol}\n',
+            #     f'fqsn: {fqsn}\n',
+            #     f'front key: {key}\n',
+            # )
+
+            if (
+                pp_msg_symbol == fqsn.replace(f'.{broker}', '')
+            ):
                 tracker = mode.trackers[msg['account']]
                 tracker.live_pp.update_from_msg(msg)
-                tracker.update_from_pp()

                 # update order pane widgets
+                tracker.update_from_pp()
                 mode.pane.update_status_ui(tracker)

+                if tracker.live_pp.size:
                     # display pnl
                     mode.pane.display_pnl(tracker)

@@ -878,7 +873,9 @@ async def process_trades_and_update_ui(
             mode.lines.remove_line(uuid=oid)

         # each clearing tick is responded individually
-        elif resp in ('broker_filled',):
+        elif resp in (
+            'broker_filled',
+        ):

             known_order = book._sent_orders.get(oid)
             if not known_order:

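The position-msg hunk above replaces the old fuzzy `in sym.key` containment test with an exact match against the fully-qualified symbol name (fqsn) minus its broker suffix, since brokerd sends keys without that suffix. The comparison in isolation, with a hypothetical `mnq.globex.ib` fqsn for illustration:

    # The exact-match test from the hunk, factored out for clarity.
    def matches_position_msg(msg_symbol: str, fqsn: str, broker: str) -> bool:
        return msg_symbol.lower() == fqsn.replace(f'.{broker}', '')

    assert matches_position_msg('MNQ.GLOBEX', 'mnq.globex.ib', 'ib')
    assert not matches_position_msg('mnq.globex', 'mes.globex.ib', 'ib')
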
@@ -1,4 +1,21 @@
 # we require a pinned dev branch to get some edge features that
 # are often untested in tractor's CI and/or being tested by us
 # first before committing as core features in tractor's base.
--e git+git://github.com/goodboy/tractor.git@piker_pin#egg=tractor
+-e git+https://github.com/goodboy/tractor.git@master#egg=tractor
+
+# `pyqtgraph` peeps keep breaking, fixing, improving so might as well
+# pin this to a dev branch that we have more control over especially
+# as more graphics stuff gets hashed out.
+-e git+https://github.com/pikers/pyqtgraph.git@piker_pin#egg=pyqtgraph
+
+
+# our async client for ``marketstore`` (the tsdb)
+-e git+https://github.com/pikers/anyio-marketstore.git@master#egg=anyio-marketstore
+
+
+# ``trimeter`` for asysnc history fetching
+-e git+https://github.com/python-trio/trimeter.git@master#egg=trimeter
+
+
+# ``asyncvnc`` for sending interactions to ib-gw inside docker
+-e git+https://github.com/pikers/asyncvnc.git@vid_passthrough#egg=asyncvnc

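requirements.txt now pins several git forks (`tractor`, `pyqtgraph`, `anyio-marketstore`, `trimeter`, `asyncvnc`) alongside the https-scheme fix for the tractor URL. A quick way to confirm the pins actually resolved into the active environment; this checker is an illustration, not part of the repo:

    # Illustrative sanity check that the git-pinned distributions from
    # requirements.txt are importable in the current env.
    from importlib.metadata import version, PackageNotFoundError

    for dist in (
        'tractor',
        'pyqtgraph',
        'anyio-marketstore',
        'trimeter',
        'asyncvnc',
    ):
        try:
            print(dist, version(dist))
        except PackageNotFoundError:
            print(dist, 'NOT INSTALLED')
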
@@ -1,5 +1,5 @@
 # piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -25,14 +25,18 @@ import i3ipc
 i3 = i3ipc.Connection()
 t = i3.get_tree()

+orig_win_id = t.find_focused().window
+
 # for tws
 win_names: list[str] = [
     'Interactive Brokers',  # tws running in i3
-    'IB Gateway.',  # gw running in i3
+    'IB Gateway',  # gw running in i3
+    # 'IB',  # gw running in i3 (newer version?)
 ]

 for name in win_names:
-    results = t.find_named(name)
+    results = t.find_titled(name)
+    print(f'results for {name}: {results}')
     if results:
         con = results[0]
         print(f'Resetting data feed for {name}')
@@ -45,17 +49,36 @@ for name in win_names:
         # https://github.com/rr-/pyxdotool
         # https://github.com/ShaneHutter/pyxdotool
         # https://github.com/cphyc/pyxdotool

+        # TODO: only run the reconnect (2nd) kc on a detected
+        # disconnect?
+        for key_combo, timeout in [
+            # only required if we need a connection reset.
+            # ('ctrl+alt+r', 12),
+            # data feed reset.
+            ('ctrl+alt+f', 6)
+        ]:
             subprocess.call([
                 'xdotool',
                 'windowactivate', '--sync', win_id,

                 # move mouse to bottom left of window (where there should
                 # be nothing to click).
-                'mousemove_relative', '--sync', str(w-3), str(h-3),
+                'mousemove_relative', '--sync', str(w-4), str(h-4),

                 # NOTE: we may need to stick a `--retry 3` in here..
-                'click', '--window', win_id, '1',
+                'click', '--window', win_id,
+                '--repeat', '3', '1',

                 # hackzorzes
-                'key', 'ctrl+alt+f',
+                'key', key_combo,
+            ],
+                timeout=timeout,
+            )
+
+# re-activate and focus original window
+subprocess.call([
+    'xdotool',
+    'windowactivate', '--sync', str(orig_win_id),
+    'click', '--window', str(orig_win_id), '1',
 ])
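The data-reset script changes grab the currently focused window up front, retry the feed-reset click, and hand focus back when done. The new focus bookkeeping reduced to a runnable fragment (assumes an i3 session with `xdotool` installed):

    # Save focus, drive another window, then restore focus.
    import subprocess
    import i3ipc

    i3 = i3ipc.Connection()
    orig_win_id = i3.get_tree().find_focused().window

    # ... drive the TWS/gateway window(s) with xdotool here ...

    subprocess.call([
        'xdotool',
        'windowactivate', '--sync', str(orig_win_id),
        'click', '--window', str(orig_win_id), '1',
    ])
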
setup.py (38 changed lines)
@@ -51,42 +51,56 @@ setup(
         # async
         'trio',
         'trio-websocket',
-        # 'tractor', # from github currently
+        'msgspec', # performant IPC messaging
         'async_generator',

+        # from github currently (see requirements.txt)
+        # 'trimeter', # not released yet..
+        # 'tractor',
+        # asyncvnc,
+
         # brokers
         'asks==2.4.8',
         'ib_insync',

         # numerics
-        'arrow', # better datetimes
+        'pendulum', # easier datetimes
         'bidict', # 2 way map
         'cython',
         'numpy',
         'numba',
-        'pandas',
-        'msgpack-numpy',

         # UI
         'PyQt5',
-        'pyqtgraph',
-        'qdarkstyle >= 3.0.2',
-        # fuzzy search
-        'fuzzywuzzy[speedup]',
+        # 'pyqtgraph', from our fork see reqs.txt
+        'qdarkstyle >= 3.0.2', # themeing
+        'fuzzywuzzy[speedup]', # fuzzy search

         # tsdbs
-        'pymarketstore',
+        # anyio-marketstore # from gh see reqs.txt
     ],
+    extras_require={
+        'tsdb': [
+            'docker',
+        ],
+
+    },
     tests_require=['pytest'],
-    python_requires=">=3.9", # literally for ``datetime.datetime.fromisoformat``...
-    keywords=["async", "trading", "finance", "quant", "charting"],
+    python_requires=">=3.10",
+    keywords=[
+        "async",
+        "trading",
+        "finance",
+        "quant",
+        "charting",
+    ],
     classifiers=[
         'Development Status :: 3 - Alpha',
         'License :: OSI Approved :: ',
         'Operating System :: POSIX :: Linux',
         "Programming Language :: Python :: Implementation :: CPython",
         "Programming Language :: Python :: 3 :: Only",
-        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: 3.10",
         'Intended Audience :: Financial and Insurance Industry',
         'Intended Audience :: Science/Research',
         'Intended Audience :: Developers',

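setup.py bumps the interpreter floor from 3.9 to 3.10 (matching the updated classifier), moves the git-pinned packages out to requirements.txt, and splits `docker` into a `tsdb` extra. A guard like the following makes the new floor fail fast at import time; this is a convention sketch, not something in the repo:

    # Fail fast if the declared interpreter floor isn't met.
    import sys

    if sys.version_info < (3, 10):
        raise RuntimeError('piker requires Python >= 3.10')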