Merge pull request #112 from pikers/chart_hacking

Chart hacking
bar_select
goodboy 2020-11-06 15:42:24 -05:00 committed by GitHub
commit e2d4ed063b
59 changed files with 6784 additions and 1180 deletions

22
Pipfile

@ -1,22 +0,0 @@
[[source]]
url = "https://pypi.python.org/simple"
verify_ssl = true
name = "pypi"
[packages]
e1839a8 = {path = ".",editable = true}
trio = "*"
Cython = "*"
# use master branch kivy since wheels seem borked (due to cython stuff)
kivy = {git = "git://github.com/kivy/kivy.git"}
pdbpp = "*"
msgpack = "*"
tractor = {git = "git://github.com/goodboy/tractor.git"}
toml = "*"
pyqtgraph = "*"
pyside2 = "*"
[dev-packages]
pytest = "*"
pdbpp = "*"
piker = {editable = true,path = "."}

644
Pipfile.lock generated

@ -1,644 +0,0 @@
{
"_meta": {
"hash": {
"sha256": "4f280bbb01bd2a384bbe963ec012e2bfa89b852a45a009674e30a3e281cd6b04"
},
"pipfile-spec": 6,
"requires": {},
"sources": [
{
"name": "pypi",
"url": "https://pypi.python.org/simple",
"verify_ssl": true
}
]
},
"default": {
"anyio": {
"hashes": [
"sha256:2b758634cf1adc3589937d91f6736b3696d091c1485a10c9cc3f1bd5e6d96813",
"sha256:78db97333af17381cadd2cc0dbd3d4e0258e4932368eab8895cb65a269db054e"
],
"version": "==1.2.3"
},
"asks": {
"hashes": [
"sha256:d7289cb5b7a28614e4fecab63b3734e2a4296d3c323e315f8dc4b546d64f71b7"
],
"version": "==2.3.6"
},
"async-generator": {
"hashes": [
"sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b",
"sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"
],
"version": "==1.10"
},
"attrs": {
"hashes": [
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"version": "==19.3.0"
},
"click": {
"hashes": [
"sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc",
"sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a"
],
"version": "==7.1.1"
},
"colorlog": {
"hashes": [
"sha256:30aaef5ab2a1873dec5da38fd6ba568fa761c9fa10b40241027fa3edea47f3d2",
"sha256:732c191ebbe9a353ec160d043d02c64ddef9028de8caae4cfa8bd49b6afed53e"
],
"version": "==4.1.0"
},
"cython": {
"hashes": [
"sha256:03f6bbb380ad0acb744fb06e42996ea217e9d00016ca0ff6f2e7d60f580d0360",
"sha256:05e8cfd3a3a6087aec49a1ae08a89171db991956209406d1e5576f9db70ece52",
"sha256:05eb79efc8029d487251c8a2702a909a8ba33c332e06d2f3980866541bd81253",
"sha256:094d28a34c3fa992ae02aea1edbe6ff89b3cc5870b6ee38b5baeb805dc57b013",
"sha256:0c70e842e52e2f50cc43bad43b5e5bc515f30821a374e544abb0e0746f2350ff",
"sha256:1dcdaa319558eb924294a554dcf6c12383ec947acc7e779e8d3622409a7f7d28",
"sha256:1fc5bdda28f25fec44e4721677458aa509d743cd350862270309d61aa148d6ff",
"sha256:280573a01d9348d44a42d6a9c651d9f7eb1fe9217df72555b2a118f902996a10",
"sha256:298ceca7b0f0da4205fcb0b7c9ac9e120e2dafffd5019ba1618e84ef89434b5a",
"sha256:4074a8bff0040035673cc6dd365a762476d6bff4d03d8ce6904e3e53f9a25dc8",
"sha256:41e7068e95fbf9ec94b41437f989caf9674135e770a39cdb9c00de459bafd1bc",
"sha256:47e5e1502d52ef03387cf9d3b3241007961a84a466e58a3b74028e1dd4957f8c",
"sha256:521340844cf388d109ceb61397f3fd5250ccb622a1a8e93559e8de76c80940a9",
"sha256:6c53338c1811f8c6d7f8cb7abd874810b15045e719e8207f957035c9177b4213",
"sha256:75c2dda47dcc3c77449712b1417bb6b89ec3b7b02e18c64262494dceffdf455e",
"sha256:773c5a98e463b52f7e8197254b39b703a5ea1972aef3a94b3b921515d77dd041",
"sha256:78c3068dcba300d473fef57cdf523e34b37de522f5a494ef9ee1ac9b4b8bbe3f",
"sha256:7bc18fc5a170f2c1cef5387a3d997c28942918bbee0f700e73fd2178ee8d474d",
"sha256:7f89eff20e4a7a64b55210dac17aea711ed8a3f2e78f2ff784c0e984302583dd",
"sha256:89458b49976b1dee5d89ab4ac943da3717b4292bf624367e862e4ee172fcce99",
"sha256:986f871c0fa649b293061236b93782d25c293a8dd8117c7ba05f8a61bdc261ae",
"sha256:a0f495a4fe5278aab278feee35e6102efecde5176a8a74dd28c28e3fc5c8d7c7",
"sha256:a14aa436586c41633339415de82a41164691d02d3e661038da533be5d40794a5",
"sha256:b8ab3ab38afc47d8f4fe629b836243544351cef681b6bdb1dc869028d6fdcbfb",
"sha256:bb487881608ebd293592553c618f0c83316f4f13a64cb18605b1d2fb9fd3da3e",
"sha256:c0b24bfe3431b3cb7ced323bca813dbd13aca973a1475b512d3331fd0de8ec60",
"sha256:c7894c06205166d360ab2915ae306d1f7403e9ce3d3aaeff4095eaf98e42ce66",
"sha256:d4039bb7f234ad32267c55e72fd49fb56078ea102f9d9d8559f6ec34d4887630",
"sha256:e4d6bb8703d0319eb04b7319b12ea41580df44fd84d83ccda13ea463c6801414",
"sha256:e8fab9911fd2fa8e5af407057cb8bdf87762f983cba483fa3234be20a9a0af77",
"sha256:f3818e578e687cdb21dc4aa4a3bc6278c656c9c393e9eda14dd04943f478863d",
"sha256:fe666645493d72712c46e4fbe8bec094b06aec3c337400479e9704439c9d9586"
],
"index": "pypi",
"version": "==0.29.14"
},
"e1839a8": {
"editable": true,
"path": "."
},
"fancycompleter": {
"hashes": [
"sha256:09e0feb8ae242abdfd7ef2ba55069a46f011814a80fe5476be48f51b00247272",
"sha256:dd076bca7d9d524cc7f25ec8f35ef95388ffef9ef46def4d3d25e9b044ad7080"
],
"version": "==0.9.1"
},
"h11": {
"hashes": [
"sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1",
"sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1"
],
"version": "==0.9.0"
},
"idna": {
"hashes": [
"sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
],
"version": "==2.9"
},
"kivy": {
"git": "git://github.com/kivy/kivy.git",
"ref": "9398c8d5d260c9f4d5dd0aadc7de5001bddaf984"
},
"msgpack": {
"hashes": [
"sha256:0cc7ca04e575ba34fea7cfcd76039f55def570e6950e4155a4174368142c8e1b",
"sha256:187794cd1eb73acccd528247e3565f6760bd842d7dc299241f830024a7dd5610",
"sha256:1904b7cb65342d0998b75908304a03cb004c63ef31e16c8c43fee6b989d7f0d7",
"sha256:229a0ccdc39e9b6c6d1033cd8aecd9c296823b6c87f0de3943c59b8bc7c64bee",
"sha256:24149a75643aeaa81ece4259084d11b792308a6cf74e796cbb35def94c89a25a",
"sha256:30b88c47e0cdb6062daed88ca283b0d84fa0d2ad6c273aa0788152a1c643e408",
"sha256:32fea0ea3cd1ef820286863a6202dcfd62a539b8ec3edcbdff76068a8c2cc6ce",
"sha256:355f7fd0f90134229eaeefaee3cf42e0afc8518e8f3cd4b25f541a7104dcb8f9",
"sha256:4abdb88a9b67e64810fb54b0c24a1fd76b12297b4f7a1467d85a14dd8367191a",
"sha256:757bd71a9b89e4f1db0622af4436d403e742506dbea978eba566815dc65ec895",
"sha256:76df51492bc6fa6cc8b65d09efdb67cbba3cbfe55004c3afc81352af92b4a43c",
"sha256:774f5edc3475917cd95fe593e625d23d8580f9b48b570d8853d06cac171cd170",
"sha256:8a3ada8401736df2bf497f65589293a86c56e197a80ae7634ec2c3150a2f5082",
"sha256:a06efd0482a1942aad209a6c18321b5e22d64eb531ea20af138b28172d8f35ba",
"sha256:b24afc52e18dccc8c175de07c1d680bdf315844566f4952b5bedb908894bec79",
"sha256:b8b4bd3dafc7b92608ae5462add1c8cc881851c2d4f5d8977fdea5b081d17f21",
"sha256:c6e5024fc0cdf7f83b6624850309ddd7e06c48a75fa0d1c5173de4d93300eb19",
"sha256:db7ff14abc73577b0bcbcf73ecff97d3580ecaa0fc8724babce21fdf3fe08ef6",
"sha256:dedf54d72d9e7b6d043c244c8213fe2b8bbfe66874b9a65b39c4cc892dd99dd4",
"sha256:ea3c2f859346fcd55fc46e96885301d9c2f7a36d453f5d8f2967840efa1e1830",
"sha256:f0f47bafe9c9b8ed03e19a100a743662dd8c6d0135e684feea720a0d0046d116"
],
"index": "pypi",
"version": "==0.6.2"
},
"numpy": {
"hashes": [
"sha256:1786a08236f2c92ae0e70423c45e1e62788ed33028f94ca99c4df03f5be6b3c6",
"sha256:17aa7a81fe7599a10f2b7d95856dc5cf84a4eefa45bc96123cbbc3ebc568994e",
"sha256:20b26aaa5b3da029942cdcce719b363dbe58696ad182aff0e5dcb1687ec946dc",
"sha256:2d75908ab3ced4223ccba595b48e538afa5ecc37405923d1fea6906d7c3a50bc",
"sha256:39d2c685af15d3ce682c99ce5925cc66efc824652e10990d2462dfe9b8918c6a",
"sha256:56bc8ded6fcd9adea90f65377438f9fea8c05fcf7c5ba766bef258d0da1554aa",
"sha256:590355aeade1a2eaba17617c19edccb7db8d78760175256e3cf94590a1a964f3",
"sha256:70a840a26f4e61defa7bdf811d7498a284ced303dfbc35acb7be12a39b2aa121",
"sha256:77c3bfe65d8560487052ad55c6998a04b654c2fbc36d546aef2b2e511e760971",
"sha256:9537eecf179f566fd1c160a2e912ca0b8e02d773af0a7a1120ad4f7507cd0d26",
"sha256:9acdf933c1fd263c513a2df3dceecea6f3ff4419d80bf238510976bf9bcb26cd",
"sha256:ae0975f42ab1f28364dcda3dde3cf6c1ddab3e1d4b2909da0cb0191fa9ca0480",
"sha256:b3af02ecc999c8003e538e60c89a2b37646b39b688d4e44d7373e11c2debabec",
"sha256:b6ff59cee96b454516e47e7721098e6ceebef435e3e21ac2d6c3b8b02628eb77",
"sha256:b765ed3930b92812aa698a455847141869ef755a87e099fddd4ccf9d81fffb57",
"sha256:c98c5ffd7d41611407a1103ae11c8b634ad6a43606eca3e2a5a269e5d6e8eb07",
"sha256:cf7eb6b1025d3e169989416b1adcd676624c2dbed9e3bcb7137f51bfc8cc2572",
"sha256:d92350c22b150c1cae7ebb0ee8b5670cc84848f6359cf6b5d8f86617098a9b73",
"sha256:e422c3152921cece8b6a2fb6b0b4d73b6579bd20ae075e7d15143e711f3ca2ca",
"sha256:e840f552a509e3380b0f0ec977e8124d0dc34dc0e68289ca28f4d7c1d0d79474",
"sha256:f3d0a94ad151870978fb93538e95411c83899c9dc63e6fb65542f769568ecfa5"
],
"version": "==1.18.1"
},
"outcome": {
"hashes": [
"sha256:ee46c5ce42780cde85d55a61819d0e6b8cb490f1dbd749ba75ff2629771dcd2d",
"sha256:fc7822068ba7dd0fc2532743611e8a73246708d3564e29a39f93d6ab3701b66f"
],
"version": "==1.0.1"
},
"pandas": {
"hashes": [
"sha256:23e177d43e4bf68950b0f8788b6a2fef2f478f4ec94883acb627b9264522a98a",
"sha256:2530aea4fe46e8df7829c3f05e0a0f821c893885d53cb8ac9b89cc67c143448c",
"sha256:303827f0bb40ff610fbada5b12d50014811efcc37aaf6ef03202dc3054bfdda1",
"sha256:3b019e3ea9f5d0cfee0efabae2cfd3976874e90bcc3e97b29600e5a9b345ae3d",
"sha256:3c07765308f091d81b6735d4f2242bb43c332cc3461cae60543df6b10967fe27",
"sha256:5036d4009012a44aa3e50173e482b664c1fae36decd277c49e453463798eca4e",
"sha256:6f38969e2325056f9959efbe06c27aa2e94dd35382265ad0703681d993036052",
"sha256:74a470d349d52b9d00a2ba192ae1ee22155bb0a300fd1ccb2961006c3fa98ed3",
"sha256:7d77034e402165b947f43050a8a415aa3205abfed38d127ea66e57a2b7b5a9e0",
"sha256:7f9a509f6f11fa8b9313002ebdf6f690a7aa1dd91efd95d90185371a0d68220e",
"sha256:942b5d04762feb0e55b2ad97ce2b254a0ffdd344b56493b04a627266e24f2d82",
"sha256:a9fbe41663416bb70ed05f4e16c5f377519c0dc292ba9aa45f5356e37df03a38",
"sha256:d10e83866b48c0cdb83281f786564e2a2b51a7ae7b8a950c3442ad3c9e36b48c",
"sha256:e2140e1bbf9c46db9936ee70f4be6584d15ff8dc3dfff1da022d71227d53bad3"
],
"version": "==1.0.1"
},
"pdbpp": {
"hashes": [
"sha256:73ff220d5006e0ecdc3e2705d8328d8aa5ac27fef95cc06f6e42cd7d22d55eb8"
],
"index": "pypi",
"version": "==0.10.2"
},
"psutil": {
"hashes": [
"sha256:06660136ab88762309775fd47290d7da14094422d915f0466e0adf8e4b22214e",
"sha256:0c11adde31011a286197630ba2671e34651f004cc418d30ae06d2033a43c9e20",
"sha256:0c211eec4185725847cb6c28409646c7cfa56fdb531014b35f97b5dc7fe04ff9",
"sha256:0fc7a5619b47f74331add476fbc6022d7ca801c22865c7069ec0867920858963",
"sha256:3004361c6b93dbad71330d992c1ae409cb8314a6041a0b67507cc882357f583e",
"sha256:5e8dbf31871b0072bcba8d1f2861c0ec6c84c78f13c723bb6e981bce51b58f12",
"sha256:6d81b9714791ef9a3a00b2ca846ee547fc5e53d259e2a6258c3d2054928039ff",
"sha256:724390895cff80add7a1c4e7e0a04d9c94f3ee61423a2dcafd83784fabbd1ee9",
"sha256:ad21281f7bd6c57578dd53913d2d44218e9e29fd25128d10ff7819ef16fa46e7",
"sha256:f21a7bb4b207e4e7c60b3c40ffa89d790997619f04bbecec9db8e3696122bc78",
"sha256:f60042bef7dc50a78c06334ca8e25580455948ba2fa98f240d034a4fed9141a5"
],
"index": "pypi",
"version": "==5.6.6"
},
"pygments": {
"hashes": [
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
],
"version": "==2.6.1"
},
"pyqtgraph": {
"hashes": [
"sha256:4c08ab34881fae5ecf9ddfe6c1220b9e41e6d3eb1579a7d8ef501abb8e509251"
],
"index": "pypi",
"version": "==0.10.0"
},
"pyrepl": {
"hashes": [
"sha256:292570f34b5502e871bbb966d639474f2b57fbfcd3373c2d6a2f3d56e681a775"
],
"version": "==0.9.0"
},
"pyside2": {
"hashes": [
"sha256:589b90944c24046d31bf76694590a600d59d20130015086491b793a81753629a",
"sha256:63cc845434388b398b79b65f7b5312b9b5348fbc772d84092c9245efbf341197",
"sha256:7c57fe60ed57a3a8b95d9163abca9caa803a1470f29b40bff8ef4103b97a96c8",
"sha256:7c61a6883f3474939097b9dabc80f028887046be003ce416da1b3565a08d1f92",
"sha256:ed6d22c7a3a99f480d4c9348bcced97ef7bc0c9d353ad3665ae705e8eb61feb5",
"sha256:ede8ed6e7021232184829d16614eb153f188ea250862304eac35e04b2bd0120c"
],
"index": "pypi",
"version": "==5.13.2"
},
"python-dateutil": {
"hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
],
"version": "==2.8.1"
},
"pytz": {
"hashes": [
"sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d",
"sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"
],
"version": "==2019.3"
},
"shiboken2": {
"hashes": [
"sha256:5e84a4b4e7ab08bb5db0a8168e5d0316fbf3c25b788012701a82079faadfb19b",
"sha256:7c766c4160636a238e0e4430e2f40b504b13bcc4951902eb78cd5c971f26c898",
"sha256:81fa9b288c6c4b4c91220fcca2002eadb48fc5c3238e8bd88e982e00ffa77c53",
"sha256:ca08a3c95b1b20ac2b243b7b06379609bd73929dbc27b28c01415feffe3bcea1",
"sha256:e2f72b5cfdb8b48bdb55bda4b42ec7d36d1bce0be73d6d7d4a358225d6fb5f25",
"sha256:e6543506cb353d417961b9ec3c6fc726ec2f72eeab609dc88943c2e5cb6d6408"
],
"version": "==5.13.2"
},
"six": {
"hashes": [
"sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
"sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
],
"version": "==1.14.0"
},
"sniffio": {
"hashes": [
"sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5",
"sha256:8e3810100f69fe0edd463d02ad407112542a11ffdc29f67db2bf3771afb87a21"
],
"version": "==1.1.0"
},
"sortedcontainers": {
"hashes": [
"sha256:974e9a32f56b17c1bac2aebd9dcf197f3eb9cd30553c5852a3187ad162e1a03a",
"sha256:d9e96492dd51fae31e60837736b38fe42a187b5404c16606ff7ee7cd582d4c60"
],
"version": "==2.1.0"
},
"toml": {
"hashes": [
"sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
"sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
],
"index": "pypi",
"version": "==0.10.0"
},
"tractor": {
"git": "git://github.com/goodboy/tractor.git",
"ref": "ab349cdb8d8cbf2e3d48c0589cb710a43483f233"
},
"trio": {
"hashes": [
"sha256:a6d83c0cb4a177ec0f5179ce88e27914d5c8e6fd01c4285176b949e6ddc88c6c",
"sha256:f1cf00054ad974c86d9b7afa187a65d79fd5995340abe01e8e4784d86f4acb30"
],
"index": "pypi",
"version": "==0.13.0"
},
"wmctrl": {
"hashes": [
"sha256:d806f65ac1554366b6e31d29d7be2e8893996c0acbb2824bbf2b1f49cf628a13"
],
"version": "==0.3"
}
},
"develop": {
"anyio": {
"hashes": [
"sha256:2b758634cf1adc3589937d91f6736b3696d091c1485a10c9cc3f1bd5e6d96813",
"sha256:78db97333af17381cadd2cc0dbd3d4e0258e4932368eab8895cb65a269db054e"
],
"version": "==1.2.3"
},
"asks": {
"hashes": [
"sha256:d7289cb5b7a28614e4fecab63b3734e2a4296d3c323e315f8dc4b546d64f71b7"
],
"version": "==2.3.6"
},
"async-generator": {
"hashes": [
"sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b",
"sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"
],
"version": "==1.10"
},
"atomicwrites": {
"hashes": [
"sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
"sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
],
"version": "==1.3.0"
},
"attrs": {
"hashes": [
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
],
"version": "==19.3.0"
},
"click": {
"hashes": [
"sha256:8a18b4ea89d8820c5d0c7da8a64b2c324b4dabb695804dbfea19b9be9d88c0cc",
"sha256:e345d143d80bf5ee7534056164e5e112ea5e22716bbb1ce727941f4c8b471b9a"
],
"version": "==7.1.1"
},
"colorlog": {
"hashes": [
"sha256:30aaef5ab2a1873dec5da38fd6ba568fa761c9fa10b40241027fa3edea47f3d2",
"sha256:732c191ebbe9a353ec160d043d02c64ddef9028de8caae4cfa8bd49b6afed53e"
],
"version": "==4.1.0"
},
"cython": {
"hashes": [
"sha256:03f6bbb380ad0acb744fb06e42996ea217e9d00016ca0ff6f2e7d60f580d0360",
"sha256:05e8cfd3a3a6087aec49a1ae08a89171db991956209406d1e5576f9db70ece52",
"sha256:05eb79efc8029d487251c8a2702a909a8ba33c332e06d2f3980866541bd81253",
"sha256:094d28a34c3fa992ae02aea1edbe6ff89b3cc5870b6ee38b5baeb805dc57b013",
"sha256:0c70e842e52e2f50cc43bad43b5e5bc515f30821a374e544abb0e0746f2350ff",
"sha256:1dcdaa319558eb924294a554dcf6c12383ec947acc7e779e8d3622409a7f7d28",
"sha256:1fc5bdda28f25fec44e4721677458aa509d743cd350862270309d61aa148d6ff",
"sha256:280573a01d9348d44a42d6a9c651d9f7eb1fe9217df72555b2a118f902996a10",
"sha256:298ceca7b0f0da4205fcb0b7c9ac9e120e2dafffd5019ba1618e84ef89434b5a",
"sha256:4074a8bff0040035673cc6dd365a762476d6bff4d03d8ce6904e3e53f9a25dc8",
"sha256:41e7068e95fbf9ec94b41437f989caf9674135e770a39cdb9c00de459bafd1bc",
"sha256:47e5e1502d52ef03387cf9d3b3241007961a84a466e58a3b74028e1dd4957f8c",
"sha256:521340844cf388d109ceb61397f3fd5250ccb622a1a8e93559e8de76c80940a9",
"sha256:6c53338c1811f8c6d7f8cb7abd874810b15045e719e8207f957035c9177b4213",
"sha256:75c2dda47dcc3c77449712b1417bb6b89ec3b7b02e18c64262494dceffdf455e",
"sha256:773c5a98e463b52f7e8197254b39b703a5ea1972aef3a94b3b921515d77dd041",
"sha256:78c3068dcba300d473fef57cdf523e34b37de522f5a494ef9ee1ac9b4b8bbe3f",
"sha256:7bc18fc5a170f2c1cef5387a3d997c28942918bbee0f700e73fd2178ee8d474d",
"sha256:7f89eff20e4a7a64b55210dac17aea711ed8a3f2e78f2ff784c0e984302583dd",
"sha256:89458b49976b1dee5d89ab4ac943da3717b4292bf624367e862e4ee172fcce99",
"sha256:986f871c0fa649b293061236b93782d25c293a8dd8117c7ba05f8a61bdc261ae",
"sha256:a0f495a4fe5278aab278feee35e6102efecde5176a8a74dd28c28e3fc5c8d7c7",
"sha256:a14aa436586c41633339415de82a41164691d02d3e661038da533be5d40794a5",
"sha256:b8ab3ab38afc47d8f4fe629b836243544351cef681b6bdb1dc869028d6fdcbfb",
"sha256:bb487881608ebd293592553c618f0c83316f4f13a64cb18605b1d2fb9fd3da3e",
"sha256:c0b24bfe3431b3cb7ced323bca813dbd13aca973a1475b512d3331fd0de8ec60",
"sha256:c7894c06205166d360ab2915ae306d1f7403e9ce3d3aaeff4095eaf98e42ce66",
"sha256:d4039bb7f234ad32267c55e72fd49fb56078ea102f9d9d8559f6ec34d4887630",
"sha256:e4d6bb8703d0319eb04b7319b12ea41580df44fd84d83ccda13ea463c6801414",
"sha256:e8fab9911fd2fa8e5af407057cb8bdf87762f983cba483fa3234be20a9a0af77",
"sha256:f3818e578e687cdb21dc4aa4a3bc6278c656c9c393e9eda14dd04943f478863d",
"sha256:fe666645493d72712c46e4fbe8bec094b06aec3c337400479e9704439c9d9586"
],
"index": "pypi",
"version": "==0.29.14"
},
"fancycompleter": {
"hashes": [
"sha256:09e0feb8ae242abdfd7ef2ba55069a46f011814a80fe5476be48f51b00247272",
"sha256:dd076bca7d9d524cc7f25ec8f35ef95388ffef9ef46def4d3d25e9b044ad7080"
],
"version": "==0.9.1"
},
"h11": {
"hashes": [
"sha256:33d4bca7be0fa039f4e84d50ab00531047e53d6ee8ffbc83501ea602c169cae1",
"sha256:4bc6d6a1238b7615b266ada57e0618568066f57dd6fa967d1290ec9309b2f2f1"
],
"version": "==0.9.0"
},
"idna": {
"hashes": [
"sha256:7588d1c14ae4c77d74036e8c22ff447b26d0fde8f007354fd48a7814db15b7cb",
"sha256:a068a21ceac8a4d63dbfd964670474107f541babbd2250d61922f029858365fa"
],
"version": "==2.9"
},
"more-itertools": {
"hashes": [
"sha256:5dd8bcf33e5f9513ffa06d5ad33d78f31e1931ac9a18f33d37e77a180d393a7c",
"sha256:b1ddb932186d8a6ac451e1d95844b382f55e12686d51ca0c68b6f61f2ab7a507"
],
"version": "==8.2.0"
},
"msgpack": {
"hashes": [
"sha256:0cc7ca04e575ba34fea7cfcd76039f55def570e6950e4155a4174368142c8e1b",
"sha256:187794cd1eb73acccd528247e3565f6760bd842d7dc299241f830024a7dd5610",
"sha256:1904b7cb65342d0998b75908304a03cb004c63ef31e16c8c43fee6b989d7f0d7",
"sha256:229a0ccdc39e9b6c6d1033cd8aecd9c296823b6c87f0de3943c59b8bc7c64bee",
"sha256:24149a75643aeaa81ece4259084d11b792308a6cf74e796cbb35def94c89a25a",
"sha256:30b88c47e0cdb6062daed88ca283b0d84fa0d2ad6c273aa0788152a1c643e408",
"sha256:32fea0ea3cd1ef820286863a6202dcfd62a539b8ec3edcbdff76068a8c2cc6ce",
"sha256:355f7fd0f90134229eaeefaee3cf42e0afc8518e8f3cd4b25f541a7104dcb8f9",
"sha256:4abdb88a9b67e64810fb54b0c24a1fd76b12297b4f7a1467d85a14dd8367191a",
"sha256:757bd71a9b89e4f1db0622af4436d403e742506dbea978eba566815dc65ec895",
"sha256:76df51492bc6fa6cc8b65d09efdb67cbba3cbfe55004c3afc81352af92b4a43c",
"sha256:774f5edc3475917cd95fe593e625d23d8580f9b48b570d8853d06cac171cd170",
"sha256:8a3ada8401736df2bf497f65589293a86c56e197a80ae7634ec2c3150a2f5082",
"sha256:a06efd0482a1942aad209a6c18321b5e22d64eb531ea20af138b28172d8f35ba",
"sha256:b24afc52e18dccc8c175de07c1d680bdf315844566f4952b5bedb908894bec79",
"sha256:b8b4bd3dafc7b92608ae5462add1c8cc881851c2d4f5d8977fdea5b081d17f21",
"sha256:c6e5024fc0cdf7f83b6624850309ddd7e06c48a75fa0d1c5173de4d93300eb19",
"sha256:db7ff14abc73577b0bcbcf73ecff97d3580ecaa0fc8724babce21fdf3fe08ef6",
"sha256:dedf54d72d9e7b6d043c244c8213fe2b8bbfe66874b9a65b39c4cc892dd99dd4",
"sha256:ea3c2f859346fcd55fc46e96885301d9c2f7a36d453f5d8f2967840efa1e1830",
"sha256:f0f47bafe9c9b8ed03e19a100a743662dd8c6d0135e684feea720a0d0046d116"
],
"index": "pypi",
"version": "==0.6.2"
},
"numpy": {
"hashes": [
"sha256:1786a08236f2c92ae0e70423c45e1e62788ed33028f94ca99c4df03f5be6b3c6",
"sha256:17aa7a81fe7599a10f2b7d95856dc5cf84a4eefa45bc96123cbbc3ebc568994e",
"sha256:20b26aaa5b3da029942cdcce719b363dbe58696ad182aff0e5dcb1687ec946dc",
"sha256:2d75908ab3ced4223ccba595b48e538afa5ecc37405923d1fea6906d7c3a50bc",
"sha256:39d2c685af15d3ce682c99ce5925cc66efc824652e10990d2462dfe9b8918c6a",
"sha256:56bc8ded6fcd9adea90f65377438f9fea8c05fcf7c5ba766bef258d0da1554aa",
"sha256:590355aeade1a2eaba17617c19edccb7db8d78760175256e3cf94590a1a964f3",
"sha256:70a840a26f4e61defa7bdf811d7498a284ced303dfbc35acb7be12a39b2aa121",
"sha256:77c3bfe65d8560487052ad55c6998a04b654c2fbc36d546aef2b2e511e760971",
"sha256:9537eecf179f566fd1c160a2e912ca0b8e02d773af0a7a1120ad4f7507cd0d26",
"sha256:9acdf933c1fd263c513a2df3dceecea6f3ff4419d80bf238510976bf9bcb26cd",
"sha256:ae0975f42ab1f28364dcda3dde3cf6c1ddab3e1d4b2909da0cb0191fa9ca0480",
"sha256:b3af02ecc999c8003e538e60c89a2b37646b39b688d4e44d7373e11c2debabec",
"sha256:b6ff59cee96b454516e47e7721098e6ceebef435e3e21ac2d6c3b8b02628eb77",
"sha256:b765ed3930b92812aa698a455847141869ef755a87e099fddd4ccf9d81fffb57",
"sha256:c98c5ffd7d41611407a1103ae11c8b634ad6a43606eca3e2a5a269e5d6e8eb07",
"sha256:cf7eb6b1025d3e169989416b1adcd676624c2dbed9e3bcb7137f51bfc8cc2572",
"sha256:d92350c22b150c1cae7ebb0ee8b5670cc84848f6359cf6b5d8f86617098a9b73",
"sha256:e422c3152921cece8b6a2fb6b0b4d73b6579bd20ae075e7d15143e711f3ca2ca",
"sha256:e840f552a509e3380b0f0ec977e8124d0dc34dc0e68289ca28f4d7c1d0d79474",
"sha256:f3d0a94ad151870978fb93538e95411c83899c9dc63e6fb65542f769568ecfa5"
],
"version": "==1.18.1"
},
"outcome": {
"hashes": [
"sha256:ee46c5ce42780cde85d55a61819d0e6b8cb490f1dbd749ba75ff2629771dcd2d",
"sha256:fc7822068ba7dd0fc2532743611e8a73246708d3564e29a39f93d6ab3701b66f"
],
"version": "==1.0.1"
},
"packaging": {
"hashes": [
"sha256:3c292b474fda1671ec57d46d739d072bfd495a4f51ad01a055121d81e952b7a3",
"sha256:82f77b9bee21c1bafbf35a84905d604d5d1223801d639cf3ed140bd651c08752"
],
"version": "==20.3"
},
"pandas": {
"hashes": [
"sha256:23e177d43e4bf68950b0f8788b6a2fef2f478f4ec94883acb627b9264522a98a",
"sha256:2530aea4fe46e8df7829c3f05e0a0f821c893885d53cb8ac9b89cc67c143448c",
"sha256:303827f0bb40ff610fbada5b12d50014811efcc37aaf6ef03202dc3054bfdda1",
"sha256:3b019e3ea9f5d0cfee0efabae2cfd3976874e90bcc3e97b29600e5a9b345ae3d",
"sha256:3c07765308f091d81b6735d4f2242bb43c332cc3461cae60543df6b10967fe27",
"sha256:5036d4009012a44aa3e50173e482b664c1fae36decd277c49e453463798eca4e",
"sha256:6f38969e2325056f9959efbe06c27aa2e94dd35382265ad0703681d993036052",
"sha256:74a470d349d52b9d00a2ba192ae1ee22155bb0a300fd1ccb2961006c3fa98ed3",
"sha256:7d77034e402165b947f43050a8a415aa3205abfed38d127ea66e57a2b7b5a9e0",
"sha256:7f9a509f6f11fa8b9313002ebdf6f690a7aa1dd91efd95d90185371a0d68220e",
"sha256:942b5d04762feb0e55b2ad97ce2b254a0ffdd344b56493b04a627266e24f2d82",
"sha256:a9fbe41663416bb70ed05f4e16c5f377519c0dc292ba9aa45f5356e37df03a38",
"sha256:d10e83866b48c0cdb83281f786564e2a2b51a7ae7b8a950c3442ad3c9e36b48c",
"sha256:e2140e1bbf9c46db9936ee70f4be6584d15ff8dc3dfff1da022d71227d53bad3"
],
"version": "==1.0.1"
},
"pdbpp": {
"hashes": [
"sha256:73ff220d5006e0ecdc3e2705d8328d8aa5ac27fef95cc06f6e42cd7d22d55eb8"
],
"index": "pypi",
"version": "==0.10.2"
},
"piker": {
"editable": true,
"path": "."
},
"pluggy": {
"hashes": [
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
],
"version": "==0.13.1"
},
"py": {
"hashes": [
"sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa",
"sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"
],
"version": "==1.8.1"
},
"pygments": {
"hashes": [
"sha256:647344a061c249a3b74e230c739f434d7ea4d8b1d5f3721bc0f3558049b38f44",
"sha256:ff7a40b4860b727ab48fad6360eb351cc1b33cbf9b15a0f689ca5353e9463324"
],
"version": "==2.6.1"
},
"pyparsing": {
"hashes": [
"sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f",
"sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"
],
"version": "==2.4.6"
},
"pyrepl": {
"hashes": [
"sha256:292570f34b5502e871bbb966d639474f2b57fbfcd3373c2d6a2f3d56e681a775"
],
"version": "==0.9.0"
},
"pytest": {
"hashes": [
"sha256:8e256fe71eb74e14a4d20a5987bb5e1488f0511ee800680aaedc62b9358714e8",
"sha256:ff0090819f669aaa0284d0f4aad1a6d9d67a6efdc6dd4eb4ac56b704f890a0d6"
],
"index": "pypi",
"version": "==5.2.4"
},
"python-dateutil": {
"hashes": [
"sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c",
"sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"
],
"version": "==2.8.1"
},
"pytz": {
"hashes": [
"sha256:1c557d7d0e871de1f5ccd5833f60fb2550652da6be2693c1e02300743d21500d",
"sha256:b02c06db6cf09c12dd25137e563b31700d3b80fcc4ad23abb7a315f2789819be"
],
"version": "==2019.3"
},
"six": {
"hashes": [
"sha256:236bdbdce46e6e6a3d61a337c0f8b763ca1e8717c03b369e87a7ec7ce1319c0a",
"sha256:8f3cd2e254d8f793e7f3d6d9df77b92252b52637291d0f0da013c76ea2724b6c"
],
"version": "==1.14.0"
},
"sniffio": {
"hashes": [
"sha256:20ed6d5b46f8ae136d00b9dcb807615d83ed82ceea6b2058cecb696765246da5",
"sha256:8e3810100f69fe0edd463d02ad407112542a11ffdc29f67db2bf3771afb87a21"
],
"version": "==1.1.0"
},
"sortedcontainers": {
"hashes": [
"sha256:974e9a32f56b17c1bac2aebd9dcf197f3eb9cd30553c5852a3187ad162e1a03a",
"sha256:d9e96492dd51fae31e60837736b38fe42a187b5404c16606ff7ee7cd582d4c60"
],
"version": "==2.1.0"
},
"trio": {
"hashes": [
"sha256:a6d83c0cb4a177ec0f5179ce88e27914d5c8e6fd01c4285176b949e6ddc88c6c",
"sha256:f1cf00054ad974c86d9b7afa187a65d79fd5995340abe01e8e4784d86f4acb30"
],
"index": "pypi",
"version": "==0.13.0"
},
"wcwidth": {
"hashes": [
"sha256:8fd29383f539be45b20bd4df0dc29c20ba48654a41e661925e612311e9f3c603",
"sha256:f28b3e8a6483e5d49e7f8949ac1a78314e740333ae305b4ba5defd3e74fb37a8"
],
"version": "==0.1.8"
},
"wmctrl": {
"hashes": [
"sha256:d806f65ac1554366b6e31d29d7be2e8893996c0acbb2824bbf2b1f49cf628a13"
],
"version": "==0.3"
}
}
}


@ -1,130 +1,112 @@
piker
-----
Trading gear for hackers.
trading gear for hackers.
|travis|
|gh_actions|
``piker`` is an attempt at a pro-grade, broker agnostic, next-gen FOSS toolset for real-time
trading and financial analysis targeted at hardcore Linux users.
.. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpikers%2Fpiker%2Fbadge&style=popout-square
:target: https://actions-badge.atrox.dev/piker/pikers/goto
It tries to use as much bleeding edge tech as possible including (but not limited to):
``piker`` is a broker agnostic, next-gen FOSS toolset for real-time
trading targeted at hardcore Linux users.
- Python 3.7+ for glue_ and business logic
- trio_ for async
- tractor_ as the underlying actor model
we use as much bleeding edge tech as possible including (but not limited to):
- latest python for glue_
- trio_ for `structured concurrency`_
- tractor_ for distributed, multi-core, real-time streaming
- marketstore_ for historical and real-time tick data persistence and sharing
- techtonicdb_ for L2 book storage
- Qt_ for pristine high performance UIs
- pyqtgraph_ for real-time charting
- ``numpy`` and ``numba`` for `fast numerics`_
.. |travis| image:: https://img.shields.io/travis/pikers/piker/master.svg
:target: https://travis-ci.org/pikers/piker
.. _trio: https://github.com/python-trio/trio
.. _tractor: https://github.com/goodboy/tractor
.. _structured concurrency: https://trio.discourse.group/
.. _marketstore: https://github.com/alpacahq/marketstore
.. _techtonicdb: https://github.com/0b01/tectonicdb
.. _Qt: https://www.qt.io/
.. _pyqtgraph: https://github.com/pyqtgraph/pyqtgraph
.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
.. _fast numerics: https://zerowithdot.com/python-numpy-and-pandas-performance/
Focus and Features:
focus and features:
*******************
- 100% federated: running your code on your hardware with your
broker's data feeds, privately, **is the point** (this is not a web-based *I
don't know how to run my own system* project).
- Asset class, broker, exchange agnostic.
- Built on a highly reliable `structured concurrent actor model
<tractor>`_ with built in async streaming and scalability protocols
allowing for a distributed architecture from the ground up.
- Privacy: your orders, indicators, algos are all run client side and
are shared only with the (groups of) traders you specify.
- Production grade, highly attractive native UIs that feel and fit like
a proper pair of skinny jeans; only meant to be used with a proper
tiling window manager (no, we are not ignorant enough to roll our own).
- Sophisticated charting capable of processing large data sets in real-time
while sanely displaying complex models and strategy systems.
- Built-in support for *hipstery* indicators and studies that you
probably haven't heard of but that the authors **know** generate alpha
when paired with the right strategies.
- Emphasis on collaboration through sharing of data, ideas, and processing
power. We will not host your code in the cloud nor ask you to
participate in any lame "alpha competitions".
- Adoption is very low priority, especially if you're not an experienced
trader; the system is not built for sale it is built for *people*.
- No, we will never have a "corporation friendly license"; if you intend to use
this code base we must know about it.
- zero web, cloud or "backtesting frameworks" (aka yabf)
- zero self promotion (aka pump); expected throughout the community
- 100% federated: your code, your hardware, your data feeds, your broker fills
- broker/exchange/asset-class agnostic
- privacy
- real-time financial signal processing from the ground up
- high quality, attractive, native UX with expected use in tiling wms
- sophisticated rt charting and data sharing facilities
- geared for collaboration within trader communities
- zero interest in adoption by suits; no corporate friendly license, ever.
- not built for *sale*; built for *people*
Fitting with these tenets, we're always open to new framework suggestions and ideas.
fitting with these tenets, we're always open to new framework
suggestions and ideas.
Building the best looking, most reliable, keyboard friendly trading platform is the dream.
Feel free to pipe in with your ideas and quiffs.
building the best looking, most reliable, keyboard friendly trading
platform is the dream. feel free to pipe in with your ideas and quiffs.
Install
install
*******
``piker`` is currently under heavy pre-alpha development and as such should
be cloned from this repo and hacked on directly.
``piker`` is currently under heavy pre-alpha development and as such
should be cloned from this repo and hacked on directly.
A couple bleeding edge components are being used atm pertaining to
async ports of libraries for use with `trio`_.
a couple bleeding edge components are being used atm pertaining to
new components within `trio`_.
Before installing make sure you have `pipenv`_ and have installed
``python3.7`` as well as `kivy source build`_ dependencies
since currently there's reliance on an async development branch.
`kivy` dependencies
===================
On Archlinux you need the following dependencies::
pacman -S python-docutils gstreamer sdl2_ttf sdl2_mixer sdl2_image xclip
To manually install the async branch of ``kivy`` from github do (though
this should be done as part of the ``pipenv install`` below)::
pipenv install -e 'git+git://github.com/matham/kivy.git@async-loop#egg=kivy'
.. _kivy source build:
https://kivy.org/docs/installation/installation-linux.html#installation-in-a-virtual-environment
For a development install::
for a development install::
git clone git@github.com:pikers/piker.git
cd piker
pipenv install --pre -e .
pipenv shell
pip install -e .
Broker Support
broker Support
**************
For live data feeds the only fully functional broker at the moment is Questrade_.
Eventual support is in the works for `IB`, `TD Ameritrade` and `IEX`.
If you want your broker supported and they have an API let us know.
for live data feeds the in-progress set of supported brokers is:
.. _Questrade: https://www.questrade.com/api/documentation
- IB_ via ``ib_insync``
- questrade_ which comes with effectively free L1
- kraken_ for crypto over their public websocket API
coming soon...
- webull_ via the reverse engineered public API
- yahoo via yliveticker_
- coinbase_ through websocket feed
if you want your broker supported and they have an API let us know.
.. _IB: https://interactivebrokers.github.io/tws-api/index.html
.. _questrade: https://www.questrade.com/api/documentation
.. _kraken: https://www.kraken.com/features/api#public-market-data
.. _webull: https://github.com/tedchou12/webull
.. _yliveticker: https://github.com/yahoofinancelive/yliveticker
.. _coinbase: https://docs.pro.coinbase.com/#websocket-feed
check out our charts
********************
bet you weren't expecting this from the foss bby::
piker -b kraken chart XBTUSD
Play with some UIs
******************
if anyone asks you what this project is about
*********************************************
you don't talk about it.
To start the real-time index monitor with the `questrade` backend::
how do i get involved?
**********************
enter the matrix.
piker -l info monitor indexes
If you want to see super granular price changes, increase the
broker quote query ``rate`` with ``-r``::
piker monitor indexes -r 10
It is also possible to run the broker data feed micro service as a daemon::
pikerd -l info
Then start the client app as normal::
piker monitor indexes
.. _pipenv: https://docs.pipenv.org/
learning the code is to your benefit and acts as a filter for desired
users; many alpha nuggets within.


@ -17,3 +17,8 @@
"""
piker: trading gear for hackers.
"""
import msgpack # noqa
import msgpack_numpy
# patch msgpack for numpy arrays
msgpack_numpy.patch()
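For context, the effect of the patch is that plain ``msgpack`` calls become numpy-aware; a minimal round-trip sketch (not part of the diff):

import msgpack
import msgpack_numpy
import numpy as np

msgpack_numpy.patch()

arr = np.arange(5, dtype='float64')
packed = msgpack.packb(arr)          # serializes the ndarray transparently
restored = msgpack.unpackb(packed)   # comes back as an ndarray
assert np.array_equal(arr, restored)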


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Async utils no one seems to have built into a core lib (yet).
"""

35
piker/_profile.py 100644

@ -0,0 +1,35 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Profiling wrappers for internal libs.
"""
import time
from functools import wraps
def timeit(fn):
@wraps(fn)
def wrapper(*args, **kwargs):
t = time.time()
res = fn(*args, **kwargs)
print(
'%s.%s: %.4f sec'
% (fn.__module__, fn.__qualname__, time.time() - t)
)
return res
return wrapper
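A hypothetical usage sketch for the decorator above (the workload function is made up purely for illustration):

from piker._profile import timeit

@timeit
def crunch(n: int) -> int:
    # toy workload just to have something measurable
    return sum(i * i for i in range(n))

crunch(1_000_000)
# prints something like: __main__.crunch: 0.0712 sec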


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Broker clients, daemons and general back end machinery.
"""
@ -11,6 +27,8 @@ asks.init('trio')
__brokers__ = [
'questrade',
'robinhood',
'ib',
'kraken',
]
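Each name in this list maps to a backend module under ``piker.brokers``; resolution is typically a dynamic import along these lines (a sketch of the idea, not necessarily the package's exact helper):

import importlib
from types import ModuleType

def get_brokermod_sketch(brokername: str) -> ModuleType:
    # e.g. 'kraken' -> the piker.brokers.kraken module
    return importlib.import_module(f'piker.brokers.{brokername}')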


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Handy utils.
"""


@ -0,0 +1,69 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Actor-aware broker agnostic interface.
"""
from contextlib import asynccontextmanager, AsyncExitStack
import trio
import tractor
from . import get_brokermod
from ..log import get_logger
log = get_logger(__name__)
@asynccontextmanager
async def get_cached_client(
brokername: str,
*args,
**kwargs,
) -> 'Client': # noqa
"""Get a cached broker client from the current actor's local vars.
If one has not been setup do it and cache it.
"""
# check if a cached client is in the local actor's statespace
ss = tractor.current_actor().statespace
clients = ss.setdefault('clients', {'_lock': trio.Lock()})
lock = clients['_lock']
client = None
try:
log.info(f"Loading existing `{brokername}` daemon")
async with lock:
client = clients[brokername]
client._consumers += 1
yield client
except KeyError:
log.info(f"Creating new client for broker {brokername}")
async with lock:
brokermod = get_brokermod(brokername)
exit_stack = AsyncExitStack()
client = await exit_stack.enter_async_context(
brokermod.get_client()
)
client._consumers = 0
client._exit_stack = exit_stack
clients[brokername] = client
yield client
finally:
client._consumers -= 1
if client._consumers <= 0:
# teardown the client
await client._exit_stack.aclose()
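A hypothetical consumer of the context manager above; the broker name and the ``quote()`` call are assumptions for illustration. Repeated entries within the same actor reuse (and reference count) a single underlying client:

async def get_last_quotes(symbols):
    # assumes this task is already running inside a tractor actor
    async with get_cached_client('questrade') as client:
        # the per-backend client API varies; `quote()` is assumed here
        return await client.quote(symbols)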


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Console interface to broker client/daemons.
"""
@ -131,7 +147,6 @@ def bars(config, symbol, count, df_output):
click.echo(colorize_json(bars))
@cli.command()
@click.option('--rate', '-r', default=5, help='Logging level')
@click.option('--filename', '-f', default='quotestream.jsonstream',
@ -260,7 +275,11 @@ def search(config, pattern):
# global opts
brokermod = config['brokermod']
quotes = trio.run(partial(core.symbol_search, brokermod, pattern))
quotes = tractor.run(
partial(core.symbol_search, brokermod, pattern),
start_method='forkserver',
loglevel='info',
)
if not quotes:
log.error(f"No matches could be found for {pattern}?")
return


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Broker configuration mgmt.
"""


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Broker high level cross-process API layer.
@ -112,11 +128,11 @@ async def symbol_info(
async def symbol_search(
brokermod: ModuleType,
symbol: str,
pattern: str,
**kwargs,
) -> Dict[str, Dict[str, Dict[str, Any]]]:
"""Return symbol info from broker.
"""
async with brokermod.get_client() as client:
# TODO: support multiple asset type concurrent searches.
return await client.search_stocks(symbol, **kwargs)
return await client.search_stocks(pattern=pattern, **kwargs)


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Real-time data feed machinery
"""
@ -81,10 +97,10 @@ class BrokerFeed:
@tractor.msg.pub(tasks=['stock', 'option'])
async def stream_poll_requests(
get_topics: typing.Callable,
get_topics: Callable,
get_quotes: Coroutine,
normalizer: Callable,
rate: int = 3, # delay between quote requests
diff_cached: bool = True, # only deliver "new" quotes to the queue
) -> None:
"""Stream requests for quotes for a set of symbols at the given
``rate`` (per second).
@ -129,57 +145,14 @@ async def stream_poll_requests(
quotes = await wait_for_network(request_quotes)
new_quotes = {}
if diff_cached:
# If cache is enabled then only deliver "new" changes.
# Useful for polling setups but obviously should be
# disabled if you're rx-ing per-tick data.
for quote in quotes:
symbol = quote['symbol']
last = _cache.setdefault(symbol, {})
last_volume = last.get('volume', 0)
# find all keys that have match to a new value compared
# to the last quote received
new = set(quote.items()) - set(last.items())
if new:
log.info(
f"New quote {quote['symbol']}:\n{new}")
_cache[symbol] = quote
# only ship diff updates and other required fields
payload = {k: quote[k] for k, v in new}
payload['symbol'] = symbol
# if there was volume likely the last size of
# shares traded is useful info and it's possible
# that the set difference from above will disregard
# a "size" value since the same # of shares were traded
volume = payload.get('volume')
if volume:
volume_since_last_quote = volume - last_volume
assert volume_since_last_quote > 0
payload['volume_delta'] = volume_since_last_quote
# TODO: We can emit 2 ticks here:
# - one for the volume differential
# - one for the last known trade size
# The first in theory can be unwound and
# interpolated assuming the broker passes an
# accurate daily VWAP value.
# To make this work we need a universal ``size``
# field that is normalized before hitting this logic.
# XXX: very questrade specific
payload['size'] = quote['lastTradeSize']
normalized = normalizer(quotes, _cache)
for symbol, quote in normalized.items():
# XXX: we append to a list for the options case where the
# subscription topic (key) is the same for all
# expiries even though this is unnecessary for the
# stock case (different topic [i.e. symbol] for each
# quote).
new_quotes.setdefault(quote['key'], []).append(payload)
else:
# log.debug(f"Delivering quotes:\n{quotes}")
for quote in quotes:
new_quotes.setdefault(quote['key'], []).append(quote)
if new_quotes:
@ -207,53 +180,6 @@ async def symbol_data(broker: str, tickers: List[str]):
return await feed.client.symbol_info(tickers)
async def smoke_quote(get_quotes, tickers, broker):
"""Do an initial "smoke" request for symbols in ``tickers`` filtering
out any symbols not supported by the broker queried in the call to
``get_quotes()``.
"""
# TODO: trim out with #37
#################################################
# get a single quote filtering out any bad tickers
# NOTE: this code is always run for every new client
# subscription even when a broker quoter task is already running
# since the new client needs to know what symbols are accepted
log.warn(f"Retrieving smoke quote for symbols {tickers}")
quotes = await get_quotes(tickers)
# report any tickers that aren't returned in the first quote
invalid_tickers = set(tickers) - set(map(itemgetter('key'), quotes))
for symbol in invalid_tickers:
tickers.remove(symbol)
log.warn(
f"Symbol `{symbol}` not found by broker `{broker}`"
)
# pop any tickers that return "empty" quotes
payload = {}
for quote in quotes:
symbol = quote['symbol']
if quote is None:
log.warn(
f"Symbol `{symbol}` not found by broker"
f" `{broker}`")
# XXX: note this mutates the input list (for now)
tickers.remove(symbol)
continue
# report any unknown/invalid symbols (QT specific)
if quote.get('low52w', False) is None:
log.error(
f"{symbol} seems to be defunct")
payload[symbol] = quote
return payload
# end of section to be trimmed out with #37
###########################################
@asynccontextmanager
async def get_cached_feed(
brokername: str,
@ -296,7 +222,6 @@ async def start_quote_stream(
broker: str,
symbols: List[Any],
feed_type: str = 'stock',
diff_cached: bool = True,
rate: int = 3,
) -> None:
"""Handle per-broker quote stream subscriptions using a "lazy" pub-sub
@ -315,8 +240,6 @@ async def start_quote_stream(
f"{ctx.chan.uid} subscribed to {broker} for symbols {symbols}")
# another actor task may have already created it
async with get_cached_feed(broker) as feed:
# function to format packets delivered to subscribers
packetizer = None
if feed_type == 'stock':
get_quotes = feed.quoters.setdefault(
@ -325,7 +248,7 @@ async def start_quote_stream(
)
# do a smoke quote (note this mutates the input list and filters
# out bad symbols for now)
payload = await smoke_quote(get_quotes, symbols, broker)
first_quotes = await feed.mod.smoke_quote(get_quotes, symbols)
formatter = feed.mod.format_stock_quote
elif feed_type == 'option':
@ -337,22 +260,27 @@ async def start_quote_stream(
await feed.mod.option_quoter(feed.client, symbols)
)
# packetize
payload = {
first_quotes = {
quote['symbol']: quote
for quote in await get_quotes(symbols)
}
formatter = feed.mod.format_option_quote
sd = await feed.client.symbol_info(symbols)
# formatter = partial(formatter, symbol_data=sd)
feed.mod._symbol_info_cache.update(sd)
packetizer = partial(
feed.mod.packetizer,
normalize = partial(
feed.mod.normalize,
formatter=formatter,
symbol_data=sd,
)
# push initial smoke quote response for client initialization
# pre-process first set of quotes
payload = {}
for sym, quote in first_quotes.items():
fquote, _ = formatter(quote, sd)
assert fquote['displayable']
payload[sym] = fquote
await ctx.send_yield(payload)
await stream_poll_requests(
@ -361,11 +289,11 @@ async def start_quote_stream(
task_name=feed_type,
ctx=ctx,
topics=symbols,
packetizer=packetizer,
packetizer=feed.mod.packetizer,
# actual func args
get_quotes=get_quotes,
diff_cached=diff_cached,
normalizer=normalize,
rate=rate,
)
log.info(
@ -400,7 +328,6 @@ class DataFeed:
symbols: Sequence[str],
feed_type: str,
rate: int = 1,
diff_cached: bool = True,
test: str = '',
) -> (AsyncGenerator, dict):
if feed_type not in self._allowed:
@ -444,7 +371,6 @@ class DataFeed:
broker=self.brokermod.name,
symbols=symbols,
feed_type=feed_type,
diff_cached=diff_cached,
rate=rate,
)


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Interactive Brokers API backend.
@ -5,10 +21,10 @@ Note the client runs under an ``asyncio`` loop (since ``ib_insync`` is
built on it) and thus actor aware API calls must be spawned with
``infected_aio==True``.
"""
from contextlib import asynccontextmanager
from contextlib import asynccontextmanager, contextmanager
from dataclasses import asdict
from functools import partial
from typing import List, Dict, Any, Tuple, Optional, AsyncGenerator, Callable
from typing import List, Dict, Any, Tuple, Optional, AsyncIterator, Callable
import asyncio
import logging
import inspect
@ -25,8 +41,15 @@ import trio
import tractor
from ..log import get_logger, get_console_log
from ..data import maybe_spawn_brokerd
from ..ui._source import from_df
from ..data import (
maybe_spawn_brokerd,
iterticks,
attach_shm_array,
get_shm_token,
subscribe_ohlc_for_increment,
)
from ..data._source import from_df
from ._util import SymbolNotFound
log = get_logger(__name__)
@ -82,8 +105,7 @@ class NonShittyWrapper(Wrapper):
class NonShittyIB(ibis.IB):
"""The beginning of overriding quite a few quetionable decisions
in this lib.
"""The beginning of overriding quite a few decisions in this lib.
- Don't use datetimes
- Don't use named tuples
@ -104,7 +126,6 @@ _adhoc_cmdty_data_map = {
# NOTE: cmdtys don't have trade data:
# https://groups.io/g/twsapi/message/44174
'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
}
@ -112,12 +133,14 @@ class Client:
"""IB wrapped for our broker backend API.
Note: this client requires running inside an ``asyncio`` loop.
"""
def __init__(
self,
ib: ibis.IB,
) -> None:
self.ib = ib
self.ib.RaiseRequestErrors = True
async def bars(
self,
@ -143,7 +166,7 @@ class Client:
# durationStr='1 D',
# time length calcs
durationStr='{count} S'.format(count=3000 * 5),
durationStr='{count} S'.format(count=5000 * 5),
barSizeSetting='5 secs',
# always use extended hours
@ -278,7 +301,7 @@ class Client:
self,
symbol: str,
to_trio,
opts: Tuple[int] = ('375',), # '233', ),
opts: Tuple[int] = ('375', '233',),
# opts: Tuple[int] = ('459',),
) -> None:
"""Stream a ticker using the std L1 api.
@ -287,7 +310,7 @@ class Client:
ticker: Ticker = self.ib.reqMktData(contract, ','.join(opts))
def push(t):
log.debug(t)
# log.debug(t)
try:
to_trio.send_nowait(t)
except trio.BrokenResourceError:
@ -309,6 +332,8 @@ class Client:
_tws_port: int = 7497
_gw_port: int = 4002
_try_ports = [_tws_port, _gw_port]
_client_ids = itertools.count()
_client_cache = {}
@asynccontextmanager
@ -319,17 +344,18 @@ async def _aio_get_client(
) -> Client:
"""Return an ``ib_insync.IB`` instance wrapped in our client API.
"""
# first check cache for existing client
try:
yield _client_cache[(host, port)]
except KeyError:
# TODO: in case the arbiter has no record
# of existing brokerd we need to broadcast for one.
if client_id is None:
# if this is a persistent brokerd, try to allocate a new id for
# each client
try:
ss = tractor.current_actor().statespace
client_id = next(ss.setdefault('client_ids', itertools.count()))
# TODO: in case the arbiter has no record
# of existing brokerd we need to broadcast for one.
except RuntimeError:
# tractor likely isn't running
client_id = 1
client_id = next(_client_ids)
ib = NonShittyIB()
ports = _try_ports if port is None else [port]
@ -345,7 +371,9 @@ async def _aio_get_client(
raise ConnectionRefusedError(_err)
try:
yield Client(ib)
client = Client(ib)
_client_cache[(host, port)] = client
yield client
except BaseException:
ib.disconnect()
raise
@ -448,6 +476,20 @@ async def get_client(
yield get_method_proxy(portal, Client)
# https://interactivebrokers.github.io/tws-api/tick_types.html
tick_types = {
77: 'trade',
48: 'utrade',
0: 'bsize',
1: 'bid',
2: 'ask',
3: 'asize',
4: 'last',
5: 'size',
8: 'volume',
}
def normalize(
ticker: Ticker,
calc_price: bool = False
@ -456,9 +498,7 @@ def normalize(
new_ticks = []
for tick in ticker.ticks:
td = tick._asdict()
if td['tickType'] in (48, 77):
td['type'] = 'trade'
td['type'] = tick_types.get(td['tickType'], 'n/a')
new_ticks.append(td)
@ -476,22 +516,45 @@ def normalize(
# add time stamps for downstream latency measurements
data['brokerd_ts'] = time.time()
if ticker.rtTime:
data['broker_ts'] = data['rtTime_s'] = float(ticker.rtTime) / 1000.
# stupid stupid shit...don't even care any more..
# leave it until we do a proper latency study
# if ticker.rtTime is not None:
# data['broker_ts'] = data['rtTime_s'] = float(
# ticker.rtTime.timestamp) / 1000.
data.pop('rtTime')
return data
_local_buffer_writers = {}
@contextmanager
def activate_writer(key: str):
try:
writer_already_exists = _local_buffer_writers.get(key, False)
if not writer_already_exists:
_local_buffer_writers[key] = True
yield writer_already_exists
finally:
_local_buffer_writers.pop(key, None)
# TODO: figure out how to share quote feeds sanely despite
# the wacky ``ib_insync`` api.
# @tractor.msg.pub
@tractor.stream
async def stream_quotes(
ctx: tractor.Context,
symbols: List[str],
shm_token: Tuple[str, str, List[tuple]],
loglevel: str = None,
# compat for @tractor.msg.pub
topics: Any = None,
get_topics: Callable = None,
) -> AsyncGenerator[str, Dict[str, Any]]:
) -> AsyncIterator[Dict[str, Any]]:
"""Stream symbol quotes.
This is a ``trio`` callable routine meant to be invoked
@ -503,31 +566,63 @@ async def stream_quotes(
# TODO: support multiple subscriptions
sym = symbols[0]
stream = await tractor.to_asyncio.run_task(
_trio_run_client_method,
stream = await _trio_run_client_method(
method='stream_ticker',
symbol=sym,
)
async with aclosing(stream):
# check if a writer is already alive in a streaming task,
# otherwise start one and mark it as now existing
with activate_writer(shm_token['shm_name']) as writer_already_exists:
# maybe load historical ohlcv in to shared mem
# check if shm has already been created by previous
# feed initialization
if not writer_already_exists:
shm = attach_shm_array(
token=shm_token,
# we are the buffer writer
readonly=False,
)
bars = await _trio_run_client_method(
method='bars',
symbol=sym,
)
if bars is None:
raise SymbolNotFound(sym)
# write historical data to buffer
shm.push(bars)
shm_token = shm.token
times = shm.array['time']
delay_s = times[-1] - times[times != times[-1]][-1]
subscribe_ohlc_for_increment(shm, delay_s)
# pass back token, and bool, signalling if we're the writer
await ctx.send_yield((shm_token, not writer_already_exists))
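The ``delay_s`` expression above just measures the bar period from the last two distinct timestamps in the ohlc buffer; a quick standalone check with made-up epoch seconds for a 5 second feed:
import numpy as np
times = np.array([100., 105., 110., 110.])  # hypothetical 'time' column
delay_s = times[-1] - times[times != times[-1]][-1]
assert delay_s == 5.0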
# the first quote can be ignored since a second one with newer data is sent right after?
first_ticker = await stream.__anext__()
quote = normalize(first_ticker)
# ugh, clear ticks since we've consumed them
# (ahem, ib_insync is stateful trash)
first_ticker.ticks = []
log.debug(f"First ticker received {quote}")
if type(first_ticker.contract) not in (ibis.Commodity, ibis.Forex):
suffix = 'exchange'
calc_price = False # should be real volume for contract
quote = normalize(first_ticker)
log.debug(f"First ticker received {quote}")
con = quote['contract']
topic = '.'.join((con['symbol'], con[suffix])).lower()
yield {topic: quote}
# ugh, clear ticks since we've consumed them
# (ahem, ib_insync is stateful trash)
first_ticker.ticks = []
async for ticker in stream:
# spin consuming tickers until we get a real market datum
if not ticker.rtTime:
@ -535,12 +630,8 @@ async def stream_quotes(
continue
else:
log.debug("Received first real volume tick")
quote = normalize(ticker)
topic = '.'.join((con['symbol'], con[suffix])).lower()
yield {topic: quote}
# ugh, clear ticks since we've consumed them
# (ahem, ib_insync is stateful trash)
# (ahem, ib_insync is truly stateful trash)
ticker.ticks = []
# XXX: this works because we don't use
@ -550,28 +641,65 @@ async def stream_quotes(
# commodities don't have an exchange name for some reason?
suffix = 'secType'
calc_price = True
ticker = first_ticker
quote = normalize(ticker, calc_price=calc_price)
con = quote['contract']
topic = '.'.join((con['symbol'], con[suffix])).lower()
quote['symbol'] = topic
first_quote = {topic: quote}
ticker.ticks = []
# yield first quote asap
await ctx.send_yield(first_quote)
# real-time stream
async for ticker in stream:
quote = normalize(
ticker,
calc_price=calc_price
)
quote['symbol'] = topic
# TODO: in theory you can send the IPC msg *before*
# writing to the sharedmem array to decrease latency,
# however, that will require `tractor.msg.pub` support
# here or at least some way to prevent task switching
# at the yield such that the array write isn't delayed
# while another consumer is serviced..
# if we are the lone tick writer start writing
# the buffer with appropriate trade data
if not writer_already_exists:
for tick in iterticks(quote, types=('trade', 'utrade',)):
last = tick['price']
# update last entry
# benchmarked in the 4-5 us range
o, high, low, v = shm.array[-1][
['open', 'high', 'low', 'volume']
]
new_v = tick['size']
if v == 0 and new_v:
# no trades for this bar yet so the open
# is also the close/last trade price
o = last
shm.array[['open', 'high', 'low', 'close', 'volume']][-1] = (
o,
max(high, last),
min(low, last),
last,
v + new_v,
)
con = quote['contract']
topic = '.'.join((con['symbol'], con[suffix])).lower()
yield {topic: quote}
quote['symbol'] = topic
await ctx.send_yield({topic: quote})
# ugh, clear ticks since we've consumed them
ticker.ticks = []
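For reference, the in-place last-bar update in the writer branch above reduces to this plain fold of a trade tick into an OHLCV tuple (all numbers made up):
o, high, low, v = 100.0, 101.0, 99.5, 250
last, new_v = 101.5, 10  # hypothetical trade price and size
if v == 0 and new_v:
    o = last  # first trade of the bar also sets the open
bar = (o, max(high, last), min(low, last), last, v + new_v)
assert bar == (100.0, 101.5, 99.5, 101.5, 260)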
if __name__ == '__main__':
import sys
sym = sys.argv[1]
contract = asyncio.run(
_aio_run_client_method(
'find_contract',
symbol=sym,
)
)
print(contract)


@ -1,10 +1,25 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Kraken backend.
"""
from contextlib import asynccontextmanager
from dataclasses import dataclass, asdict, field
from itertools import starmap
from typing import List, Dict, Any, Callable
from typing import List, Dict, Any, Tuple, Optional
import json
import time
@ -18,6 +33,12 @@ import tractor
from ._util import resproc, SymbolNotFound, BrokerError
from ..log import get_logger, get_console_log
from ..data import (
# iterticks,
attach_shm_array,
get_shm_token,
subscribe_ohlc_for_increment,
)
log = get_logger(__name__)
@ -26,7 +47,7 @@ log = get_logger(__name__)
_url = 'https://api.kraken.com/0'
# conversion to numpy worthy types
# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
('index', int),
('time', int),
@ -34,9 +55,9 @@ _ohlc_dtype = [
('high', float),
('low', float),
('close', float),
('vwap', float),
('volume', float),
('count', int)
('count', int),
('vwap', float),
]
# UI components allow this to be declared such that additional
@ -102,23 +123,36 @@ class Client:
res.pop('last')
bars = next(iter(res.values()))
# convert all fields to native types
new_bars = []
last_nz_vwap = None
first = bars[0]
last_nz_vwap = first[-3]
if last_nz_vwap == 0:
# use close if vwap is zero
last_nz_vwap = first[-4]
# convert all fields to native types
for i, bar in enumerate(bars):
# normalize weird zero-ed vwap values..cmon kraken..
vwap = float(bar[-3])
# indicates vwap didn't change since last bar
vwap = float(bar.pop(-3))
if vwap != 0:
last_nz_vwap = vwap
if vwap == 0:
bar[-3] = last_nz_vwap
vwap = last_nz_vwap
# re-insert vwap as the last of the fields
bar.append(vwap)
new_bars.append(
(i,) + tuple(
ftype(bar[j]) for j, (name, ftype) in enumerate(_ohlc_dtype[1:])
ftype(bar[j]) for j, (name, ftype) in enumerate(
_ohlc_dtype[1:]
)
)
return np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
)
array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
return array
except KeyError:
raise SymbolNotFound(json['error'][0] + f': {symbol}')
@ -158,8 +192,9 @@ class OHLC:
setattr(self, f, val.type(getattr(self, f)))
async def recv_ohlc(recv):
async def recv_msg(recv):
too_slow_count = last_hb = 0
while True:
with trio.move_on_after(1.5) as cs:
msg = await recv()
@ -176,20 +211,50 @@ async def recv_ohlc(recv):
if isinstance(msg, dict):
if msg.get('event') == 'heartbeat':
now = time.time()
delay = now - last_hb
last_hb = now
log.trace(f"Heartbeat after {delay}")
# TODO: hmm i guess we should use this
# for determining when to do connection
# resets eh?
continue
err = msg.get('errorMessage')
if err:
raise BrokerError(err)
else:
chan_id, ohlc_array, chan_name, pair = msg
yield OHLC(chan_id, chan_name, pair, *ohlc_array)
chan_id, *payload_array, chan_name, pair = msg
if 'ohlc' in chan_name:
yield 'ohlc', OHLC(chan_id, chan_name, pair, *payload_array[0])
elif 'spread' in chan_name:
bid, ask, ts, bsize, asize = map(float, payload_array[0])
# TODO: really makes you think IB has a horrible API...
quote = {
'symbol': pair.replace('/', ''),
'ticks': [
{'type': 'bid', 'price': bid, 'size': bsize},
{'type': 'bsize', 'price': bid, 'size': bsize},
{'type': 'ask', 'price': ask, 'size': asize},
{'type': 'asize', 'price': ask, 'size': asize},
],
}
yield 'l1', quote
# elif 'book' in msg[-2]:
# chan_id, *payload_array, chan_name, pair = msg
# print(msg)
else:
print(f'UNHANDLED MSG: {msg}')
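As a concrete sketch of the unpacking above, a kraken ``spread`` event arrives roughly as ``[chan_id, [bid, ask, ts, bid_vol, ask_vol], chan_name, pair]`` (values below are made up):
msg = [42, ['9400.1', '9400.2', '1604000000.0', '1.5', '2.0'], 'spread', 'XBT/USD']
chan_id, *payload_array, chan_name, pair = msg
bid, ask, ts, bsize, asize = map(float, payload_array[0])
assert (chan_id, pair, bid, asize) == (42, 'XBT/USD', 9400.1, 2.0)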
def normalize(
@ -198,7 +263,7 @@ def normalize(
quote = asdict(ohlc)
quote['broker_ts'] = quote['time']
quote['brokerd_ts'] = time.time()
quote['pair'] = quote['pair'].replace('/', '')
quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
# seriously eh? what's with this non-symmetry everywhere
# in subscription systems...
@ -208,14 +273,32 @@ def normalize(
return topic, quote
@tractor.msg.pub
def make_sub(pairs: List[str], data: Dict[str, Any]) -> Dict[str, str]:
"""Create a request subscription packet dict.
https://docs.kraken.com/websockets/#message-subscribe
"""
# eg. specific logic for this in kraken's sync client:
# https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
return {
'pair': pairs,
'event': 'subscribe',
'subscription': data,
}
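For example, the ohlc subscription built further below would serialize to the wire payload kraken expects (the pair name is illustrative):
import json
ohlc_sub = make_sub(['XBT/USD'], {'name': 'ohlc', 'interval': 1})
json.dumps(ohlc_sub)
# '{"pair": ["XBT/USD"], "event": "subscribe", "subscription": {"name": "ohlc", "interval": 1}}'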
# @tractor.msg.pub
async def stream_quotes(
get_topics: Callable,
# get_topics: Callable,
shm_token: Tuple[str, str, List[tuple]],
symbols: List[str] = ['XBTUSD', 'XMRUSD'],
# These are not the symbol names expected by the ws api;
# they are looked up inside this routine.
symbols: List[str] = ['XBTUSD', 'XMRUSD'],
sub_type: str = 'ohlc',
loglevel: str = None,
# compat with eventual ``tractor.msg.pub``
topics: Optional[List[str]] = None,
) -> None:
"""Subscribe for ohlc stream of quotes for ``pairs``.
@ -226,39 +309,70 @@ async def stream_quotes(
ws_pairs = {}
async with get_client() as client:
# keep client cached for real-time section
for sym in symbols:
ws_pairs[sym] = (await client.symbol_info(sym))['wsname']
# maybe load historical ohlcv in to shared mem
# check if shm has already been created by previous
# feed initialization
writer_exists = get_shm_token(shm_token['shm_name'])
symbol = symbols[0]
if not writer_exists:
shm = attach_shm_array(
token=shm_token,
# we are writer
readonly=False,
)
bars = await client.bars(symbol=symbol)
shm.push(bars)
shm_token = shm.token
times = shm.array['time']
delay_s = times[-1] - times[times != times[-1]][-1]
subscribe_ohlc_for_increment(shm, delay_s)
yield shm_token, not writer_exists
while True:
try:
async with trio_websocket.open_websocket_url(
'wss://ws.kraken.com',
) as ws:
# setup subs
# see: https://docs.kraken.com/websockets/#message-subscribe
subs = {
'pair': list(ws_pairs.values()),
'event': 'subscribe',
'subscription': {
'name': sub_type,
'interval': 1, # 1 min
# 'name': 'ticker',
# 'name': 'openOrders',
# 'depth': '25',
},
}
# XXX: setup subs
# https://docs.kraken.com/websockets/#message-subscribe
# specific logic for this in kraken's shitty sync client:
# https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
ohlc_sub = make_sub(
list(ws_pairs.values()),
{'name': 'ohlc', 'interval': 1}
)
# TODO: we want to eventually allow unsubs which should
# be completely fine to request from a separate task
# since internally the ws methods appear to be FIFO
# locked.
await ws.send_message(json.dumps(subs))
await ws.send_message(json.dumps(ohlc_sub))
# trade data (aka L1)
l1_sub = make_sub(
list(ws_pairs.values()),
{'name': 'spread'} # 'depth': 10}
)
await ws.send_message(json.dumps(l1_sub))
async def recv():
return json.loads(await ws.get_message())
# pull a first quote and deliver
ohlc_gen = recv_ohlc(recv)
ohlc_last = await ohlc_gen.__anext__()
msg_gen = recv_msg(recv)
typ, ohlc_last = await msg_gen.__anext__()
topic, quote = normalize(ohlc_last)
@ -269,41 +383,75 @@ async def stream_quotes(
last_interval_start = ohlc_last.etime
# start streaming
async for ohlc in ohlc_gen:
async for typ, ohlc in msg_gen:
if typ == 'ohlc':
# TODO: can get rid of all this by using
# ``trades`` subscription...
# generate tick values to match time & sales pane:
# https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
volume = ohlc.volume
if ohlc.etime > last_interval_start: # new interval
# new interval
if ohlc.etime > last_interval_start:
last_interval_start = ohlc.etime
tick_volume = volume
else:
# this is the tick volume *within the interval*
tick_volume = volume - ohlc_last.volume
last = ohlc.close
if tick_volume:
ohlc.ticks.append({
'type': 'trade',
'price': ohlc.close,
'price': last,
'size': tick_volume,
})
topic, quote = normalize(ohlc)
# if we are the lone tick writer start writing
# the buffer with appropriate trade data
if not writer_exists:
# update last entry
# benchmarked in the 4-5 us range
o, high, low, v = shm.array[-1][
['open', 'high', 'low', 'volume']
]
new_v = tick_volume
if v == 0 and new_v:
# no trades for this bar yet so the open
# is also the close/last trade price
o = last
# write shm
shm.array[
['open',
'high',
'low',
'close',
'vwap',
'volume']
][-1] = (
o,
max(high, last),
min(low, last),
last,
ohlc.vwap,
volume,
)
ohlc_last = ohlc
elif typ == 'l1':
quote = ohlc
topic = quote['symbol']
# XXX: format required by ``tractor.msg.pub``
# requires a ``Dict[topic: str, quote: dict]``
yield {topic: quote}
ohlc_last = ohlc
except (ConnectionClosed, DisconnectionTimeout):
log.exception("Good job kraken...reconnecting")
if __name__ == '__main__':
async def stream_ohlc():
async for msg in stream_quotes():
print(msg)
tractor.run(stream_ohlc)


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Questrade API backend.
"""
@ -9,6 +25,7 @@ from datetime import datetime
from functools import partial
import itertools
import configparser
from pprint import pformat
from typing import (
List, Tuple, Dict, Any, Iterator, NamedTuple,
AsyncGenerator,
@ -30,6 +47,8 @@ from ._util import resproc, BrokerError, SymbolNotFound
from ..log import get_logger, colorize_json, get_console_log
from .._async_utils import async_lifo_cache
from . import get_brokermod
from . import api
log = get_logger(__name__)
@ -837,7 +856,8 @@ _qt_stock_keys = {
# 'low52w': 'low52w', # put in info widget
# 'high52w': 'high52w',
# "lastTradePriceTrHrs": 7.99,
'lastTradeTime': ('fill_time', datetime.fromisoformat),
# 'lastTradeTime': ('fill_time', datetime.fromisoformat),
'lastTradeTime': 'fill_time',
"lastTradeTick": 'tick', # ("Equal", "Up", "Down")
# "symbolId": 3575753,
# "tier": "",
@ -913,6 +933,7 @@ def format_stock_quote(
new[new_key] = value
displayable[new_key] = display_value
new['displayable'] = displayable
return new, displayable
@ -973,6 +994,7 @@ def format_option_quote(
quote: dict,
symbol_data: dict,
keymap: dict = _qt_option_keys,
include_displayables: bool = True,
) -> Tuple[dict, dict]:
"""Remap a list of quote dicts ``quotes`` using the mapping of old keys
-> new keys ``keymap`` returning 2 dicts: one with raw data and the other
@ -989,13 +1011,19 @@ def format_option_quote(
# change = percent_change(previous, last)
computed = {
# why QT do you have to be an asshole shipping null values!!!
'$ vol': round((quote['VWAP'] or 0) * (quote['volume'] or 0), 3),
# '$ vol': round((quote['VWAP'] or 0) * (quote['volume'] or 0), 3),
# '%': round(change, 3),
# 'close': previous,
}
new = {}
displayable = {}
vwap = quote.get('VWAP')
volume = quote.get('volume')
if volume is not None: # could be 0
# why questrade do you have to be an asshole shipping null values!!!
computed['$ vol'] = round((vwap or 0) * (volume or 0), 3)
# structuring and normalization
for key, new_key in keymap.items():
display_value = value = computed.get(key) or quote.get(key)
@ -1014,47 +1042,10 @@ def format_option_quote(
return new, displayable
@asynccontextmanager
async def get_cached_client(
brokername: str,
*args,
**kwargs,
) -> 'Client':
"""Get a cached broker client from the current actor's local vars.
If one has not been setup do it and cache it.
"""
# check if a cached client is in the local actor's statespace
ss = tractor.current_actor().statespace
clients = ss.setdefault('clients', {'_lock': trio.Lock()})
lock = clients['_lock']
client = None
try:
log.info(f"Loading existing `{brokername}` daemon")
async with lock:
client = clients[brokername]
client._consumers += 1
yield client
except KeyError:
log.info(f"Creating new client for broker {brokername}")
async with lock:
brokermod = get_brokermod(brokername)
exit_stack = contextlib.AsyncExitStack()
client = await exit_stack.enter_async_context(
brokermod.get_client()
)
client._consumers = 0
client._exit_stack = exit_stack
clients[brokername] = client
yield client
finally:
client._consumers -= 1
if client._consumers <= 0:
# teardown the client
await client._exit_stack.aclose()
async def smoke_quote(get_quotes, tickers): # , broker):
async def smoke_quote(
get_quotes,
tickers
):
"""Do an initial "smoke" request for symbols in ``tickers`` filtering
out any symbols not supported by the broker queried in the call to
``get_quotes()``.
@ -1093,6 +1084,7 @@ async def smoke_quote(get_quotes, tickers): # , broker):
log.error(
f"{symbol} seems to be defunct")
quote['symbol'] = symbol
payload[symbol] = quote
return payload
@ -1101,20 +1093,90 @@ async def smoke_quote(get_quotes, tickers): # , broker):
###########################################
# unbounded, shared between streaming tasks
_symbol_info_cache = {}
# function to format packets delivered to subscribers
def packetizer(
topic: str,
quotes: Dict[str, Any],
formatter: Callable,
symbol_data: Dict[str, Any],
) -> Dict[str, Any]:
"""Normalize quotes by name into dicts using broker-specific
processing.
"""
new = {}
for quote in quotes:
new[quote['symbol']], _ = formatter(quote, symbol_data)
# repack into symbol keyed dict
return {q['symbol']: q for q in quotes}
def normalize(
quotes: Dict[str, Any],
_cache: Dict[str, Any], # dict held in scope of the streaming loop
formatter: Callable,
) -> Dict[str, Any]:
"""Deliver normalized quotes by name into dicts using
broker-specific processing; only emit changes differing from the
last quote sample, creating a pseudo-tick type datum.
"""
new = {}
# XXX: this is effectively emitting "sampled ticks"
# useful for polling setups but obviously should be
# disabled if you're already rx-ing per-tick data.
for quote in quotes:
symbol = quote['symbol']
# look up last quote from cache
last = _cache.setdefault(symbol, {})
_cache[symbol] = quote
# compute volume difference
last_volume = last.get('volume', 0)
current_volume = quote['volume']
volume_diff = current_volume - last_volume
# find all keys that map to a new value compared
# to the last quote received
changed = set(quote.items()) - set(last.items())
if changed:
log.info(f"New quote {symbol}:\n{changed}")
# TODO: can we reduce the # of iterations here and in
# called funcs?
payload = {k: quote[k] for k, v in changed}
payload['symbol'] = symbol # required by formatter
# TODO: we should probably do the "computed" fields
# processing found inside this func in a downstream actor?
fquote, _ = formatter(payload, _symbol_info_cache)
fquote['key'] = fquote['symbol'] = symbol
# if there was volume likely the last size of
# shares traded is useful info and it's possible
# that the set difference from above will disregard
# a "size" value since the same # of shares were traded
# volume = payload.get('volume')
if volume_diff:
if volume_diff < 0:
log.error(f"Uhhh {symbol} volume: {volume_diff} ?")
fquote['volume_delta'] = volume_diff
# TODO: We can emit 2 ticks here:
# - one for the volume differential
# - one for the last known trade size
# The first in theory can be unwound and
# interpolated assuming the broker passes an
# accurate daily VWAP value.
# To make this work we need a universal ``size``
# field that is normalized before hitting this logic.
fquote['size'] = quote.get('lastTradeSize', 0)
if 'last' not in fquote:
fquote['last'] = quote.get('lastTradePrice', float('nan'))
new[symbol] = fquote
if new:
log.info(f"New quotes:\n{pformat(new)}")
return new
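The pseudo-tick trick above boils down to a set difference over the quote's items; a minimal standalone sketch with made-up field values:
last = {'symbol': 'AAPL', 'lastTradePrice': 120.0, 'volume': 1000}
quote = {'symbol': 'AAPL', 'lastTradePrice': 120.5, 'volume': 1010}
changed = set(quote.items()) - set(last.items())
payload = {k: quote[k] for k, v in changed}
assert payload == {'lastTradePrice': 120.5, 'volume': 1010}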
@ -1123,32 +1185,39 @@ async def stream_quotes(
ctx: tractor.Context, # marks this as a streaming func
symbols: List[str],
feed_type: str = 'stock',
diff_cached: bool = True,
rate: int = 3,
loglevel: str = None,
# feed_type: str = 'stock',
) -> AsyncGenerator[str, Dict[str, Any]]:
# XXX: why do we need this again?
get_console_log(tractor.current_actor().loglevel)
# XXX: required to propagate ``tractor`` loglevel to piker logging
get_console_log(loglevel)
async with get_cached_client('questrade') as client:
async with api.get_cached_client('questrade') as client:
if feed_type == 'stock':
formatter = format_stock_quote
get_quotes = await stock_quoter(client, symbols)
# do a smoke quote (note this mutates the input list and filters
# out bad symbols for now)
payload = await smoke_quote(get_quotes, list(symbols))
first_quotes = await smoke_quote(get_quotes, list(symbols))
else:
formatter = format_option_quote
get_quotes = await option_quoter(client, symbols)
# packetize
payload = {
first_quotes = {
quote['symbol']: quote
for quote in await get_quotes(symbols)
}
# update global symbol data state
sd = await client.symbol_info(symbols)
_symbol_info_cache.update(sd)
# pre-process first set of quotes
payload = {}
for sym, quote in first_quotes.items():
fquote, _ = formatter(quote, sd)
payload[sym] = fquote
# push initial smoke quote response for client initialization
await ctx.send_yield(payload)
@ -1161,15 +1230,11 @@ async def stream_quotes(
task_name=feed_type,
ctx=ctx,
topics=symbols,
packetizer=partial(
packetizer,
formatter=formatter,
symbol_data=sd,
),
packetizer=packetizer,
# actual target "streaming func" args
get_quotes=get_quotes,
diff_cached=diff_cached,
normalizer=partial(normalize, formatter=formatter),
rate=rate,
)
log.info("Terminating stream quoter task")


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Robinhood API backend.


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Handy financial calculations.
"""


@ -6,9 +6,10 @@ import os
import click
import tractor
from ..log import get_console_log, get_logger
from ..log import get_console_log, get_logger, colorize_json
from ..brokers import get_brokermod, config
log = get_logger('cli')
DEFAULT_BROKER = 'questrade'
@ -47,9 +48,10 @@ def pikerd(loglevel, host, tl):
@click.option('--broker', '-b', default=DEFAULT_BROKER,
help='Broker backend to use')
@click.option('--loglevel', '-l', default='warning', help='Logging level')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.option('--configdir', '-c', help='Configuration directory')
@click.pass_context
def cli(ctx, broker, loglevel, configdir):
def cli(ctx, broker, loglevel, tl, configdir):
if configdir is not None:
assert os.path.isdir(configdir), f"`{configdir}` is not a valid path"
config._override_config_dir(configdir)
@ -59,14 +61,47 @@ def cli(ctx, broker, loglevel, configdir):
'broker': broker,
'brokermod': get_brokermod(broker),
'loglevel': loglevel,
'tractorloglevel': None,
'log': get_console_log(loglevel),
'confdir': _config_dir,
'wl_path': _watchlists_data_path,
})
# allow enabling same loglevel in ``tractor`` machinery
if tl:
ctx.obj.update({'tractorloglevel': loglevel})
@cli.command()
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.argument('names', nargs=-1, required=False)
@click.pass_obj
def services(config, tl, names):
async def list_services():
async with tractor.get_arbiter(
*tractor.current_actor()._arb_addr
) as portal:
registry = await portal.run('self', 'get_registry')
json_d = {}
for uid, socket in registry.items():
name, uuid = uid
host, port = socket
json_d[f'{name}.{uuid}'] = f'{host}:{port}'
click.echo(
f"Available `piker` services:\n{colorize_json(json_d)}"
)
tractor.run(
list_services,
name='service_query',
loglevel=config['loglevel'] if tl else None,
)
def _load_clis() -> None:
from ..data import marketstore as _
from ..data import cli as _
from ..brokers import cli as _ # noqa
from ..ui import cli as _ # noqa
from ..watchlists import cli as _ # noqa


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Data feed apis and infra.
@ -5,6 +21,7 @@ We provide tsdb integrations for retrieving
and storing data from your brokers as well as
sharing your feeds with other fellow pikers.
"""
from dataclasses import dataclass
from contextlib import asynccontextmanager
from importlib import import_module
from types import ModuleType
@ -13,11 +30,33 @@ from typing import (
Sequence, AsyncIterator, Optional
)
import trio
import tractor
from ..brokers import get_brokermod
from ..log import get_logger, get_console_log
from ._normalize import iterticks
from ._sharedmem import (
maybe_open_shm_array,
attach_shm_array,
open_shm_array,
ShmArray,
get_shm_token,
)
from ._source import base_ohlc_dtype
from ._buffer import (
increment_ohlc_buffer,
subscribe_ohlc_for_increment
)
__all__ = [
'iterticks',
'maybe_open_shm_array',
'attach_shm_array',
'open_shm_array',
'get_shm_token',
'subscribe_ohlc_for_increment',
]
log = get_logger(__name__)
@ -27,7 +66,7 @@ __ingestors__ = [
]
def get_ingestor(name: str) -> ModuleType:
def get_ingestormod(name: str) -> ModuleType:
"""Return the imported ingestor module by name.
"""
module = import_module('.' + name, 'piker.data')
@ -39,6 +78,7 @@ def get_ingestor(name: str) -> ModuleType:
_data_mods = [
'piker.brokers.core',
'piker.brokers.data',
'piker.data',
]
@ -56,6 +96,9 @@ async def maybe_spawn_brokerd(
if loglevel:
get_console_log(loglevel)
# disable debugger in brokerd?
# tractor._state._runtime_vars['_debug_mode'] = False
tractor_kwargs['loglevel'] = loglevel
brokermod = get_brokermod(brokername)
@ -84,6 +127,46 @@ async def maybe_spawn_brokerd(
await nursery.cancel()
@dataclass
class Feed:
"""A data feed for client-side interaction with far-process
real-time data sources.
This is a thin abstraction on top of ``tractor``'s portals for
interacting with IPC streams and conducting automatic
memory buffer orchestration.
"""
name: str
stream: AsyncIterator[Dict[str, Any]]
shm: ShmArray
_broker_portal: tractor._portal.Portal
_index_stream: Optional[AsyncIterator[Dict[str, Any]]] = None
async def receive(self) -> dict:
return await self.stream.__anext__()
async def index_stream(self) -> AsyncIterator[int]:
if not self._index_stream:
# XXX: this should be singleton on a host,
# a lone broker-daemon per provider should be
# created for all practical purposes
self._index_stream = await self._broker_portal.run(
'piker.data',
'increment_ohlc_buffer',
shm_token=self.shm.token,
topics=['index'],
)
return self._index_stream
def sym_to_shm_key(
broker: str,
symbol: str,
) -> str:
return f'{broker}.{symbol}'
@asynccontextmanager
async def open_feed(
name: str,
@ -100,6 +183,17 @@ async def open_feed(
if loglevel is None:
loglevel = tractor.current_actor().loglevel
# Attempt to allocate (or attach to) shm array for this broker/symbol
shm, opened = maybe_open_shm_array(
key=sym_to_shm_key(name, symbols[0]),
# use any broker defined ohlc dtype:
dtype=getattr(mod, '_ohlc_dtype', base_ohlc_dtype),
# we expect the sub-actor to write
readonly=True,
)
async with maybe_spawn_brokerd(
mod.name,
loglevel=loglevel,
@ -108,14 +202,27 @@ async def open_feed(
mod.__name__,
'stream_quotes',
symbols=symbols,
shm_token=shm.token,
# compat with eventual ``tractor.msg.pub``
topics=symbols,
)
# Feed is required to deliver an initial quote asap.
# TODO: should we timeout and raise a more explicit error?
# with trio.fail_after(5):
with trio.fail_after(float('inf')):
# Retrieve initial quote for each symbol
# such that consumer code can know the data layout
first_quote = await stream.__anext__()
log.info(f"Received first quote {first_quote}")
yield (first_quote, stream)
# TODO: we can't do this **and** be compatible with
# ``tractor.msg.pub``, should we maybe just drop this after
# tests are in?
shm_token, is_writer = await stream.receive()
if opened:
assert is_writer
log.info("Started shared mem bar writer")
shm_token['dtype_descr'] = list(shm_token['dtype_descr'])
assert shm_token == shm.token # sanity
yield Feed(
name=name,
stream=stream,
shm=shm,
_broker_portal=portal,
)
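A hedged consumer-side sketch of the new ``Feed`` api (broker and symbol are placeholders, and this is assumed to run inside a ``tractor``/``trio`` actor tree):
async def consume():
    async with open_feed('kraken', ['XBTUSD']) as feed:
        print(feed.shm.array[-1])     # historical bars already in shared mem
        quote = await feed.receive()  # first live quote dict
        print(quote)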


@ -0,0 +1,115 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Data buffers for fast shared humpy.
"""
from typing import Tuple, Callable, Dict
# import time
import tractor
import trio
from ._sharedmem import ShmArray
_shms: Dict[int, ShmArray] = {}
@tractor.msg.pub
async def increment_ohlc_buffer(
shm_token: dict,
get_topics: Callable[..., Tuple[str]],
# delay_s: Optional[float] = None,
):
"""Task which inserts new bars into the provide shared memory array
every ``delay_s`` seconds.
This task fulfills 2 purposes:
- it takes the subscribed set of shm arrays and increments them
on a common time period
- broadcast of this increment "signal" message to other actor
subscribers
Note that if **no** actor has initiated this task then **none** of
the underlying buffers will actually be incremented.
"""
# TODO: right now we'll spin printing bars if the last time stamp is
# before a large period of no market activity. Likely the best way
# to solve this is to make this task aware of the instrument's
# tradable hours?
# adjust delay to compensate for trio processing time
ad = min(_shms.keys()) - 0.001
# async def sleep():
# """Sleep until next time frames worth has passed from last bar.
# """
# # last_ts = shm.array[-1]['time']
# # delay = max((last_ts + ad) - time.time(), 0)
# # await trio.sleep(delay)
# await trio.sleep(ad)
total_s = 0 # total seconds counted
lowest = min(_shms.keys())
ad = lowest - 0.001
while True:
# TODO: do we want to support dynamically
# adding a "lower" lowest increment period?
await trio.sleep(ad)
total_s += lowest
# # sleep for duration of current bar
# await sleep()
# increment all subscribed shm arrays
# TODO: this in ``numba``
for delay_s, shms in _shms.items():
if total_s % delay_s != 0:
continue
# TODO: numba this!
for shm in shms:
# TODO: in theory we could make this faster by copying the
# "last" readable value into the underlying larger buffer's
# next value and then incrementing the counter instead of
# using ``.push()``?
# append new entry to buffer thus "incrementing" the bar
array = shm.array
last = array[-1:].copy()
(index, t, close) = last[0][['index', 'time', 'close']]
# this copies non-std fields (eg. vwap) from the last datum
last[
['index', 'time', 'volume', 'open', 'high', 'low', 'close']
][0] = (index + 1, t + delay_s, 0, close, close, close, close)
# write to the buffer
shm.push(last)
# broadcast the buffer index step
yield {'index': shm._i.value}
def subscribe_ohlc_for_increment(
shm: ShmArray,
delay: int,
) -> None:
"""Add an OHLC ``ShmArray`` to the increment set.
"""
_shms.setdefault(delay, []).append(shm)
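The multi-rate scheduling above is a plain modulo check: sleep on the lowest registered period and bump slower buffers only when the accumulated seconds divide evenly. A toy illustration (the periods are hypothetical):
total_s, lowest = 0, 5
periods = [5, 60]  # say, a 5s and a 1m buffer registered in ``_shms``
increments = {5: 0, 60: 0}
for _ in range(12):
    total_s += lowest
    for p in periods:
        if total_s % p == 0:
            increments[p] += 1
assert increments == {5: 12, 60: 1}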


@ -0,0 +1,38 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Stream format enforcement.
"""
from typing import AsyncIterator, Optional, Tuple
import numpy as np
def iterticks(
quote: dict,
types: Tuple[str] = ('trade', 'utrade'),
) -> AsyncIterator:
"""Iterate through ticks delivered per quote cycle.
"""
# print(f"{quote}\n\n")
ticks = quote.get('ticks', ())
if ticks:
for tick in ticks:
print(f"{quote['symbol']}: {tick}")
if tick.get('type') in types:
yield tick
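Usage sketch (the quote payload is made up): pull only the clearing-trade ticks out of a normalized quote.
quote = {
    'symbol': 'xbtusd',
    'ticks': [
        {'type': 'bid', 'price': 9400.1, 'size': 1.5},
        {'type': 'trade', 'price': 9400.2, 'size': 0.25},
    ],
}
assert list(iterticks(quote, types=('trade',))) == [
    {'type': 'trade', 'price': 9400.2, 'size': 0.25}
]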


@ -0,0 +1,355 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
NumPy compatible shared memory buffers for real-time FSP.
"""
from typing import List
from dataclasses import dataclass, asdict
from sys import byteorder
from typing import Tuple, Optional
from multiprocessing import shared_memory
from multiprocessing import resource_tracker as mantracker
from _posixshmem import shm_unlink
import tractor
import numpy as np
from ..log import get_logger
from ._source import base_ohlc_dtype
log = get_logger(__name__)
# Tell the "resource tracker" thing to fuck off.
class ManTracker(mantracker.ResourceTracker):
def register(self, name, rtype):
pass
def unregister(self, name, rtype):
pass
def ensure_running(self):
pass
# "know your land and know your prey"
# https://www.dailymotion.com/video/x6ozzco
mantracker._resource_tracker = ManTracker()
mantracker.register = mantracker._resource_tracker.register
mantracker.ensure_running = mantracker._resource_tracker.ensure_running
ensure_running = mantracker._resource_tracker.ensure_running
mantracker.unregister = mantracker._resource_tracker.unregister
mantracker.getfd = mantracker._resource_tracker.getfd
class SharedInt:
def __init__(
self,
token: str,
create: bool = False,
) -> None:
# create a single entry array for storing an index counter
self._shm = shared_memory.SharedMemory(
name=token,
create=create,
size=4, # std int
)
@property
def value(self) -> int:
return int.from_bytes(self._shm.buf, byteorder)
@value.setter
def value(self, value) -> None:
self._shm.buf[:] = value.to_bytes(4, byteorder)
def destroy(self) -> None:
if shared_memory._USE_POSIX:
# We manually unlink to bypass all the "resource tracker"
# nonsense meant for non-SC systems.
shm_unlink(self._shm.name)
@dataclass
class _Token:
"""Internal represenation of a shared memory "token"
which can be used to key a system wide post shm entry.
"""
shm_name: str # this serves as a "key" value
shm_counter_name: str
dtype_descr: List[Tuple[str]]
def __post_init__(self):
# np.array requires a list for dtype
self.dtype_descr = np.dtype(
list(self.dtype_descr)).descr
def as_msg(self):
return asdict(self)
@classmethod
def from_msg(self, msg: dict) -> '_Token':
return msg if isinstance(msg, _Token) else _Token(**msg)
# TODO: this api?
# _known_tokens = tractor.ActorVar('_shm_tokens', {})
# _known_tokens = tractor.ContextStack('_known_tokens', )
# _known_tokens = trio.RunVar('shms', {})
# process-local store of keys to tokens
_known_tokens = {}
def get_shm_token(key: str) -> _Token:
"""Convenience func to check if a token
for the provided key is known by this process.
"""
return _known_tokens.get(key)
def _make_token(
key: str,
dtype: Optional[np.dtype] = None,
) -> _Token:
"""Create a serializable token that can be used
to access a shared array.
"""
dtype = base_ohlc_dtype if dtype is None else dtype
return _Token(
key,
key + "_counter",
np.dtype(dtype).descr
)
class ShmArray:
def __init__(
self,
shmarr: np.ndarray,
counter: SharedInt,
shm: shared_memory.SharedMemory,
readonly: bool = True,
) -> None:
self._array = shmarr
self._i = counter
self._len = len(shmarr)
self._shm = shm
self._readonly = readonly
# TODO: ringbuf api?
@property
def _token(self) -> _Token:
return _Token(
self._shm.name,
self._i._shm.name,
self._array.dtype.descr,
)
@property
def token(self) -> dict:
"""Shared memory token that can be serialized
and used by another process to attach to this array.
"""
return self._token.as_msg()
@property
def index(self) -> int:
return self._i.value % self._len
@property
def array(self) -> np.ndarray:
return self._array[:self._i.value]
def last(
self,
length: int = 1,
) -> np.ndarray:
return self.array[-length:]
def push(
self,
data: np.ndarray,
) -> int:
"""Ring buffer like "push" to append data
into the buffer and return updated index.
"""
length = len(data)
# TODO: use .index for actual ring logic?
index = self._i.value
end = index + length
self._array[index:end] = data[:]
self._i.value = end
return end
def close(self) -> None:
self._i._shm.close()
self._shm.close()
def destroy(self) -> None:
if shared_memory._USE_POSIX:
# We manually unlink to bypass all the "resource tracker"
# nonsense meant for non-SC systems.
shm_unlink(self._shm.name)
self._i.destroy()
def flush(self) -> None:
# TODO: flush to storage backend like markestore?
...
def open_shm_array(
key: Optional[str] = None,
# approx number of 5s bars in a "day" x2
size: int = int(2*60*60*10/5),
dtype: Optional[np.dtype] = None,
readonly: bool = False,
) -> ShmArray:
"""Open a memory shared ``numpy`` using the standard library.
This call unlinks (aka permanently destroys) the buffer on teardown
and thus should be used from the parent-most accessor (process).
"""
# create new shared mem segment for which we
# have write permission
a = np.zeros(size, dtype=dtype)
shm = shared_memory.SharedMemory(
name=key,
create=True,
size=a.nbytes
)
array = np.ndarray(a.shape, dtype=a.dtype, buffer=shm.buf)
array[:] = a[:]
array.setflags(write=int(not readonly))
token = _make_token(
key=key,
dtype=dtype
)
counter = SharedInt(
token=token.shm_counter_name,
create=True,
)
counter.value = 0
shmarr = ShmArray(
array,
counter,
shm,
readonly=readonly,
)
assert shmarr._token == token
_known_tokens[key] = shmarr.token
# "unlink" created shm on process teardown by
# pushing teardown calls onto actor context stack
actor = tractor.current_actor()
actor._lifetime_stack.callback(shmarr.close)
actor._lifetime_stack.callback(shmarr.destroy)
return shmarr
def attach_shm_array(
token: Tuple[str, str, Tuple[str, str]],
size: int = int(60*60*10/5),
readonly: bool = True,
) -> ShmArray:
"""Load and attach to an existing shared memory array previously
created by another process using ``open_shm_array``.
"""
token = _Token.from_msg(token)
key = token.shm_name
if key in _known_tokens:
assert _known_tokens[key] == token, "WTF"
shm = shared_memory.SharedMemory(name=key)
shmarr = np.ndarray(
(size,),
dtype=token.dtype_descr,
buffer=shm.buf
)
shmarr.setflags(write=int(not readonly))
counter = SharedInt(token=token.shm_counter_name)
# make sure we can read
counter.value
sha = ShmArray(
shmarr,
counter,
shm,
readonly=readonly,
)
# read test
sha.array
# Stash key -> token knowledge for future queries
# via `maybe_open_shm_array()` but only after we know
# we can attach.
if key not in _known_tokens:
_known_tokens[key] = token
# "close" attached shm on process teardown
actor = tractor.current_actor()
actor._lifetime_stack.callback(sha.close)
return sha
def maybe_open_shm_array(
key: str,
dtype: Optional[np.dtype] = None,
**kwargs,
) -> Tuple[ShmArray, bool]:
"""Attempt to attach to a shared memory block by a
"key" determined by the users overall "system"
(presumes you don't have the block's explicit token).
This function is meant to solve the problem of
discovering whether a shared array token has been
allocated or discovered by the actor running in
**this** process. Systems where multiple actors
may seek to access a common block can use this
function to attempt to acquire a token as discovered
by the actors who have previously stored a
"key" -> ``_Token`` map in an actor local variable.
If you know the explicit ``_Token`` for your memory
instead use ``attach_shm_array``.
"""
try:
# see if we already know this key
token = _known_tokens[key]
return attach_shm_array(token=token, **kwargs), False
except KeyError:
log.warning(f"Could not find {key} in shms cache")
if dtype:
token = _make_token(key, dtype)
try:
return attach_shm_array(token=token, **kwargs), False
except FileNotFoundError:
log.warning(f"Could not attach to shm with token {token}")
# This actor does not know about memory
# associated with the provided "key".
# Attempt to open a block and expect
# to fail if a block has been allocated
# on the OS by someone else.
return open_shm_array(key=key, dtype=dtype, **kwargs), True
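A two-actor sketch of the intended flow (the key, the ``base_ohlc_dtype`` import and the IPC transport are assumed, and both sides must be running inside ``tractor`` actors): the parent-most writer allocates, ships the token dict over IPC, and readers attach.
# writer side
shm, opened = maybe_open_shm_array('kraken.xbtusd', dtype=base_ohlc_dtype)
token_msg = shm.token  # plain dict, safe to send in a message

# reader side, in another actor that received ``token_msg``
reader = attach_shm_array(token=token_msg, readonly=True)
last_bar = reader.array[-1:]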

View File

@ -0,0 +1,140 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Numpy data source machinery.
"""
import decimal
from dataclasses import dataclass
import numpy as np
import pandas as pd
# our minimum structured array layout for ohlc data
base_ohlc_dtype = np.dtype(
[
('index', int),
('time', float),
('open', float),
('high', float),
('low', float),
('close', float),
('volume', int),
]
)
# map time frame "keys" to minutes values
tf_in_1m = {
'1m': 1,
'5m': 5,
'15m': 15,
'30m': 30,
'1h': 60,
'4h': 240,
'1d': 1440,
}
def float_digits(
value: float,
) -> int:
return int(-decimal.Decimal(str(value)).as_tuple().exponent)
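Worked examples of the tick-size helper above:
assert float_digits(0.01) == 2    # penny tick
assert float_digits(0.25) == 2    # quarter-point futures tick
assert float_digits(0.0001) == 4  # fx style pip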
def ohlc_zeros(length: int) -> np.ndarray:
"""Construct an OHLC field formatted structarray.
For "why a structarray" see here: https://stackoverflow.com/a/52443038
Bottom line, they're faster than ``np.recarray``.
"""
return np.zeros(length, dtype=base_ohlc_dtype)
@dataclass
class Symbol:
"""I guess this is some kinda container thing for dealing with
all the different meta-data formats from brokers?
"""
key: str = ''
min_tick: float = 0.01
contract: str = ''
def digits(self) -> int:
"""Return the trailing number of digits specified by the
min tick size for the instrument.
"""
return float_digits(self.min_tick)
def from_df(
df: pd.DataFrame,
source=None,
default_tf=None
) -> np.recarray:
"""Convert OHLC formatted ``pandas.DataFrame`` to ``numpy.recarray``.
"""
df.reset_index(inplace=True)
# hackery to convert field names
date = 'Date'
if 'date' in df.columns:
date = 'date'
# convert to POSIX time
df[date] = [d.timestamp() for d in df[date]]
# try to rename from some camel case
columns = {
'Date': 'time',
'date': 'time',
'Open': 'open',
'High': 'high',
'Low': 'low',
'Close': 'close',
'Volume': 'volume',
}
df = df.rename(columns=columns)
for name in df.columns:
if name not in base_ohlc_dtype.names[1:]:
del df[name]
# TODO: it turns out column access on recarrays is actually slower:
# https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
# it might make sense to make these structured arrays?
array = df.to_records()
_nan_to_closest_num(array)
return array
def _nan_to_closest_num(array: np.ndarray):
"""Return interpolated values instead of NaN.
"""
for col in ['open', 'high', 'low', 'close']:
mask = np.isnan(array[col])
if not mask.size:
continue
array[col][mask] = np.interp(
np.flatnonzero(mask), np.flatnonzero(~mask), array[col][~mask]
)


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
marketstore cli.
"""


@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
``marketstore`` integration.
@ -16,6 +32,7 @@ import msgpack
import numpy as np
import pandas as pd
import pymarketstore as pymkts
import tractor
from trio_websocket import open_websocket_url
from ..log import get_logger, get_console_log
@ -320,7 +337,8 @@ async def stream_quotes(
# update cache
_cache[symbol].update(quote)
else:
quotes = {symbol: [{key.lower(): val for key, val in quote.items()}]}
quotes = {
symbol: [{key.lower(): val for key, val in quote.items()}]}
if quotes:
yield quotes


@ -0,0 +1,160 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Financial signal processing for the peeps.
"""
from typing import AsyncIterator, Callable, Tuple
import trio
import tractor
import numpy as np
from ..log import get_logger
from .. import data
from ._momo import _rsi
from ..data import attach_shm_array, Feed
log = get_logger(__name__)
_fsps = {'rsi': _rsi}
async def latency(
source: 'TickStream[Dict[str, float]]', # noqa
ohlcv: np.ndarray
) -> AsyncIterator[np.ndarray]:
"""Latency measurements, broker to piker.
"""
# TODO: do we want to offer yielding this async
# before the rt data connection comes up?
# deliver zeros for all prior history
yield np.zeros(len(ohlcv))
async for quote in source:
ts = quote.get('broker_ts')
if ts:
# This is codified in the per-broker normalization layer
# TODO: Add more measure points and diffs for full system
# stack tracing.
value = quote['brokerd_ts'] - quote['broker_ts']
yield value
async def increment_signals(
feed: Feed,
dst_shm: 'SharedArray', # noqa
) -> None:
"""Increment the underlying shared memory buffer on every "increment"
msg received from the underlying data feed.
"""
async for msg in await feed.index_stream():
array = dst_shm.array
last = array[-1:].copy()
# write new slot to the buffer
dst_shm.push(last)
@tractor.stream
async def cascade(
ctx: tractor.Context,
brokername: str,
src_shm_token: dict,
dst_shm_token: Tuple[str, np.dtype],
symbol: str,
fsp_func_name: str,
) -> AsyncIterator[dict]:
"""Chain streaming signal processors and deliver output to
destination mem buf.
"""
src = attach_shm_array(token=src_shm_token)
dst = attach_shm_array(readonly=False, token=dst_shm_token)
func: Callable = _fsps[fsp_func_name]
# open a data feed stream with requested broker
async with data.open_feed(brokername, [symbol]) as feed:
assert src.token == feed.shm.token
# TODO: load appropriate fsp with input args
async def filter_by_sym(sym, stream):
async for quotes in stream:
for symbol, quotes in quotes.items():
if symbol == sym:
yield quotes
out_stream = func(
filter_by_sym(symbol, feed.stream),
feed.shm,
)
# TODO: XXX:
# THERE'S A BIG BUG HERE WITH THE `index` field since we're
# prepending a copy of the first value a few times to make
# sub-curves align with the parent bar chart.
#
# This likely needs to be fixed either by,
# - manually assigning the index and historical data
# separately to the shm array (i.e. not using .push())
# - developing some system on top of the shared mem array that
# is `index` aware such that historical data can be indexed
# relative to the true first datum? Not sure if this is sane
# for derivatives.
# Conduct a single iteration of fsp with historical bars input
# and get historical output
history_output = await out_stream.__anext__()
# build a struct array which includes an 'index' field to push
# as history
history = np.array(
np.arange(len(history_output)),
dtype=dst.array.dtype
)
history[fsp_func_name] = history_output
# TODO: talk to ``pyqtgraph`` core about proper way to solve this:
# XXX: hack to get curves aligned with bars graphics: prepend
# a copy of the first datum..
# dst.push(history[:1])
# check for data length mis-alignment and fill missing values
diff = len(src.array) - len(history)
if diff >= 0:
print(f"WTF DIFFZZZ {diff}")
for _ in range(diff):
dst.push(history[:1])
# compare with source signal and time align
index = dst.push(history)
yield index
async with trio.open_nursery() as n:
n.start_soon(increment_signals, feed, dst)
async for processed in out_stream:
log.debug(f"{fsp_func_name}: {processed}")
index = src.index
dst.array[-1][fsp_func_name] = processed
await ctx.send_yield(index)

piker/fsp/_momo.py

@ -0,0 +1,226 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Momentum bby.
"""
from typing import AsyncIterator, Optional
import numpy as np
from numba import jit, float64, optional, int64
from ..data._normalize import iterticks
# TODO: things to figure the fuck out:
# - how to handle non-plottable values
# - composition of fsps / implicit chaining
@jit(
float64[:](
float64[:],
optional(float64),
optional(float64)
),
nopython=True,
nogil=True
)
def ema(
y: 'np.ndarray[float64]',
alpha: optional(float64) = None,
ylast: optional(float64) = None,
) -> 'np.ndarray[float64]':
r"""Exponential weighted moving average owka 'Exponential smoothing'.
- https://en.wikipedia.org/wiki/Moving_average#Exponential_moving_average
- https://en.wikipedia.org/wiki/Exponential_smoothing
Fun facts:
A geometric progression is the discrete version of an
exponential function, that is where the name for this
smoothing method originated according to statistics lore. In
signal processing parlance, an EMA is a first order IIR filter.
.. math::

    .tex
    S_{t} = \begin{cases}
        Y_{1}, & t = 1 \\
        \alpha Y_{t} + (1 - \alpha) \cdot S_{t-1}, & t > 1
    \end{cases}

    .nerd
    (2) s = {
        s[0] = y[0]; t = 0
        s[t] = a*y[t] + (1-a)*s[t-1], t > 0.
    }
More discussion here:
https://stackoverflow.com/questions/42869495/numpy-version-of-exponential-weighted-moving-average-equivalent-to-pandas-ewm
"""
n = y.shape[0]
if alpha is None:
# https://en.wikipedia.org/wiki/Moving_average#Relationship_between_SMA_and_EMA
# use the "center of mass" convention making an ema compare
# directly to the com of a SMA or WMA:
alpha = 2 / float(n + 1)
s = np.empty(n, dtype=float64)
if n == 1:
s[0] = y[0] * alpha + ylast * (1 - alpha)
else:
if ylast is None:
s[0] = y[0]
else:
s[0] = ylast
for i in range(1, n):
s[i] = y[i] * alpha + s[i-1] * (1 - alpha)
return s
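A tiny numeric check of the recursion above (``numba`` is needed to compile the signature; alpha is passed explicitly so the result is easy to verify by hand):
import numpy as np
y = np.array([1.0, 2.0, 3.0])
s = ema(y, 0.5, None)  # alpha=0.5, no prior ema state
# s[0] = 1, s[1] = .5*2 + .5*1 = 1.5, s[2] = .5*3 + .5*1.5 = 2.25
assert np.allclose(s, [1.0, 1.5, 2.25])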
# @jit(
# float64[:](
# float64[:],
# int64,
# float64,
# float64,
# ),
# nopython=True,
# nogil=True
# )
def rsi(
signal: 'np.ndarray[float64]',
period: int64 = 14,
up_ema_last: float64 = None,
down_ema_last: float64 = None,
) -> 'np.ndarray[float64]':
alpha = 1/period
df = np.diff(signal)
up = np.where(df > 0, df, 0)
up_ema = ema(up, alpha, up_ema_last)
down = np.where(df < 0, -df, 0)
down_ema = ema(down, alpha, down_ema_last)
# avoid dbz errors
rs = np.divide(
up_ema,
down_ema,
out=np.zeros_like(up_ema),
where=down_ema != 0
)
# map rs through sigmoid (with range [0, 100])
rsi = 100 - 100 / (1 + rs)
# rsi = 100 * (up_ema / (up_ema + down_ema))
# also return the last ema state for next iteration
return rsi, up_ema[-1], down_ema[-1]
def wma(
signal: np.ndarray,
length: int,
weights: Optional[np.ndarray] = None,
) -> np.ndarray:
if weights is None:
# default is a standard arithmetic mean
seq = np.full((length,), 1)
weights = seq / seq.sum()
assert length == len(weights)
return np.convolve(signal, weights, 'valid')
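# Usage sketch (illustrative values only): with no ``weights`` this
# reduces to a simple moving average over ``length`` samples via the
# 'valid' convolution:
assert np.allclose(wma(np.array([1., 2., 3., 4.]), 2), [1.5, 2.5, 3.5])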
# @piker.fsp(
# aggregates=[60, 60*5, 60*60, '4H', '1D'],
# )
async def _rsi(
source: 'QuoteStream[Dict[str, Any]]', # noqa
ohlcv: "ShmArray[T<'close'>]",
period: int = 14,
) -> AsyncIterator[np.ndarray]:
"""Multi-timeframe streaming RSI.
https://en.wikipedia.org/wiki/Relative_strength_index
"""
sig = ohlcv.array['close']
# wilder says to seed the RSI EMAs with the SMA for the "period"
seed = wma(ohlcv.last(period)['close'], period)[0]
# TODO: the emas here should be seeded with a period SMA as per
# wilder's original formula..
rsi_h, last_up_ema_close, last_down_ema_close = rsi(sig, period, seed, seed)
up_ema_last = last_up_ema_close
down_ema_last = last_down_ema_close
# deliver history
yield rsi_h
index = ohlcv.index
async for quote in source:
# tick based updates
for tick in iterticks(quote):
# though incorrect, the below is interesting
# sig = ohlcv.last(period)['close']
# get only the last 2 "datums" which will be diffed to
# calculate the real-time RSI output datum
sig = ohlcv.last(2)['close']
# the ema needs to be computed from the "last bar"
# TODO: how to make this cleaner
if ohlcv.index > index:
last_up_ema_close = up_ema_last
last_down_ema_close = down_ema_last
index = ohlcv.index
rsi_out, up_ema_last, down_ema_last = rsi(
sig,
period=period,
up_ema_last=last_up_ema_close,
down_ema_last=last_down_ema_close,
)
yield rsi_out[-1:]
async def _wma(
source, #: AsyncStream[np.ndarray],
length: int,
ohlcv: np.ndarray, # price time-frame "aware"
) -> AsyncIterator[np.ndarray]: # maybe something like FspStream?
"""Streaming weighted moving average.
``weights`` is a sequence of already scaled values. As an example
for the WMA often found in "technical analysis":
``weights = np.arange(1, N + 1) / (N * (N + 1) / 2)``.
"""
# deliver historical output as "first yield"
yield wma(ohlcv.array['close'], length)
# begin real-time section
async for quote in source:
for tick in iterticks(quote, type='trade'):
yield wma(ohlcv.last(length))

View File

@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Log like a forester!
"""
@ -7,6 +23,8 @@ import json
import tractor
from pygments import highlight, lexers, formatters
# Makes it so we only see the full module name when using ``__name__``
# without the extra "piker." prefix.
_proj_name = 'piker'

View File

@ -0,0 +1,22 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Stuff for your eyes, aka super hawt Qt UI components.
Currently we only support PyQt5 due to this issue in PySide2:
https://bugreports.qt.io/projects/PYSIDE/issues/PYSIDE-1313
"""

332
piker/ui/_axes.py 100644
View File

@ -0,0 +1,332 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Chart axes graphics and behavior.
"""
from typing import List, Tuple, Optional
import pandas as pd
import pyqtgraph as pg
from PyQt5 import QtCore, QtGui
from PyQt5.QtCore import QPointF
from ._style import DpiAwareFont, hcolor, _font
from ..data._source import float_digits
_axis_pen = pg.mkPen(hcolor('bracket'))
class Axis(pg.AxisItem):
"""A better axis that sizes to typical tick contents considering font size.
"""
def __init__(
self,
linked_charts,
typical_max_str: str = '100 000.00',
min_tick: int = 2,
**kwargs
) -> None:
super().__init__(**kwargs)
self.linked_charts = linked_charts
self._min_tick = min_tick
self.setTickFont(_font.font)
self.setStyle(**{
'textFillLimits': [(0, 0.666)],
'tickFont': _font.font,
})
self.setTickFont(_font.font)
self.setPen(_axis_pen)
self.typical_br = _font._qfm.boundingRect(typical_max_str)
# size the pertinent axis dimension to a "typical value"
self.resize()
def set_min_tick(self, size: int) -> None:
self._min_tick = size
class PriceAxis(Axis):
def __init__(
self,
*args,
**kwargs,
) -> None:
super().__init__(*args, orientation='right', **kwargs)
def resize(self) -> None:
self.setWidth(self.typical_br.width())
# XXX: drop for now since it just eats up h space
def tickStrings(self, vals, scale, spacing):
# TODO: figure out how to enforce min tick spacing by passing
# it into the parent type
digits = max(float_digits(spacing * scale), self._min_tick)
# print(f'vals: {vals}\nscale: {scale}\nspacing: {spacing}')
# print(f'digits: {digits}')
return [
('{value:,.{digits}f}')
.format(
digits=digits,
value=v,
).replace(',', ' ') for v in vals
]
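# Formatting sketch (illustrative value only): ticks use a comma
# thousands-separator which is then swapped for a space, so at 2 digits
# a value of 1234567.891 renders as:
assert '{value:,.{digits}f}'.format(
    digits=2, value=1234567.891,
).replace(',', ' ') == '1 234 567.89'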
class DynamicDateAxis(Axis):
# time formats mapped by seconds between bars
tick_tpl = {
60 * 60 * 24: '%Y-%b-%d',
60: '%H:%M',
30: '%H:%M:%S',
5: '%H:%M:%S',
}
def resize(self) -> None:
self.setHeight(self.typical_br.height() + 3)
def _indexes_to_timestrs(
self,
indexes: List[int],
) -> List[str]:
bars = self.linked_charts.chart._array
bars_len = len(bars)
times = bars['time']
epochs = times[list(
map(int, filter(lambda i: i < bars_len, indexes))
)]
# TODO: **don't** have this hard coded shift to EST
dts = pd.to_datetime(epochs, unit='s') # - 4*pd.offsets.Hour()
delay = times[-1] - times[-2]
return dts.strftime(self.tick_tpl[delay])
def tickStrings(self, values: List[float], scale, spacing):
return self._indexes_to_timestrs(values)
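# Conversion sketch (illustrative epochs only): two consecutive
# 1-minute bars (a 60s delay) select the '%H:%M' template above:
assert list(
    pd.to_datetime([1604678400, 1604678460], unit='s').strftime('%H:%M')
) == ['16:00', '16:01']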
class AxisLabel(pg.GraphicsObject):
_w_margin = 0
_h_margin = 0
def __init__(
self,
parent: Axis,
digits: int = 2,
bg_color: str = 'bracket',
fg_color: str = 'black',
opacity: int = 0,
font_size_inches: Optional[float] = None,
):
super().__init__(parent)
self.setFlag(self.ItemIgnoresTransformations)
self.parent = parent
self.opacity = opacity
self.label_str = ''
self.digits = digits
self._txt_br: QtCore.QRect = None
self._dpifont = DpiAwareFont(size_in_inches=font_size_inches)
self._dpifont.configure_to_dpi(_font._screen)
self.bg_color = pg.mkColor(hcolor(bg_color))
self.fg_color = pg.mkColor(hcolor(fg_color))
self.rect = None
def paint(self, p, option, widget):
# p.setCompositionMode(QtGui.QPainter.CompositionMode_SourceOver)
if self.label_str:
if not self.rect:
self._size_br_from_str(self.label_str)
p.setFont(self._dpifont.font)
p.setPen(self.fg_color)
p.setOpacity(self.opacity)
p.fillRect(self.rect, self.bg_color)
# can be overridden in a subtype
self.draw(p, self.rect)
p.drawText(self.rect, self.text_flags, self.label_str)
def draw(
self,
p: QtGui.QPainter,
rect: QtCore.QRectF
) -> None:
# this adds a nice black outline around the label for some odd
# reason; ok by us
p.setOpacity(self.opacity)
p.drawRect(self.rect)
def boundingRect(self): # noqa
# if self.label_str:
# self._size_br_from_str(self.label_str)
# return self.rect
# return QtCore.QRectF()
return self.rect or QtCore.QRectF()
def _size_br_from_str(self, value: str) -> None:
"""Do our best to render the bounding rect to a set margin
around provided string contents.
"""
# size the filled rect to text and/or parent axis
br = self._txt_br = self._dpifont.boundingRect(value)
txt_h, txt_w = br.height(), br.width()
h, w = self.size_hint()
self.rect = QtCore.QRectF(
0, 0,
(w or txt_w) + self._w_margin,
(h or txt_h) + self._h_margin,
)
# _common_text_flags = (
# QtCore.Qt.TextDontClip |
# QtCore.Qt.AlignCenter |
# QtCore.Qt.AlignTop |
# QtCore.Qt.AlignHCenter |
# QtCore.Qt.AlignVCenter
# )
class XAxisLabel(AxisLabel):
text_flags = (
QtCore.Qt.TextDontClip
| QtCore.Qt.AlignCenter
)
def size_hint(self) -> Tuple[float, float]:
# size to parent axis height
return self.parent.height(), None
def update_label(
self,
abs_pos: QPointF, # scene coords
value: float, # data for text
offset: int = 1 # if have margins, k?
) -> None:
timestrs = self.parent._indexes_to_timestrs([int(value)])
if not timestrs.any():
return
self.label_str = timestrs[0]
w = self.boundingRect().width()
self.setPos(QPointF(
abs_pos.x() - w / 2 - offset,
0,
))
self.update()
class YAxisLabel(AxisLabel):
_h_margin = 3
# _w_margin = 1
text_flags = (
# QtCore.Qt.AlignLeft
QtCore.Qt.AlignHCenter
| QtCore.Qt.AlignVCenter
| QtCore.Qt.TextDontClip
)
def size_hint(self) -> Tuple[float, float]:
# size to parent axis width
return None, self.parent.width()
def update_label(
self,
abs_pos: QPointF, # scene coords
value: float, # data for text
offset: int = 1 # on odd dimension and/or adds nice black line
) -> None:
# this is read inside ``.paint()``
self.label_str = '{value:,.{digits}f}'.format(
digits=self.digits, value=value).replace(',', ' ')
br = self.boundingRect()
h = br.height()
self.setPos(QPointF(
0,
abs_pos.y() - h / 2 - offset
))
self.update()
class YSticky(YAxisLabel):
"""Y-axis label that sticks to where it's placed despite chart resizing.
"""
def __init__(
self,
chart,
*args,
**kwargs
) -> None:
super().__init__(*args, **kwargs)
self._chart = chart
chart.sigRangeChanged.connect(self.update_on_resize)
self._last_datum = (None, None)
def update_on_resize(self, vr, r):
# TODO: add an `.index` to the array data-buffer layer
# and make this way less shitty...
# pretty sure we did that ^ ?
index, last = self._last_datum
if index is not None:
self.update_from_data(index, last)
def update_from_data(
self,
index: int,
value: float,
) -> None:
self._last_datum = (index, value)
self.update_label(
self._chart.mapFromView(QPointF(index, value)),
value
)

1110
piker/ui/_chart.py 100644

File diff suppressed because it is too large Load Diff

169
piker/ui/_exec.py 100644
View File

@ -0,0 +1,169 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Trio - Qt integration
Run ``trio`` in guest mode on top of the Qt event loop.
All global Qt runtime settings are mostly defined here.
"""
from functools import partial
import traceback
from typing import Tuple, Callable, Dict, Any
# Qt specific
import PyQt5 # noqa
from pyqtgraph import QtGui
from PyQt5 import QtCore
from PyQt5.QtCore import (
pyqtRemoveInputHook, Qt, QCoreApplication
)
import qdarkstyle
import trio
import tractor
from outcome import Error
# singleton app per actor
_qt_app: QtGui.QApplication = None
_qt_win: QtGui.QMainWindow = None
def current_screen() -> QtGui.QScreen:
global _qt_win, _qt_app
return _qt_app.screenAt(_qt_win.centralWidget().geometry().center())
# Proper high DPI scaling is available in Qt >= 5.6.0. This attribute
# must be set before creating the application
if hasattr(Qt, 'AA_EnableHighDpiScaling'):
QCoreApplication.setAttribute(Qt.AA_EnableHighDpiScaling, True)
if hasattr(Qt, 'AA_UseHighDpiPixmaps'):
QCoreApplication.setAttribute(Qt.AA_UseHighDpiPixmaps, True)
class MainWindow(QtGui.QMainWindow):
size = (800, 500)
title = 'piker chart (ur symbol is loading bby)'
def __init__(self, parent=None):
super().__init__(parent)
self.setMinimumSize(*self.size)
self.setWindowTitle(self.title)
def run_qtractor(
func: Callable,
args: Tuple,
main_widget: QtGui.QWidget,
tractor_kwargs: Dict[str, Any] = {},
window_type: QtGui.QMainWindow = MainWindow,
) -> None:
# avoids annoying message when entering debugger from qt loop
pyqtRemoveInputHook()
app = QtGui.QApplication.instance()
if app is None:
app = PyQt5.QtWidgets.QApplication([])
# TODO: we might not need this if it's desired
# to cancel the tractor machinery on Qt loop
# close, however the details of doing that correctly
# currently seem tricky..
app.setQuitOnLastWindowClosed(False)
# set global app singleton
global _qt_app
_qt_app = app
# This code is from Nathaniel, and I quote:
# "This is substantially faster than using a signal... for some
# reason Qt signal dispatch is really slow (and relies on events
# underneath anyway, so this is strictly less work)."
REENTER_EVENT = QtCore.QEvent.Type(QtCore.QEvent.registerEventType())
class ReenterEvent(QtCore.QEvent):
pass
class Reenter(QtCore.QObject):
def event(self, event):
event.fn()
return False
reenter = Reenter()
def run_sync_soon_threadsafe(fn):
event = ReenterEvent(REENTER_EVENT)
event.fn = fn
app.postEvent(reenter, event)
def done_callback(outcome):
print(f"Outcome: {outcome}")
if isinstance(outcome, Error):
exc = outcome.error
traceback.print_exception(type(exc), exc, exc.__traceback__)
app.quit()
# load dark theme
app.setStyleSheet(qdarkstyle.load_stylesheet(qt_api='pyqt5'))
# make window and exec
window = window_type()
instance = main_widget()
instance.window = window
widgets = {
'window': window,
'main': instance,
}
# setup tractor entry point args
main = partial(
tractor._main,
async_fn=func,
args=args + (widgets,),
arbiter_addr=(
tractor._default_arbiter_host,
tractor._default_arbiter_port,
),
name='qtractor',
**tractor_kwargs,
)
# guest mode
trio.lowlevel.start_guest_run(
main,
run_sync_soon_threadsafe=run_sync_soon_threadsafe,
done_callback=done_callback,
)
window.main_widget = main_widget
window.setCentralWidget(instance)
# store global ref
# set global app singleton
global _qt_win
_qt_win = window
# actually render to screen
window.show()
app.exec_()

View File

@ -0,0 +1,877 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Chart graphics for displaying a slew of different data types.
"""
# import time
from typing import List, Optional, Tuple
import numpy as np
import pyqtgraph as pg
# from numba import jit, float64, optional, int64
from PyQt5 import QtCore, QtGui
from PyQt5.QtCore import QLineF, QPointF
# from .._profile import timeit
from ._style import (
_xaxis_at,
hcolor,
_font,
)
from ._axes import YAxisLabel, XAxisLabel, YSticky
# XXX: these settings seem to result in really decent mouse scroll
# latency (in terms of perceived lag in cross hair) so really be sure
# there's an improvement if you want to change it.
_mouse_rate_limit = 60 # calc current screen refresh rate?
_debounce_delay = 1 / 2e3
_ch_label_opac = 1
class LineDot(pg.CurvePoint):
def __init__(
self,
curve: pg.PlotCurveItem,
index: int,
pos=None,
size: int = 2, # in pxs
color: str = 'default_light',
) -> None:
pg.CurvePoint.__init__(
self,
curve,
index=index,
pos=pos,
rotate=False,
)
# TODO: get pen from curve if not defined?
cdefault = hcolor(color)
pen = pg.mkPen(cdefault)
brush = pg.mkBrush(cdefault)
# presuming this is fast since it's built in?
dot = self.dot = QtGui.QGraphicsEllipseItem(
QtCore.QRectF(-size / 2, -size / 2, size, size)
)
# if we needed transformable dot?
# dot.translate(-size*0.5, -size*0.5)
dot.setPen(pen)
dot.setBrush(brush)
dot.setParentItem(self)
# keep a static size
self.setFlag(self.ItemIgnoresTransformations)
_corner_anchors = {
'top': 0,
'left': 0,
'bottom': 1,
'right': 1,
}
# XXX: fyi naming here is confusing / opposite to coords
_corner_margins = {
('top', 'left'): (-4, -5),
('top', 'right'): (4, -5),
('bottom', 'left'): (-4, 5),
('bottom', 'right'): (4, 5),
}
class ContentsLabel(pg.LabelItem):
"""Label anchored to a ``ViewBox`` typically for displaying
datum-wise points from the "viewed" contents.
"""
def __init__(
self,
chart: 'ChartPlotWidget', # noqa
anchor_at: str = ('top', 'right'),
justify_text: str = 'left',
font_size: Optional[int] = None,
) -> None:
font_size = font_size or _font.font.pixelSize()
super().__init__(justify=justify_text, size=f'{str(font_size)}px')
# anchor to viewbox
self.setParentItem(chart._vb)
chart.scene().addItem(self)
self.chart = chart
v, h = anchor_at
index = (_corner_anchors[h], _corner_anchors[v])
margins = _corner_margins[(v, h)]
self.anchor(itemPos=index, parentPos=index, offset=margins)
def update_from_ohlc(
self,
name: str,
index: int,
array: np.ndarray,
) -> None:
# this being "html" is the dumbest shit :eyeroll:
self.setText(
"<b>i</b>:{index}<br/>"
"<b>O</b>:{}<br/>"
"<b>H</b>:{}<br/>"
"<b>L</b>:{}<br/>"
"<b>C</b>:{}<br/>"
"<b>V</b>:{}".format(
# *self._array[index].item()[2:8],
*array[index].item()[2:8],
name=name,
index=index,
)
)
def update_from_value(
self,
name: str,
index: int,
array: np.ndarray,
) -> None:
data = array[index][name]
self.setText(f"{name}: {data:.2f}")
class CrossHair(pg.GraphicsObject):
def __init__(
self,
linkedsplitcharts: 'LinkedSplitCharts', # noqa
digits: int = 0
) -> None:
super().__init__()
# XXX: not sure why these are instance variables?
# It's not like we can change them on the fly..?
self.pen = pg.mkPen(
color=hcolor('default'),
style=QtCore.Qt.DashLine,
)
self.lines_pen = pg.mkPen(
color='#a9a9a9', # gray?
style=QtCore.Qt.DashLine,
)
self.lsc = linkedsplitcharts
self.graphics = {}
self.plots = []
self.active_plot = None
self.digits = digits
self._lastx = None
def add_plot(
self,
plot: 'ChartPlotWidget', # noqa
digits: int = 0,
) -> None:
# add ``pg.graphicsItems.InfiniteLine``s
# vertical and horizontal lines and a y-axis label
vl = plot.addLine(x=0, pen=self.lines_pen, movable=False)
hl = plot.addLine(y=0, pen=self.lines_pen, movable=False)
hl.hide()
yl = YAxisLabel(
parent=plot.getAxis('right'),
digits=digits or self.digits,
opacity=_ch_label_opac,
bg_color='default',
)
yl.hide() # on startup if mouse is off screen
# TODO: checkout what ``.sigDelayed`` can be used for
# (emitted once a sufficient delay occurs in mouse movement)
px_moved = pg.SignalProxy(
plot.scene().sigMouseMoved,
rateLimit=_mouse_rate_limit,
slot=self.mouseMoved,
delay=_debounce_delay,
)
px_enter = pg.SignalProxy(
plot.sig_mouse_enter,
rateLimit=_mouse_rate_limit,
slot=lambda: self.mouseAction('Enter', plot),
delay=_debounce_delay,
)
px_leave = pg.SignalProxy(
plot.sig_mouse_leave,
rateLimit=_mouse_rate_limit,
slot=lambda: self.mouseAction('Leave', plot),
delay=_debounce_delay,
)
self.graphics[plot] = {
'vl': vl,
'hl': hl,
'yl': yl,
'px': (px_moved, px_enter, px_leave),
}
self.plots.append(plot)
# Determine where to place x-axis label.
# Place below the last plot by default, otherwise
# keep x-axis right below main chart
plot_index = -1 if _xaxis_at == 'bottom' else 0
self.xaxis_label = XAxisLabel(
parent=self.plots[plot_index].getAxis('bottom'),
opacity=_ch_label_opac,
bg_color='default',
)
# place label off-screen during startup
self.xaxis_label.setPos(self.plots[0].mapFromView(QPointF(0, 0)))
def add_curve_cursor(
self,
plot: 'ChartPlotWidget', # noqa
curve: 'PlotCurveItem', # noqa
) -> LineDot:
# if this plot contains curves add line dot "cursors" to denote
# the current sample under the mouse
cursor = LineDot(curve, index=len(plot._array))
plot.addItem(cursor)
self.graphics[plot].setdefault('cursors', []).append(cursor)
return cursor
def mouseAction(self, action, plot): # noqa
if action == 'Enter':
self.active_plot = plot
# show horiz line and y-label
self.graphics[plot]['hl'].show()
self.graphics[plot]['yl'].show()
else: # Leave
self.active_plot = None
# hide horiz line and y-label
self.graphics[plot]['hl'].hide()
self.graphics[plot]['yl'].hide()
def mouseMoved(
self,
evt: 'Tuple[QMouseEvent]', # noqa
) -> None: # noqa
"""Update horizonal and vertical lines when mouse moves inside
either the main chart or any indicator subplot.
"""
pos = evt[0]
# find position inside active plot
try:
# map to view coordinate system
mouse_point = self.active_plot.mapToView(pos)
except AttributeError:
# mouse was not on active plot
return
x, y = mouse_point.x(), mouse_point.y()
plot = self.active_plot
# update y-range items
self.graphics[plot]['hl'].setY(y)
self.graphics[self.active_plot]['yl'].update_label(
abs_pos=pos, value=y
)
# Update x if cursor changed after discretization calc
# (this saves draw cycles on small mouse moves)
lastx = self._lastx
ix = round(x) # since bars are centered around index
if ix != lastx:
for plot, opts in self.graphics.items():
# move the vertical line to the current "center of bar"
opts['vl'].setX(ix)
# update the chart's "contents" label
plot.update_contents_labels(ix)
# update all subscribed curve dots
for cursor in opts.get('cursors', ()):
cursor.setIndex(ix)
# update the label on the bottom of the crosshair
self.xaxis_label.update_label(
# XXX: requires:
# https://github.com/pyqtgraph/pyqtgraph/pull/1418
# otherwise gobbles tons of CPU..
# map back to abs (label-local) coordinates
abs_pos=plot.mapFromView(QPointF(ix, y)),
value=x,
)
self._lastx = ix
def boundingRect(self):
try:
return self.active_plot.boundingRect()
except AttributeError:
return self.plots[0].boundingRect()
# @jit(
# # float64[:](
# # float64[:],
# # optional(float64),
# # optional(int16)
# # ),
# nopython=True,
# nogil=True
# )
def _mk_lines_array(data: List, size: int) -> np.ndarray:
"""Create an ndarray to hold lines graphics objects.
"""
return np.zeros_like(
data,
shape=(int(size), 3),
dtype=object,
)
# TODO: `numba` this?
# @jit(
# # float64[:](
# # float64[:],
# # optional(float64),
# # optional(int16)
# # ),
# nopython=True,
# nogil=True
# )
def bars_from_ohlc(
data: np.ndarray,
w: float,
start: int = 0,
) -> np.ndarray:
"""Generate an array of lines objects from input ohlc data.
"""
lines = _mk_lines_array(data, data.shape[0])
for i, q in enumerate(data[start:], start=start):
open, high, low, close, index = q[
['open', 'high', 'low', 'close', 'index']]
# high -> low vertical (body) line
if low != high:
hl = QLineF(index, low, index, high)
else:
# XXX: if we don't do this it renders a weird rectangle?
# see below for filtering this later...
hl = None
# NOTE: place the x-coord start as "middle" of the drawing range such
# that the open arm line-graphic is at the left-most-side of
# the index's range according to the view mapping.
# open line
o = QLineF(index - w, open, index, open)
# close line
c = QLineF(index, close, index + w, close)
# indexing here is as per the below comments
lines[i] = (hl, o, c)
# XXX: in theory we could get a further speedup by using a flat
# array and avoiding the call to `np.ravel()` below?
# lines[3*i:3*i+3] = (hl, o, c)
# XXX: legacy code from candles custom graphics:
# if not _tina_mode:
# else _tina_mode:
# self.lines = lines = np.concatenate(
# [high_to_low, open_sticks, close_sticks])
# use traditional up/down green/red coloring
# long_bars = np.resize(Quotes.close > Quotes.open, len(lines))
# short_bars = np.resize(
# Quotes.close < Quotes.open, len(lines))
# ups = lines[long_bars]
# downs = lines[short_bars]
# # draw "up" bars
# p.setPen(self.bull_brush)
# p.drawLines(*ups)
# # draw "down" bars
# p.setPen(self.bear_brush)
# p.drawLines(*downs)
return lines
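# Usage sketch (illustrative records only): each ohlc record maps to a
# (body, open-arm, close-arm) triple of ``QLineF``s; the arms extend
# ``w`` to the left/right of the record's ``index`` coordinate.
_demo_ohlc = np.array(
    [(0, 10., 12., 9., 11.), (1, 11., 11.5, 10.5, 11.)],
    dtype=[
        ('index', 'i8'), ('open', 'f8'), ('high', 'f8'),
        ('low', 'f8'), ('close', 'f8'),
    ],
)
_body, _larm, _rarm = bars_from_ohlc(_demo_ohlc, w=0.43)[0]
assert (_body.y1(), _body.y2()) == (9.0, 12.0)  # low -> high body line
assert _larm.x2() == _rarm.x1() == 0.0  # both arms meet at index 0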
class BarItems(pg.GraphicsObject):
"""Price range bars graphics rendered from a OHLC sequence.
"""
sigPlotChanged = QtCore.Signal(object)
# 0.5 is no overlap between arms, 1.0 is full overlap
w: float = 0.43
bars_pen = pg.mkPen(hcolor('bracket'))
# XXX: tina mode, see below
# bull_brush = pg.mkPen('#00cc00')
# bear_brush = pg.mkPen('#fa0000')
def __init__(
self,
# scene: 'QGraphicsScene', # noqa
plotitem: 'pg.PlotItem', # noqa
) -> None:
super().__init__()
self.last = QtGui.QPicture()
self.history = QtGui.QPicture()
# TODO: implement updateable pixmap solution
self._pi = plotitem
# self._scene = plotitem.vb.scene()
# self.picture = QtGui.QPixmap(1000, 300)
# plotitem.addItem(self.picture)
# self._pmi = None
# self._pmi = self._scene.addPixmap(self.picture)
# XXX: not sure this actually needs to be an array other
# than for the old tina mode calcs for up/down bars below?
# lines container
self.lines = _mk_lines_array([], 50e3)
# track the current length of drawable lines within the larger array
self.index: int = 0
# @timeit
def draw_from_data(
self,
data: np.ndarray,
start: int = 0,
):
"""Draw OHLC datum graphics from a ``np.ndarray``.
This routine is usually only called to draw the initial history.
"""
lines = bars_from_ohlc(data, self.w, start=start)
# save graphics for later reference and keep track
# of current internal "last index"
index = len(lines)
self.lines[:index] = lines
self.index = index
# up to last to avoid double draw of last bar
self.draw_lines(just_history=True, iend=self.index - 1)
self.draw_lines(iend=self.index)
# @timeit
def draw_lines(
self,
istart=0,
iend=None,
just_history=False,
# TODO: could get even fancier and only update the single close line?
lines=None,
) -> None:
"""Draw the current line set using the painter.
"""
if just_history:
# draw bars for the "history" picture
iend = iend or self.index - 1
pic = self.history
else:
# draw the last bar
istart = self.index - 1
iend = iend or self.index
pic = self.last
# use 2d array of lines objects, see conclusion on speed:
# https://stackoverflow.com/a/60089929
flat = np.ravel(self.lines[istart:iend])
# TODO: do this with numba for speed gain:
# https://stackoverflow.com/questions/58422690/filtering-a-numpy-array-what-is-the-best-approach
to_draw = flat[np.where(flat != None)] # noqa
# pre-computing a QPicture object allows paint() to run much
# more quickly, rather than re-drawing the shapes every time.
p = QtGui.QPainter(pic)
p.setPen(self.bars_pen)
# TODO: is there any way to not have to pass all the lines every
# iteration? It seems they won't draw unless it's done this way..
p.drawLines(*to_draw)
p.end()
# XXX: if we ever try using `QPixmap` again...
# if self._pmi is None:
# self._pmi = self.scene().addPixmap(self.picture)
# else:
# self._pmi.setPixmap(self.picture)
# trigger re-render
# https://doc.qt.io/qt-5/qgraphicsitem.html#update
self.update()
def update_from_array(
self,
array: np.ndarray,
just_history=False,
) -> None:
"""Update the last datum's bar graphic from input data array.
This routine should be interface compatible with
``pg.PlotCurveItem.setData()``. Normally this method in
``pyqtgraph`` seems to update all the data passed to the
graphics object, and then update/rerender, but here we're
assuming the prior graphics haven't changed (OHLC history rarely
does) so this "should" be simpler and faster.
"""
index = self.index
length = len(array)
extra = length - index
# start_bar_to_update = index - 100
if extra > 0:
# generate new graphics to match provided array
new = array[index:index + extra]
lines = bars_from_ohlc(new, self.w)
bars_added = len(lines)
self.lines[index:index + bars_added] = lines
self.index += bars_added
# start_bar_to_update = index - bars_added
self.draw_lines(just_history=True)
if just_history:
return
# current bar update
i, o, h, l, last, v = array[-1][
['index', 'open', 'high', 'low', 'close', 'volume']
]
assert i == self.index - 1
body, larm, rarm = self.lines[i]
# XXX: is there a faster way to modify this?
rarm.setLine(rarm.x1(), last, rarm.x2(), last)
# writer is responsible for changing open on "first" volume of bar
larm.setLine(larm.x1(), o, larm.x2(), o)
if l != h: # noqa
if body is None:
body = self.lines[index - 1][0] = QLineF(i, l, i, h)
else:
# update body
body.setLine(i, l, i, h)
else:
# XXX: h == l -> remove any HL line to avoid render bug
if body is not None:
body = self.lines[index - 1][0] = None
self.draw_lines(just_history=False)
# @timeit
def paint(self, p, opt, widget):
# profiler = pg.debug.Profiler(disabled=False, delayed=False)
# TODO: use to avoid drawing artefacts?
# self.prepareGeometryChange()
# p.setCompositionMode(0)
# TODO: one thing we could try here is pictures being drawn of
# a fixed count of bars such that based on the viewbox indices we
# only draw the "rounded up" number of "pictures worth" of bars
# as is necessary for what's in "view". Not sure if this will
# lead to any perf gains other than when zoomed in to fewer bars
# in view.
p.drawPicture(0, 0, self.history)
p.drawPicture(0, 0, self.last)
# TODO: if we can ever make pixmaps work...
# p.drawPixmap(0, 0, self.picture)
# self._pmi.setPixmap(self.picture)
# print(self.scene())
# profiler('bars redraw:')
def boundingRect(self):
# TODO: can we do rect caching to make this faster?
# Qt docs: https://doc.qt.io/qt-5/qgraphicsitem.html#boundingRect
# boundingRect _must_ indicate the entire area that will be
# drawn on or else we will get artifacts and possibly crashing.
# (in this case, QPicture does all the work of computing the
# bounding rect for us).
# compute aggregate bounding rectangle
lb = self.last.boundingRect()
hb = self.history.boundingRect()
return QtCore.QRectF(
# top left
QtCore.QPointF(hb.topLeft()),
# total size
QtCore.QSizeF(lb.size() + hb.size())
)
# XXX: when we get back to enabling tina mode for xb
# class CandlestickItems(BarItems):
# w2 = 0.7
# line_pen = pg.mkPen('#000000')
# bull_brush = pg.mkBrush('#00ff00')
# bear_brush = pg.mkBrush('#ff0000')
# def _generate(self, p):
# rects = np.array(
# [
# QtCore.QRectF(
# q.id - self.w,
# q.open,
# self.w2,
# q.close - q.open
# )
# for q in Quotes
# ]
# )
# p.setPen(self.line_pen)
# p.drawLines(
# [QtCore.QLineF(q.id, q.low, q.id, q.high)
# for q in Quotes]
# )
# p.setBrush(self.bull_brush)
# p.drawRects(*rects[Quotes.close > Quotes.open])
# p.setBrush(self.bear_brush)
# p.drawRects(*rects[Quotes.close < Quotes.open])
class LevelLabel(YSticky):
line_pen = pg.mkPen(hcolor('bracket'))
_w_margin = 4
_h_margin = 3
level: float = 0
def __init__(
self,
chart,
*args,
orient_v: str = 'bottom',
orient_h: str = 'left',
**kwargs
) -> None:
super().__init__(chart, *args, **kwargs)
# orientation around axis options
self._orient_v = orient_v
self._orient_h = orient_h
self._v_shift = {
'top': 1.,
'bottom': 0,
'middle': 1 / 2.
}[orient_v]
self._h_shift = {
'left': -1., 'right': 0
}[orient_h]
def update_label(
self,
abs_pos: QPointF, # scene coords
level: float, # data for text
offset: int = 1 # if have margins, k?
) -> None:
# write contents, type specific
self.set_label_str(level)
br = self.boundingRect()
h, w = br.height(), br.width()
# this triggers ``.paint()`` implicitly?
self.setPos(QPointF(
self._h_shift * w - offset,
abs_pos.y() - (self._v_shift * h) - offset
))
self.update()
self.level = level
def set_label_str(self, level: float):
# this is read inside ``.paint()``
# self.label_str = '{size} x {level:.{digits}f}'.format(
self.label_str = '{level:.{digits}f}'.format(
# size=self._size,
digits=self.digits,
level=level
).replace(',', ' ')
def size_hint(self) -> Tuple[None, None]:
return None, None
def draw(
self,
p: QtGui.QPainter,
rect: QtCore.QRectF
) -> None:
p.setPen(self.line_pen)
if self._orient_v == 'bottom':
lp, rp = rect.topLeft(), rect.topRight()
# p.drawLine(rect.topLeft(), rect.topRight())
elif self._orient_v == 'top':
lp, rp = rect.bottomLeft(), rect.bottomRight()
p.drawLine(lp.x(), lp.y(), rp.x(), rp.y())
class L1Label(LevelLabel):
size: float = 0
size_digits: float = 3
text_flags = (
QtCore.Qt.TextDontClip
| QtCore.Qt.AlignLeft
)
def set_label_str(self, level: float) -> None:
"""Reimplement the label string write to include the level's order-queue's
size in the text, e.g. 100 x 323.3.
"""
self.label_str = '{size:.{size_digits}f} x {level:,.{digits}f}'.format(
size_digits=self.size_digits,
size=self.size or '?',
digits=self.digits,
level=level
).replace(',', ' ')
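# Formatting sketch (illustrative values only): with ``size_digits=0``
# and ``digits=2`` a 100-lot quote at 323.3 renders as per the
# docstring above:
assert '{size:.{size_digits}f} x {level:,.{digits}f}'.format(
    size_digits=0, size=100, digits=2, level=323.3,
).replace(',', ' ') == '100 x 323.30'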
class L1Labels:
"""Level 1 bid ask labels for dynamic update on price-axis.
"""
max_value: str = '100.0 x 100 000.00'
def __init__(
self,
chart: 'ChartPlotWidget', # noqa
digits: int = 2,
size_digits: int = 0,
font_size_inches: float = 4 / 53.,
) -> None:
self.chart = chart
self.bid_label = L1Label(
chart=chart,
parent=chart.getAxis('right'),
# TODO: pass this from symbol data
digits=digits,
opacity=1,
font_size_inches=font_size_inches,
bg_color='papas_special',
fg_color='bracket',
orient_v='bottom',
)
self.bid_label.size_digits = size_digits
self.bid_label._size_br_from_str(self.max_value)
self.ask_label = L1Label(
chart=chart,
parent=chart.getAxis('right'),
# TODO: pass this from symbol data
digits=digits,
opacity=1,
font_size_inches=font_size_inches,
bg_color='papas_special',
fg_color='bracket',
orient_v='top',
)
self.ask_label.size_digits = size_digits
self.ask_label._size_br_from_str(self.max_value)
class LevelLine(pg.InfiniteLine):
def __init__(
self,
label: LevelLabel,
**kwargs,
) -> None:
self.label = label
super().__init__(**kwargs)
self.sigPositionChanged.connect(self.set_level)
def set_level(self, value: float) -> None:
self.label.update_from_data(0, self.value())
def level_line(
chart: 'ChartPlotWidget', # noqa
level: float,
digits: int = 1,
# size 4 font on 4k screen scaled down, so small-ish.
font_size_inches: float = 4 / 53.,
**linelabelkwargs
) -> LevelLine:
"""Convenience routine to add a styled horizontal line to a plot.
"""
label = LevelLabel(
chart=chart,
parent=chart.getAxis('right'),
# TODO: pass this from symbol data
digits=digits,
opacity=1,
font_size_inches=font_size_inches,
# TODO: make this take the view's bg pen
bg_color='papas_special',
fg_color='default',
**linelabelkwargs
)
label.update_from_data(0, level)
# TODO: can we somehow figure out a max value from the parent axis?
label._size_br_from_str(label.label_str)
line = LevelLine(
label,
movable=True,
angle=0,
)
line.setValue(level)
line.setPen(pg.mkPen(hcolor('default')))
# activate/draw label
line.setValue(level)
chart.plotItem.addItem(line)
return line

View File

@ -0,0 +1,101 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
UX interaction customs.
"""
import pyqtgraph as pg
from pyqtgraph import functions as fn
from ..log import get_logger
from ._style import _min_points_to_show
log = get_logger(__name__)
class ChartView(pg.ViewBox):
"""Price chart view box with interaction behaviors you'd expect from
any interactive platform:
- zoom on mouse scroll that auto fits y-axis
- no vertical scrolling
- zoom to a "fixed point" on the y-axis
"""
def __init__(
self,
parent=None,
**kwargs,
):
super().__init__(parent=parent, **kwargs)
# disable vertical scrolling
self.setMouseEnabled(x=True, y=False)
self.linked_charts = None
def wheelEvent(self, ev, axis=None):
"""Override "center-point" location for scrolling.
This is an override of the ``ViewBox`` method simply changing
the center of the zoom to be the y-axis.
TODO: PR a method into ``pyqtgraph`` to make this configurable
"""
if axis in (0, 1):
mask = [False, False]
mask[axis] = self.state['mouseEnabled'][axis]
else:
mask = self.state['mouseEnabled'][:]
# don't zoom more than the min points setting
l, lbar, rbar, r = self.linked_charts.chart.bars_range()
vl = r - l
if ev.delta() > 0 and vl <= _min_points_to_show:
log.debug("Max zoom bruh...")
return
if ev.delta() < 0 and vl >= len(self.linked_charts._array) + 666:
log.debug("Min zoom bruh...")
return
# actual scaling factor
s = 1.015 ** (ev.delta() * -1 / 20) # self.state['wheelScaleFactor'])
s = [(None if m is False else s) for m in mask]
# center = pg.Point(
# fn.invertQTransform(self.childGroup.transform()).map(ev.pos())
# )
# XXX: scroll "around" the right most element in the view
# which stays "pinned" in place.
# furthest_right_coord = self.boundingRect().topRight()
# yaxis = pg.Point(
# fn.invertQTransform(
# self.childGroup.transform()
# ).map(furthest_right_coord)
# )
# This seems like the most "intuitive" option, a hybrid of
# tws and tv styles
last_bar = pg.Point(rbar)
self._resetTarget()
self.scaleBy(s, last_bar)
ev.accept()
self.sigRangeChangedManually.emit(mask)
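# Scale-factor sketch: one standard wheel notch is ``delta == 120`` so
#   1.015 ** (120 * -1 / 20)  = 1.015 ** -6 ~= 0.915  (shrink range -> zoom in)
#   1.015 ** (-120 * -1 / 20) = 1.015 ** 6  ~= 1.093  (grow range -> zoom out)
# i.e. each notch re-scales the visible x-range by roughly 9%, anchored
# on the right-most bar via ``scaleBy(s, last_bar)``.
assert round(1.015 ** -6, 3) == 0.915 and round(1.015 ** 6, 3) == 1.093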

View File

@ -0,0 +1,114 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Signalling graphics and APIs.
WARNING: this code likely doesn't work at all (yet)
since it was copied from another class that shouldn't
have had it.
"""
import numpy as np
import pyqtgraph as pg
from PyQt5 import QtCore, QtGui
from .quantdom.charts import CenteredTextItem
from .quantdom.base import Quotes
from .quantdom.portfolio import Order, Portfolio
class SignallingApi(object):
def __init__(self, plotgroup):
self.plotgroup = plotgroup
self.chart = plotgroup.chart
def _show_text_signals(self, lbar, rbar):
signals = [
sig
for sig in self.signals_text_items[lbar:rbar]
if isinstance(sig, CenteredTextItem)
]
if len(signals) <= 50:
for sig in signals:
sig.show()
else:
for sig in signals:
sig.hide()
def _remove_signals(self):
self.chart.removeItem(self.signals_group_arrow)
self.chart.removeItem(self.signals_group_text)
del self.signals_text_items
del self.signals_group_arrow
del self.signals_group_text
self.signals_visible = False
def add_signals(self):
self.signals_group_text = QtGui.QGraphicsItemGroup()
self.signals_group_arrow = QtGui.QGraphicsItemGroup()
self.signals_text_items = np.empty(len(Quotes), dtype=object)
for p in Portfolio.positions:
x, price = p.id_bar_open, p.open_price
if p.type == Order.BUY:
y = Quotes[x].low * 0.99
pg.ArrowItem(
parent=self.signals_group_arrow,
pos=(x, y),
pen=self.plotgroup.long_pen,
brush=self.plotgroup.long_brush,
angle=90,
headLen=12,
tipAngle=50,
)
text_sig = CenteredTextItem(
parent=self.signals_group_text,
pos=(x, y),
pen=self.plotgroup.long_pen,
brush=self.plotgroup.long_brush,
text=(
'Buy at {:.%df}' % self.plotgroup.digits).format(
price),
valign=QtCore.Qt.AlignBottom,
)
text_sig.hide()
else:
y = Quotes[x].high * 1.01
pg.ArrowItem(
parent=self.signals_group_arrow,
pos=(x, y),
pen=self.plotgroup.short_pen,
brush=self.plotgroup.short_brush,
angle=-90,
headLen=12,
tipAngle=50,
)
text_sig = CenteredTextItem(
parent=self.signals_group_text,
pos=(x, y),
pen=self.plotgroup.short_pen,
brush=self.plotgroup.short_brush,
text=('Sell at {:.%df}' % self.plotgroup.digits).format(
price),
valign=QtCore.Qt.AlignTop,
)
text_sig.hide()
self.signals_text_items[x] = text_sig
self.chart.addItem(self.signals_group_arrow)
self.chart.addItem(self.signals_group_text)
self.signals_visible = True

167
piker/ui/_style.py 100644
View File

@ -0,0 +1,167 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Qt UI styling.
"""
from typing import Optional
import pyqtgraph as pg
from PyQt5 import QtCore, QtGui
from qdarkstyle.palette import DarkPalette
from ..log import get_logger
log = get_logger(__name__)
# chart-wide font
# font size 6px / 53 dpi (3x scaled down on 4k hidpi)
_default_font_inches_we_like = 6 / 53 # px / (px / inch) = inch
_down_2_font_inches_we_like = 4 / 53
class DpiAwareFont:
def __init__(
self,
name: str = 'Hack',
size_in_inches: Optional[float] = None,
) -> None:
self.name = name
self._qfont = QtGui.QFont(name)
self._iwl = size_in_inches or _default_font_inches_we_like
self._qfm = QtGui.QFontMetrics(self._qfont)
self._physical_dpi = None
self._screen = None
self._dpi_scalar = 1.
def _set_qfont_px_size(self, px_size: int) -> None:
self._qfont.setPixelSize(px_size)
self._qfm = QtGui.QFontMetrics(self._qfont)
@property
def font(self):
return self._qfont
@property
def px_size(self):
return self._qfont.pixelSize()
def configure_to_dpi(self, screen: QtGui.QScreen):
"""Set an appropriately sized font size depending on the screen DPI.
If we end up needing to generalize this more here there are resources
listed in the script in ``snippets/qt_screen_info.py``.
"""
dpi = screen.physicalDotsPerInch()
font_size = round(self._iwl * dpi)
log.info(
f"\nscreen:{screen.name()} with DPI: {dpi}"
f"\nbest font size is {font_size}\n"
)
self._set_qfont_px_size(font_size)
self._physical_dpi = dpi
self._screen = screen
def boundingRect(self, value: str) -> QtCore.QRectF:
screen = self._screen
if screen is None:
raise RuntimeError("You must call .configure_to_dpi() first!")
unscaled_br = self._qfm.boundingRect(value)
return QtCore.QRectF(
0,
0,
unscaled_br.width(),
unscaled_br.height(),
)
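# Sizing sketch: ``_default_font_inches_we_like`` is 6px at 53 DPI, so
# ``configure_to_dpi()`` would pick roughly:
assert round((6 / 53) * 96) == 11  # px on a typical ~96 DPI monitor
assert round((6 / 53) * 159) == 18  # px on a ~159 DPI (3x) hidpi screen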
# use inches size to be cross-resolution compatible?
_font = DpiAwareFont()
# TODO: re-compute font size when main widget switches screens?
# https://forum.qt.io/topic/54136/how-do-i-get-the-qscreen-my-widget-is-on-qapplication-desktop-screen-returns-a-qwidget-and-qobject_cast-qscreen-returns-null/3
# _i3_rgba = QtGui.QColor.fromRgbF(*[0.14]*3 + [1])
# splitter widget config
_xaxis_at = 'bottom'
# charting config
CHART_MARGINS = (0, 0, 2, 2)
_min_points_to_show = 6
_bars_from_right_in_follow_mode = int(6**2)
_bars_to_left_in_follow_mode = int(6**3)
_tina_mode = False
def enable_tina_mode() -> None:
"""Enable "tina mode" to make everything look "conventional"
like your pet hedgehog always wanted.
"""
# white background (for tinas like our pal xb)
pg.setConfigOption('background', 'w')
def hcolor(name: str) -> str:
"""Hex color codes by hipster speak.
"""
return {
# lives matter
'black': '#000000',
'erie_black': '#1B1B1B',
'licorice': '#1A1110',
'papas_special': '#06070c',
'svags': '#0a0e14',
# fifty shades
'gray': '#808080', # like the kick
'jet': '#343434',
'cadet': '#91A3B0',
'marengo': '#91A3B0',
'charcoal': '#36454F',
'gunmetal': '#91A3B0',
'battleship': '#848482',
'davies': '#555555',
'bracket': '#666666', # like the logo
'original': '#a9a9a9',
# palette
'default': DarkPalette.COLOR_BACKGROUND_NORMAL,
'default_light': DarkPalette.COLOR_BACKGROUND_LIGHT,
'white': '#ffffff', # for tinas and sunbathers
# blue zone
'dad_blue': '#326693', # like his shirt
'vwap_blue': '#0582fb',
'dodger_blue': '#1e90ff', # like the team?
'panasonic_blue': '#0040be', # from japan
# 'bid_blue': '#0077ea', # like the L1
'bid_blue': '#3094d9', # like the L1
'aquaman': '#39abd0',
# traditional
'tina_green': '#00cc00',
'tina_red': '#fa0000',
}[name]
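# Usage sketch: pens and brushes are built from these names throughout
# the UI modules, e.g.:
#   pg.mkPen(hcolor('bracket'))          # '#666666' outline pen
#   pg.mkBrush(hcolor('papas_special'))  # near-black label fill
assert hcolor('bracket') == '#666666'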

View File

@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Console interface to UI components.
"""
@ -63,7 +79,7 @@ def monitor(config, rate, name, dhost, test, tl):
name='monitor',
loglevel=loglevel if tl else None,
rpc_module_paths=['piker.ui.kivy.monitor'],
start_method='forkserver',
debug_mode=True,
)
@ -101,5 +117,29 @@ def optschain(config, symbol, date, tl, rate, test):
partial(main, tries=1),
name='kivy-options-chain',
loglevel=loglevel if tl else None,
start_method='forkserver',
)
@cli.command()
@click.option('--date', '-d', help='Contracts expiry date')
@click.option('--test', '-t', help='Test quote stream file')
@click.option('--rate', '-r', default=1, help='Logging level')
@click.argument('symbol', required=True)
@click.pass_obj
def chart(config, symbol, date, rate, test):
"""Start an option chain UI
"""
from ._chart import _main
# global opts
brokername = config['broker']
tractorloglevel = config['tractorloglevel']
_main(
sym=symbol,
brokername=brokername,
tractor_kwargs={
'debug_mode': True,
'loglevel': tractorloglevel,
},
)

View File

@ -15,11 +15,11 @@ from kivy.lang import Builder
from kivy.app import async_runTouchApp
from kivy.core.window import Window
from ..brokers.data import DataFeed
from ...brokers.data import DataFeed
from .tabular import (
Row, TickerTable, _kv, _black_rgba, colorcode,
)
from ..log import get_logger
from ...log import get_logger
from .pager import PagerView
@ -69,7 +69,6 @@ async def update_quotes(
chngcell.color = color
hdrcell.color = color
# briefly highlight bg of certain cells on each trade execution
unflash = set()
tick_color = None
@ -105,39 +104,37 @@ async def update_quotes(
# initial coloring
to_sort = set()
for sym, quote in first_quotes.items():
row = table.get_row(sym)
record, displayable = formatter(
quote, symbol_data=symbol_data)
row.update(record, displayable)
color_row(row, record, {})
for quote in first_quotes:
row = table.get_row(quote['symbol'])
row.update(quote)
color_row(row, quote, {})
to_sort.add(row.widget)
table.render_rows(to_sort)
log.debug("Finished initializing update loop")
task_status.started()
# real-time cell update loop
async for quotes in agen: # new quotes data only
to_sort = set()
for symbol, quote in quotes.items():
row = table.get_row(symbol)
record, displayable = formatter(
quote, symbol_data=symbol_data)
# don't red/green the header cell in ``row.update()``
record.pop('symbol')
quote.pop('symbol')
quote.pop('key')
# determine if sorting should happen
sort_key = table.sort_key
last = row.get_field(sort_key)
new = record.get(sort_key, last)
new = quote.get(sort_key, last)
if new != last:
to_sort.add(row.widget)
# update and color
cells = row.update(record, displayable)
color_row(row, record, cells)
cells = row.update(quote)
color_row(row, quote, cells)
if to_sort:
table.render_rows(to_sort)
@ -179,18 +176,14 @@ async def _async_main(
This is started with cli cmd `piker monitor`.
'''
feed = DataFeed(portal, brokermod)
quote_gen, quotes = await feed.open_stream(
quote_gen, first_quotes = await feed.open_stream(
symbols,
'stock',
rate=rate,
test=test,
)
first_quotes, _ = feed.format_quotes(quotes)
if first_quotes[0].get('last') is None:
log.error("Broker API is down temporarily")
return
first_quotes_list = list(first_quotes.copy().values())
quotes = list(first_quotes.copy().values())
# build out UI
Window.set_title(f"monitor: {name}\t(press ? for help)")
@ -202,7 +195,9 @@ async def _async_main(
bidasks = brokermod._stock_bidasks
# add header row
headers = first_quotes[0].keys()
headers = list(first_quotes_list[0].keys())
headers.remove('displayable')
header = Row(
{key: key for key in headers},
headers=headers,
@ -217,11 +212,17 @@ async def _async_main(
cols=1,
size_hint=(1, None),
)
for ticker_record in first_quotes:
for ticker_record in first_quotes_list:
symbol = ticker_record['symbol']
table.append_row(
ticker_record['symbol'],
Row(ticker_record, headers=('symbol',),
bidasks=bidasks, table=table)
symbol,
Row(
ticker_record,
headers=('symbol',),
bidasks=bidasks,
no_cell=('displayable',),
table=table
)
)
table.last_clicked_row = next(iter(table.symbols2rows.values()))

View File

@ -11,7 +11,6 @@ from kivy.properties import BooleanProperty, ObjectProperty
from kivy.core.window import Window
from kivy.clock import Clock
from ...log import get_logger
@ -100,7 +99,8 @@ class MouseOverBehavior(object):
# throttle at 10ms latency
@triggered(timeout=0.01, interval=False)
def _on_mouse_pos(cls, *args):
log.debug(f"{cls} time since last call: {time.time() - cls._last_time}")
log.debug(
f"{cls} time since last call: {time.time() - cls._last_time}")
cls._last_time = time.time()
# XXX: how to still do this at the class level?
# don't proceed if I'm not displayed <=> If have no parent

View File

@ -15,11 +15,10 @@ from kivy.app import async_runTouchApp
from kivy.core.window import Window
from kivy.uix.label import Label
from ..log import get_logger, get_console_log
from ..brokers.data import DataFeed
from ..brokers import get_brokermod
from ...log import get_logger, get_console_log
from ...brokers.data import DataFeed
from ...brokers import get_brokermod
from .pager import PagerView
from .tabular import Row, HeaderCell, Cell, TickerTable
from .monitor import update_quotes

View File

@ -9,8 +9,8 @@ from kivy.uix.widget import Widget
from kivy.uix.textinput import TextInput
from kivy.uix.scrollview import ScrollView
from ..log import get_logger
from .kivy.utils_async import async_bind
from ...log import get_logger
from .utils_async import async_bind
log = get_logger('keyboard')

View File

@ -12,8 +12,8 @@ from kivy.uix.button import Button
from kivy import utils
from kivy.properties import BooleanProperty
from ..log import get_logger
from .kivy.mouse_over import new_mouse_over_group
from ...log import get_logger
from .mouse_over import new_mouse_over_group
HoverBehavior = new_mouse_over_group()
@ -300,10 +300,10 @@ class Row(HoverBehavior, GridLayout):
# handle bidask cells
if key in layouts:
self.add_widget(layouts[key])
elif key in children_flat:
elif key in children_flat or key in no_cell:
# these cells have already been added to the `BidAskLayout`
continue
elif key not in no_cell:
else:
cell = self._append_cell(val, key, header=header)
cell.key = key
self._cell_widgets[key] = cell
@ -329,7 +329,7 @@ class Row(HoverBehavior, GridLayout):
self.add_widget(cell)
return cell
def update(self, record, displayable):
def update(self, record):
"""Update this row's cells with new values from a quote
``record``.
@ -341,7 +341,11 @@ class Row(HoverBehavior, GridLayout):
fgreen = colorcode('forestgreen')
red = colorcode('red2')
displayable = record['displayable']
for key, val in record.items():
if key not in displayable:
continue
last = self.get_field(key)
color = gray
try:

View File

@ -61,10 +61,10 @@ class AsyncCallbackQueue(object):
return self
async def __anext__(self):
self.event.clear()
self.event = async_lib.Event()
while not self.callback_result and not self.quit:
await self.event.wait()
self.event.clear()
self.event = async_lib.Event()
if self.callback_result:
return self.callback_result.popleft()

View File

@ -0,0 +1,3 @@
"""
Super hawt Qt UI components
"""

View File

@ -0,0 +1,67 @@
import sys
from PySide2.QtCharts import QtCharts
from PySide2.QtWidgets import QApplication, QMainWindow
from PySide2.QtCore import Qt, QPointF
from PySide2 import QtGui
import qdarkstyle
data = ((1, 7380, 7520, 7380, 7510, 7324),
(2, 7520, 7580, 7410, 7440, 7372),
(3, 7440, 7650, 7310, 7520, 7434),
(4, 7450, 7640, 7450, 7550, 7480),
(5, 7510, 7590, 7460, 7490, 7502),
(6, 7500, 7590, 7480, 7560, 7512),
(7, 7560, 7830, 7540, 7800, 7584))
app = QApplication([])
# set dark stylesheet
# import pdb; pdb.set_trace()
app.setStyleSheet(qdarkstyle.load_stylesheet_pyside())
series = QtCharts.QCandlestickSeries()
series.setDecreasingColor(Qt.darkRed)
series.setIncreasingColor(Qt.darkGreen)
ma5 = QtCharts.QLineSeries() # 5-days average data line
tm = [] # stores str type data
# in a loop, series and ma5 append corresponding data
for num, o, h, l, c, m in data:
candle = QtCharts.QCandlestickSet(o, h, l, c)
series.append(candle)
ma5.append(QPointF(num, m))
tm.append(str(num))
pen = candle.pen()
# import pdb; pdb.set_trace()
chart = QtCharts.QChart()
# import pdb; pdb.set_trace()
series.setBodyOutlineVisible(False)
series.setCapsVisible(False)
# brush = QtGui.QBrush()
# brush.setColor(Qt.green)
# series.setBrush(brush)
chart.addSeries(series) # candle
chart.addSeries(ma5) # ma5 line
chart.setAnimationOptions(QtCharts.QChart.SeriesAnimations)
chart.createDefaultAxes()
chart.legend().hide()
chart.axisX(series).setCategories(tm)
chart.axisX(ma5).setVisible(False)
view = QtCharts.QChartView(chart)
view.chart().setTheme(QtCharts.QChart.ChartTheme.ChartThemeDark)
view.setRubberBand(QtCharts.QChartView.HorizontalRubberBand)
# chartview.chart().setTheme(QtCharts.QChart.ChartTheme.ChartThemeBlueCerulean)
ui = QMainWindow()
# ui.setGeometry(50, 50, 500, 300)
ui.setCentralWidget(view)
ui.show()
sys.exit(app.exec_())

View File

@ -0,0 +1,10 @@
"""
Curated set of components from ``Quantdom`` used as a starting
draft for real-time charting with ``pyqtgraph``.
Much thanks to the author:
https://github.com/constverum/Quantdom
Note this code is licensed Apache 2.0:
https://github.com/constverum/Quantdom/blob/master/LICENSE
"""

View File

@ -0,0 +1,188 @@
"""
Strategy and performance charting
"""
import numpy as np
import pyqtgraph as pg
from PyQt5 import QtCore, QtGui
from .base import Quotes
from .portfolio import Portfolio
from .utils import timeit
from .charts import (
PriceAxis,
CHART_MARGINS,
SampleLegendItem,
YAxisLabel,
CrossHairItem,
)
class EquityChart(QtGui.QWidget):
eq_pen_pos_color = pg.mkColor('#00cc00')
eq_pen_neg_color = pg.mkColor('#cc0000')
eq_brush_pos_color = pg.mkColor('#40ee40')
eq_brush_neg_color = pg.mkColor('#ee4040')
long_pen_color = pg.mkColor('#008000')
short_pen_color = pg.mkColor('#800000')
buy_and_hold_pen_color = pg.mkColor('#4444ff')
def __init__(self):
super().__init__()
self.xaxis = pg.DateAxisItem()
self.xaxis.setStyle(tickTextOffset=7, textFillLimits=[(0, 0.80)])
self.yaxis = PriceAxis()
self.layout = QtGui.QVBoxLayout(self)
self.layout.setContentsMargins(0, 0, 0, 0)
self.chart = pg.PlotWidget(
axisItems={'bottom': self.xaxis, 'right': self.yaxis},
enableMenu=False,
)
self.chart.setFrameStyle(QtGui.QFrame.StyledPanel | QtGui.QFrame.Plain)
self.chart.getPlotItem().setContentsMargins(*CHART_MARGINS)
self.chart.showGrid(x=True, y=True)
self.chart.hideAxis('left')
self.chart.showAxis('right')
self.chart.setCursor(QtCore.Qt.BlankCursor)
self.chart.sigXRangeChanged.connect(self._update_yrange_limits)
self.layout.addWidget(self.chart)
def _add_legend(self):
legend = pg.LegendItem((140, 100), offset=(10, 10))
legend.setParentItem(self.chart.getPlotItem())
for arr, item in self.curves:
legend.addItem(
SampleLegendItem(item),
item.opts['name']
if not isinstance(item, tuple)
else item[0].opts['name'],
)
def _add_ylabels(self):
self.ylabels = []
for arr, item in self.curves:
color = (
item.opts['pen']
if not isinstance(item, tuple)
else [i.opts['pen'] for i in item]
)
label = YAxisLabel(parent=self.yaxis, color=color)
self.ylabels.append(label)
def _update_ylabels(self, vb, rbar):
for i, curve in enumerate(self.curves):
arr, item = curve
ylast = arr[rbar]
ypos = vb.mapFromView(QtCore.QPointF(0, ylast)).y()
axlabel = self.ylabels[i]
axlabel.update_label_test(ypos=ypos, ydata=ylast)
def _update_yrange_limits(self, vb=None):
if not hasattr(self, 'min_curve'):
return
vr = self.chart.viewRect()
lbar, rbar = int(vr.left()), int(vr.right())
ylow = self.min_curve[lbar:rbar].min() * 1.1
yhigh = self.max_curve[lbar:rbar].max() * 1.1
std = np.std(self.max_curve[lbar:rbar]) * 4
self.chart.setLimits(yMin=ylow, yMax=yhigh, minYRange=std)
self.chart.setYRange(ylow, yhigh)
self._update_ylabels(vb, rbar)
@timeit
def plot(self):
equity_curve = Portfolio.equity_curve
eq_pos = np.zeros_like(equity_curve)
eq_neg = np.zeros_like(equity_curve)
eq_pos[equity_curve >= 0] = equity_curve[equity_curve >= 0]
eq_neg[equity_curve <= 0] = equity_curve[equity_curve <= 0]
# Equity
self.eq_pos_curve = pg.PlotCurveItem(
eq_pos,
name='Equity',
fillLevel=0,
antialias=True,
pen=self.eq_pen_pos_color,
brush=self.eq_brush_pos_color,
)
self.eq_neg_curve = pg.PlotCurveItem(
eq_neg,
name='Equity',
fillLevel=0,
antialias=True,
pen=self.eq_pen_neg_color,
brush=self.eq_brush_neg_color,
)
self.chart.addItem(self.eq_pos_curve)
self.chart.addItem(self.eq_neg_curve)
# Only Long
self.long_curve = pg.PlotCurveItem(
Portfolio.long_curve,
name='Only Long',
pen=self.long_pen_color,
antialias=True,
)
self.chart.addItem(self.long_curve)
# Only Short
self.short_curve = pg.PlotCurveItem(
Portfolio.short_curve,
name='Only Short',
pen=self.short_pen_color,
antialias=True,
)
self.chart.addItem(self.short_curve)
# Buy and Hold
self.buy_and_hold_curve = pg.PlotCurveItem(
Portfolio.buy_and_hold_curve,
name='Buy and Hold',
pen=self.buy_and_hold_pen_color,
antialias=True,
)
self.chart.addItem(self.buy_and_hold_curve)
self.curves = [
(Portfolio.equity_curve, (self.eq_pos_curve, self.eq_neg_curve)),
(Portfolio.long_curve, self.long_curve),
(Portfolio.short_curve, self.short_curve),
(Portfolio.buy_and_hold_curve, self.buy_and_hold_curve),
]
self._add_legend()
self._add_ylabels()
ch = CrossHairItem(self.chart)
self.chart.addItem(ch)
arrs = (
Portfolio.equity_curve,
Portfolio.buy_and_hold_curve,
Portfolio.long_curve,
Portfolio.short_curve,
)
np_arrs = np.concatenate(arrs)
_min = abs(np_arrs.min()) * -1.1
_max = np_arrs.max() * 1.1
self.chart.setLimits(
xMin=Quotes[0].id,
xMax=Quotes[-1].id,
yMin=_min,
yMax=_max,
minXRange=60,
)
self.min_curve = arrs[0].copy()
self.max_curve = arrs[0].copy()
for arr in arrs[1:]:
self.min_curve = np.minimum(self.min_curve, arr)
self.max_curve = np.maximum(self.max_curve, arr)

View File

@ -0,0 +1,147 @@
"""Base classes."""
from enum import Enum, auto
import numpy as np
import pandas as pd
from .const import ChartType, TimeFrame
__all__ = ('Indicator', 'Symbol', 'Quotes')
# I actually can't think of a worse reason to override an array than
# this:
# - a method .new() that mutates the data from an input data frame
# - mutating the time column wholesale based on a setting
# - enforcing certain fields / columns
# - zero overriding of any of the array interface for the purposes of
# a different underlying implementation.
# Literally all this can be done in a simple function with way less
# confusion for the reader (see the sketch after this class).
class BaseQuotes(np.recarray):
def __new__(cls, shape=None, dtype=None, order='C'):
dt = np.dtype(
[
('id', int),
('time', float),
('open', float),
('high', float),
('low', float),
('close', float),
('volume', int),
]
)
shape = shape or (1,)
return np.ndarray.__new__(cls, shape, (np.record, dt), order=order)
def _nan_to_closest_num(self):
"""Return interpolated values instead of NaN."""
for col in ['open', 'high', 'low', 'close']:
mask = np.isnan(self[col])
if not mask.size:
continue
self[col][mask] = np.interp(
np.flatnonzero(mask), np.flatnonzero(~mask), self[col][~mask]
)
def _set_time_frame(self, default_tf):
tf = {
1: TimeFrame.M1,
5: TimeFrame.M5,
15: TimeFrame.M15,
30: TimeFrame.M30,
60: TimeFrame.H1,
240: TimeFrame.H4,
1440: TimeFrame.D1,
}
minutes = int(np.diff(self.time[-10:]).min() / 60)
self.timeframe = tf.get(minutes) or tf[default_tf]
# bruh this isn't creating anything it's copying data in
# from a data frame...
def new(self, data, source=None, default_tf=None):
shape = (len(data),)
self.resize(shape, refcheck=False)
if isinstance(data, pd.DataFrame):
data.reset_index(inplace=True)
data.insert(0, 'id', data.index)
data.Date = self.convert_dates(data.Date)
data = data.rename(
columns={
'Date': 'time',
'Open': 'open',
'High': 'high',
'Low': 'low',
'Close': 'close',
'Volume': 'volume',
}
)
for name in self.dtype.names:
self[name] = data[name]
elif isinstance(data, (np.recarray, BaseQuotes)):
self[:] = data[:]
self._nan_to_closest_num()
self._set_time_frame(default_tf)
return self
def convert_dates(self, dates):
breakpoint()
return np.array([d.timestamp() for d in dates])
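# A minimal sketch of the simple-function alternative alluded to above: build
# the same OHLCV record array straight from a ``pandas.DataFrame`` without
# subclassing ``np.recarray``. The column names assume the usual
# pandas-datareader layout (Date/Open/High/Low/Close/Volume).
def quotes_from_df(df: pd.DataFrame) -> np.recarray:
    df = df.reset_index().rename(columns={
        'Date': 'time', 'Open': 'open', 'High': 'high',
        'Low': 'low', 'Close': 'close', 'Volume': 'volume',
    })
    # timestamps as floats, mirroring ``BaseQuotes.convert_dates()``
    df['time'] = [d.timestamp() for d in df['time']]
    df.insert(0, 'id', df.index)
    return df[
        ['id', 'time', 'open', 'high', 'low', 'close', 'volume']
    ].to_records(index=False)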
class SymbolType(Enum):
FOREX = auto()
CFD = auto()
FUTURES = auto()
SHARES = auto()
class Symbol:
FOREX = SymbolType.FOREX
CFD = SymbolType.CFD
FUTURES = SymbolType.FUTURES
SHARES = SymbolType.SHARES
def __init__(self, ticker, mode, tick_size=0, tick_value=None):
self.ticker = ticker
self.mode = mode
if self.mode in [self.FOREX, self.CFD]:
# number of units of the commodity, currency
# or financial asset in one lot
self.contract_size = 100_000 # (100000 == 1 Lot)
elif self.mode == self.FUTURES:
# cost of a single price change point ($10) /
# one minimum price movement
self.tick_value = tick_value
# minimum price change step (0.0001)
self.tick_size = tick_size
if isinstance(tick_size, float):
self.digits = len(str(tick_size).split('.')[1])
else:
self.digits = 0
def __repr__(self):
return 'Symbol (%s | %s)' % (self.ticker, self.mode)
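# Hypothetical usage sketch (tickers and contract specs are illustrative only):
#
# eurusd = Symbol('EUR/USD', Symbol.FOREX, tick_size=0.0001)  # digits == 4
# cl_fut = Symbol('CL', Symbol.FUTURES, tick_size=0.01, tick_value=10.0)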
class Indicator:
def __init__(
self, label=None, window=None, data=None, tp=None, base=None, **kwargs
):
self.label = label
self.window = window
self.data = data or [0]
self.type = tp or ChartType.LINE
self.base = base or {'linewidth': 0.5, 'color': 'black'}
self.lineStyle = {'linestyle': '-', 'linewidth': 0.5, 'color': 'blue'}
self.lineStyle.update(kwargs)
# This creates a global array that seems to be shared between all
# charting UI components
Quotes = BaseQuotes()
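# Sketched usage of the shared global (names are illustrative): a loader calls
# ``Quotes.new(df)`` to copy an OHLCV frame in, after which every chart widget
# reads the same array, e.g. ``Quotes.close`` or ``Quotes[-1].time``.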

View File

@ -0,0 +1,79 @@
"""
Real-time quotes charting components
"""
import pyqtgraph as pg
from PyQt5 import QtCore, QtGui
class SampleLegendItem(pg.graphicsItems.LegendItem.ItemSample):
def paint(self, p, *args):
p.setRenderHint(p.Antialiasing)
if isinstance(self.item, tuple):
positive = self.item[0].opts
negative = self.item[1].opts
p.setPen(pg.mkPen(positive['pen']))
p.setBrush(pg.mkBrush(positive['brush']))
p.drawPolygon(
QtGui.QPolygonF(
[
QtCore.QPointF(0, 0),
QtCore.QPointF(18, 0),
QtCore.QPointF(18, 18),
]
)
)
p.setPen(pg.mkPen(negative['pen']))
p.setBrush(pg.mkBrush(negative['brush']))
p.drawPolygon(
QtGui.QPolygonF(
[
QtCore.QPointF(0, 0),
QtCore.QPointF(0, 18),
QtCore.QPointF(18, 18),
]
)
)
else:
opts = self.item.opts
p.setPen(pg.mkPen(opts['pen']))
p.drawRect(0, 10, 18, 0.5)
class CenteredTextItem(QtGui.QGraphicsTextItem):
def __init__(
self,
text='',
parent=None,
pos=(0, 0),
pen=None,
brush=None,
valign=None,
opacity=0.1,
):
super().__init__(text, parent)
self.pen = pen
self.brush = brush
self.opacity = opacity
self.valign = valign
self.text_flags = QtCore.Qt.AlignCenter
self.setPos(*pos)
self.setFlag(self.ItemIgnoresTransformations)
def boundingRect(self): # noqa
r = super().boundingRect()
if self.valign == QtCore.Qt.AlignTop:
return QtCore.QRectF(-r.width() / 2, -37, r.width(), r.height())
elif self.valign == QtCore.Qt.AlignBottom:
return QtCore.QRectF(-r.width() / 2, 15, r.width(), r.height())
return r  # fall back to the default rect for any other alignment
def paint(self, p, option, widget):
p.setRenderHint(p.Antialiasing, False)
p.setRenderHint(p.TextAntialiasing, True)
p.setPen(self.pen)
if self.brush.style() != QtCore.Qt.NoBrush:
p.setOpacity(self.opacity)
p.fillRect(option.rect, self.brush)
p.setOpacity(1)
p.drawText(option.rect, self.text_flags, self.toPlainText())

View File

@ -0,0 +1,36 @@
"""Constants."""
from enum import Enum, auto
__all__ = ('ChartType', 'TimeFrame')
class ChartType(Enum):
BAR = auto()
CANDLESTICK = auto()
LINE = auto()
class TimeFrame(Enum):
M1 = auto()
M5 = auto()
M15 = auto()
M30 = auto()
H1 = auto()
H4 = auto()
D1 = auto()
W1 = auto()
MN = auto()
ANNUAL_PERIOD = 252 # number of trading days in a year
# # TODO: 6.5 - US trading hours (trading session); fix it for fx
# ANNUALIZATION_FACTORS = {
# TimeFrame.M1: int(252 * 6.5 * 60),
# TimeFrame.M5: int(252 * 6.5 * 12),
# TimeFrame.M15: int(252 * 6.5 * 4),
# TimeFrame.M30: int(252 * 6.5 * 2),
# TimeFrame.H1: int(252 * 6.5),
# TimeFrame.D1: 252,
# }
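# worked example of the factors above, assuming the 6.5 hour US session:
# TimeFrame.M5 -> 252 * 6.5 * 12 = 19_656 bars per year
# TimeFrame.H1 -> 252 * 6.5 = 1_638 bars per year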

View File

@ -0,0 +1,184 @@
"""Parser."""
import logging
import os.path
import pickle
import datetime
import numpy as np
import pandas as pd
import pandas_datareader.data as web
from pandas_datareader._utils import RemoteDataError
from pandas_datareader.data import (
get_data_quandl,
get_data_yahoo,
get_data_alphavantage,
)
from pandas_datareader.nasdaq_trader import get_nasdaq_symbols
from pandas_datareader.exceptions import ImmediateDeprecationError
from .base import Quotes, Symbol
from .utils import get_data_path, timeit
__all__ = (
'YahooQuotesLoader',
'QuandleQuotesLoader',
'get_symbols',
'get_quotes',
)
logger = logging.getLogger(__name__)
class QuotesLoader:
source = None
timeframe = '1D'
sort_index = False
default_tf = None
name_format = '%(symbol)s_%(tf)s_%(date_from)s_%(date_to)s.%(ext)s'
@classmethod
def _get(cls, symbol, date_from, date_to):
quotes = web.DataReader(
symbol, cls.source, start=date_from, end=date_to
)
if cls.sort_index:
quotes.sort_index(inplace=True)
return quotes
@classmethod
def _get_file_path(cls, symbol, tf, date_from, date_to):
fname = cls.name_format % {
'symbol': symbol,
'tf': tf,
'date_from': date_from.isoformat(),
'date_to': date_to.isoformat(),
'ext': 'qdom',
}
return os.path.join(get_data_path('stock_data'), fname)
@classmethod
def _save_to_disk(cls, fpath, data):
logger.debug('Saving quotes to a file: %s', fpath)
with open(fpath, 'wb') as f:
pickle.dump(data, f, pickle.HIGHEST_PROTOCOL)
d = pickle.load(f)
@classmethod
def _load_from_disk(cls, fpath):
logger.debug('Loading quotes from a file: %s', fpath)
with open(fpath, 'rb') as f:
breakpoint()
data = pickle.load(f)
@classmethod
@timeit
def get_quotes(
cls,
symbol: Symbol,
date_from: datetime.datetime,
date_to: datetime.datetime,
) -> Quotes:
"""Retrieve quotes data from a provider and return a ``numpy.ndarray`` subtype.
"""
quotes = None
fpath = cls._get_file_path(symbol, cls.timeframe, date_from, date_to)
# if os.path.exists(fpath):
# quotes = Quotes.new(cls._load_from_disk(fpath))
# else:
quotes_raw = cls._get(symbol, date_from, date_to)
# quotes = Quotes.new(
# quotes_raw, source=cls.source, default_tf=cls.default_tf
# )
# cls._save_to_disk(fpath, quotes)
return quotes_raw
class YahooQuotesLoader(QuotesLoader):
source = 'yahoo'
@classmethod
def _get(cls, symbol, date_from, date_to):
return get_data_yahoo(symbol, date_from, date_to)
class QuandleQuotesLoader(QuotesLoader):
source = 'quandle'
@classmethod
def _get(cls, symbol, date_from, date_to):
quotes = get_data_quandl(symbol, date_from, date_to)
quotes.sort_index(inplace=True)
return quotes
class AlphaVantageQuotesLoader(QuotesLoader):
source = 'alphavantage'
api_key = 'demo'
@classmethod
def _get(cls, symbol, date_from, date_to):
quotes = get_data_alphavantage(
symbol, date_from, date_to, api_key=cls.api_key
)
return quotes
class StooqQuotesLoader(QuotesLoader):
source = 'stooq'
sort_index = True
default_tf = 1440
class IEXQuotesLoader(QuotesLoader):
source = 'iex'
@classmethod
def _get(cls, symbol, date_from, date_to):
quotes = web.DataReader(
symbol, cls.source, start=date_from, end=date_to
)
quotes['Date'] = pd.to_datetime(quotes.index)
return quotes
class RobinhoodQuotesLoader(QuotesLoader):
source = 'robinhood'
def get_symbols():
fpath = os.path.join(get_data_path('stock_data'), 'symbols.qdom')
if os.path.exists(fpath):
with open(fpath, 'rb') as f:
symbols = pickle.load(f)
else:
symbols = get_nasdaq_symbols()
symbols.reset_index(inplace=True)
with open(fpath, 'wb') as f:
pickle.dump(symbols, f, pickle.HIGHEST_PROTOCOL)
return symbols
def get_quotes(*args, **kwargs):
quotes = []
# these loaders currently don't work:
# GoogleQuotesLoader, QuandleQuotesLoader,
# AlphaVantageQuotesLoader, RobinhoodQuotesLoader
loaders = [YahooQuotesLoader, IEXQuotesLoader, StooqQuotesLoader]
while loaders:
loader = loaders.pop(0)
try:
quotes = loader.get_quotes(*args, **kwargs)
break
except (RemoteDataError, ImmediateDeprecationError) as e:
logger.error('get_quotes => error: %r', e)
return quotes
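# Hypothetical usage sketch (symbol and dates are illustrative only):
#
# quotes_df = get_quotes(
#     'AAPL',
#     datetime.datetime(2020, 1, 1),
#     datetime.datetime(2020, 6, 1),
# )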

View File

@ -0,0 +1,350 @@
"""Performance."""
import codecs
import json
from collections import OrderedDict, defaultdict
import numpy as np
from .base import Quotes
from .const import ANNUAL_PERIOD
from .utils import fromtimestamp, get_resource_path
__all__ = (
'BriefPerformance',
'Performance',
'Stats',
'REPORT_COLUMNS',
'REPORT_ROWS',
)
REPORT_COLUMNS = ('All', 'Long', 'Short', 'Market')
with codecs.open(
get_resource_path('report_rows.json'), mode='r', encoding='utf-8'
) as f:
REPORT_ROWS = OrderedDict(json.load(f))
class Stats(np.recarray):
def __new__(cls, positions, shape=None, dtype=None, order='C'):
shape = shape or (len(positions['All']),)
dtype = np.dtype(
[
('type', object),
('symbol', object),
('volume', float),
('open_time', float),
('close_time', float),
('open_price', float),
('close_price', float),
('total_profit', float),
('entry_name', object),
('exit_name', object),
('status', object),
('comment', object),
('abs', float),
('perc', float),
('bars', float),
('on_bar', float),
('mae', float),
('mfe', float),
]
)
dt = [(col, dtype) for col in REPORT_COLUMNS]
return np.ndarray.__new__(cls, shape, (np.record, dt), order=order)
def __init__(self, positions, **kwargs):
for col, _positions in positions.items():
for i, p in enumerate(_positions):
self._add_position(p, col, i)
def _add_position(self, p, col, i):
self[col][i].type = p.type
self[col][i].symbol = p.symbol
self[col][i].volume = p.volume
self[col][i].open_time = p.open_time
self[col][i].close_time = p.close_time
self[col][i].open_price = p.open_price
self[col][i].close_price = p.close_price
self[col][i].total_profit = p.total_profit
self[col][i].entry_name = p.entry_name
self[col][i].exit_name = p.exit_name
self[col][i].status = p.status
self[col][i].comment = p.comment
self[col][i].abs = p.profit
self[col][i].perc = p.profit_perc
quotes_on_trade = Quotes[p.id_bar_open : p.id_bar_close]
if not quotes_on_trade.size:
# if position was opened and closed on the last bar
quotes_on_trade = Quotes[p.id_bar_open : p.id_bar_close + 1]
kwargs = {
'low': quotes_on_trade.low.min(),
'high': quotes_on_trade.high.max(),
}
self[col][i].mae = p.calc_mae(**kwargs)
self[col][i].mfe = p.calc_mfe(**kwargs)
bars = p.id_bar_close - p.id_bar_open
self[col][i].bars = bars
self[col][i].on_bar = p.profit_perc / bars
class BriefPerformance(np.recarray):
def __new__(cls, shape=None, dtype=None, order='C'):
dt = np.dtype(
[
('kwargs', object),
('net_profit_abs', float),
('net_profit_perc', float),
('year_profit', float),
('win_average_profit_perc', float),
('loss_average_profit_perc', float),
('max_drawdown_abs', float),
('total_trades', int),
('win_trades_abs', int),
('win_trades_perc', float),
('profit_factor', float),
('recovery_factor', float),
('payoff_ratio', float),
]
)
shape = shape or (1,)
return np.ndarray.__new__(cls, shape, (np.record, dt), order=order)
def _days_count(self, positions):
if hasattr(self, 'days'):
return self.days
self.days = (
(
fromtimestamp(positions[-1].close_time)
- fromtimestamp(positions[0].open_time)
).days
if positions
else 1
)
return self.days
def add(self, initial_balance, positions, i, kwargs):
position_count = len(positions)
profit = np.recarray(
(position_count,), dtype=[('abs', float), ('perc', float)]
)
for n, position in enumerate(positions):
profit[n].abs = position.profit
profit[n].perc = position.profit_perc
s = self[i]
s.kwargs = kwargs
s.net_profit_abs = np.sum(profit.abs)
s.net_profit_perc = np.sum(profit.perc)
days = self._days_count(positions)
gain_factor = (s.net_profit_abs + initial_balance) / initial_balance
s.year_profit = (gain_factor ** (365 / days) - 1) * 100
s.win_average_profit_perc = np.mean(profit.perc[profit.perc > 0])
s.loss_average_profit_perc = np.mean(profit.perc[profit.perc < 0])
s.max_drawdown_abs = profit.abs.min()
s.total_trades = position_count
wins = profit.abs[profit.abs > 0]
loss = profit.abs[profit.abs < 0]
s.win_trades_abs = len(wins)
s.win_trades_perc = round(s.win_trades_abs / s.total_trades * 100, 2)
s.profit_factor = abs(np.sum(wins) / np.sum(loss))
s.recovery_factor = abs(s.net_profit_abs / s.max_drawdown_abs)
s.payoff_ratio = abs(np.mean(wins) / np.mean(loss))
class Performance:
"""Performance Metrics."""
rows = REPORT_ROWS
columns = REPORT_COLUMNS
def __init__(self, initial_balance, stats, positions):
self._data = {}
for col in self.columns:
column = type('Column', (object,), dict.fromkeys(self.rows, 0))
column.initial_balance = initial_balance
self._data[col] = column
self.calculate(column, stats[col], positions[col])
def __getitem__(self, col):
return self._data[col]
def _calc_trade_series(self, col, positions):
win_in_series, loss_in_series = 0, 0
for i, p in enumerate(positions):
if p.profit >= 0:
win_in_series += 1
loss_in_series = 0
if win_in_series > col.win_in_series:
col.win_in_series = win_in_series
else:
win_in_series = 0
loss_in_series += 1
if loss_in_series > col.loss_in_series:
col.loss_in_series = loss_in_series
def calculate(self, col, stats, positions):
self._calc_trade_series(col, positions)
col.total_trades = len(positions)
profit_abs = stats[np.flatnonzero(stats.abs)].abs
profit_perc = stats[np.flatnonzero(stats.perc)].perc
bars = stats[np.flatnonzero(stats.bars)].bars
on_bar = stats[np.flatnonzero(stats.on_bar)].on_bar
gt_zero_abs = stats[stats.abs > 0].abs
gt_zero_perc = stats[stats.perc > 0].perc
win_bars = stats[stats.perc > 0].bars
lt_zero_abs = stats[stats.abs < 0].abs
lt_zero_perc = stats[stats.perc < 0].perc
los_bars = stats[stats.perc < 0].bars
col.average_profit_abs = np.mean(profit_abs) if profit_abs.size else 0
col.average_profit_perc = (
np.mean(profit_perc) if profit_perc.size else 0
)
col.bars_on_trade = np.mean(bars) if bars.size else 0
col.bar_profit = np.mean(on_bar) if on_bar.size else 0
col.win_average_profit_abs = (
np.mean(gt_zero_abs) if gt_zero_abs.size else 0
)
col.win_average_profit_perc = (
np.mean(gt_zero_perc) if gt_zero_perc.size else 0
)
col.win_bars_on_trade = np.mean(win_bars) if win_bars.size else 0
col.loss_average_profit_abs = (
np.mean(lt_zero_abs) if lt_zero_abs.size else 0
)
col.loss_average_profit_perc = (
np.mean(lt_zero_perc) if lt_zero_perc.size else 0
)
col.loss_bars_on_trade = np.mean(los_bars) if los_bars.size else 0
col.win_trades_abs = len(gt_zero_abs)
col.win_trades_perc = (
round(col.win_trades_abs / col.total_trades * 100, 2)
if col.total_trades
else 0
)
col.loss_trades_abs = len(lt_zero_abs)
col.loss_trades_perc = (
round(col.loss_trades_abs / col.total_trades * 100, 2)
if col.total_trades
else 0
)
col.total_profit = np.sum(gt_zero_abs)
col.total_loss = np.sum(lt_zero_abs)
col.net_profit_abs = np.sum(stats.abs)
col.net_profit_perc = np.sum(stats.perc)
col.total_mae = np.sum(stats.mae)
col.total_mfe = np.sum(stats.mfe)
# https://financial-calculators.com/roi-calculator
days = (
(
fromtimestamp(positions[-1].close_time)
- fromtimestamp(positions[0].open_time)
).days
if positions
else 1
)
gain_factor = (
col.net_profit_abs + col.initial_balance
) / col.initial_balance
col.year_profit = (gain_factor ** (365 / days) - 1) * 100
col.month_profit = (gain_factor ** (365 / days / 12) - 1) * 100
col.max_profit_abs = stats.abs.max()
col.max_profit_perc = stats.perc.max()
col.max_profit_abs_day = fromtimestamp(
stats.close_time[stats.abs == col.max_profit_abs][0]
)
col.max_profit_perc_day = fromtimestamp(
stats.close_time[stats.perc == col.max_profit_perc][0]
)
col.max_drawdown_abs = stats.abs.min()
col.max_drawdown_perc = stats.perc.min()
col.max_drawdown_abs_day = fromtimestamp(
stats.close_time[stats.abs == col.max_drawdown_abs][0]
)
col.max_drawdown_perc_day = fromtimestamp(
stats.close_time[stats.perc == col.max_drawdown_perc][0]
)
col.profit_factor = (
abs(col.total_profit / col.total_loss) if col.total_loss else 0
)
col.recovery_factor = (
abs(col.net_profit_abs / col.max_drawdown_abs)
if col.max_drawdown_abs
else 0
)
col.payoff_ratio = (
abs(col.win_average_profit_abs / col.loss_average_profit_abs)
if col.loss_average_profit_abs
else 0
)
col.sharpe_ratio = annualized_sharpe_ratio(stats)
col.sortino_ratio = annualized_sortino_ratio(stats)
# TODO:
col.alpha_ratio = np.nan
col.beta_ratio = np.nan
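# Worked illustration of the ROI annualization above (made-up numbers):
# initial_balance = 100_000, net_profit_abs = 10_000, days = 182
# => gain_factor = 1.1 and year_profit = (1.1 ** (365 / 182) - 1) * 100 ≈ 21.1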
def day_percentage_returns(stats):
days = defaultdict(float)
trade_count = np.count_nonzero(stats)
if trade_count == 1:
# market position, so returns should be based on quotes
# calculate percentage changes on a list of quotes
changes = np.diff(Quotes.close) / Quotes[:-1].close * 100
data = np.column_stack((Quotes[1:].time, changes)) # np.c_
else:
# slice `:trade_count` to exclude zero values in long/short columns
data = stats[['close_time', 'perc']][:trade_count]
# FIXME: [FutureWarning] https://github.com/numpy/numpy/issues/8383
for close_time, perc in data:
days[fromtimestamp(close_time).date()] += perc
returns = np.array(list(days.values()))
# if np.count_nonzero(stats) == 1:
# import pudb; pudb.set_trace()
if len(returns) >= ANNUAL_PERIOD:
return returns
_returns = np.zeros(ANNUAL_PERIOD)
_returns[: len(returns)] = returns
return _returns
def annualized_sharpe_ratio(stats):
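# annualized Sharpe over daily percentage returns, assuming a zero
# risk-free rate: sqrt(ANNUAL_PERIOD) * mean(returns) / std(returns)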
# risk_free = 0
returns = day_percentage_returns(stats)
return np.sqrt(ANNUAL_PERIOD) * np.mean(returns) / np.std(returns)
def annualized_sortino_ratio(stats):
# http://www.cmegroup.com/education/files/sortino-a-sharper-ratio.pdf
required_return = 0
returns = day_percentage_returns(stats)
mask = returns < required_return  # boolean mask (avoids the deprecated list-index form)
tdd = np.zeros(len(returns))
tdd[mask] = returns[mask] # keep only negative values and zeros
# "or 1" to prevent division by zero, if we don't have negative returns
tdd = np.sqrt(np.mean(np.square(tdd))) or 1
return np.sqrt(ANNUAL_PERIOD) * np.mean(returns) / tdd

View File

@ -0,0 +1,412 @@
"""Portfolio."""
import itertools
from contextlib import contextmanager
from enum import Enum, auto
import numpy as np
from .base import Quotes
from .performance import BriefPerformance, Performance, Stats
from .utils import fromtimestamp, timeit
__all__ = ('Portfolio', 'Position', 'Order')
class BasePortfolio:
def __init__(self, balance=100_000, leverage=5):
self._initial_balance = balance
self.balance = balance
self.equity = None
# TODO:
# self.cash
# self.currency
self.leverage = leverage
self.positions = []
self.balance_curve = None
self.equity_curve = None
self.long_curve = None
self.short_curve = None
self.mae_curve = None
self.mfe_curve = None
self.stats = None
self.performance = None
self.brief_performance = None
def clear(self):
self.positions.clear()
self.balance = self._initial_balance
@property
def initial_balance(self):
return self._initial_balance
@initial_balance.setter
def initial_balance(self, value):
self._initial_balance = value
def add_position(self, position):
position.ticket = len(self.positions) + 1
self.positions.append(position)
def position_count(self, tp=None):
if tp == Order.BUY:
return len([p for p in self.positions if p.type == Order.BUY])
elif tp == Order.SELL:
return len([p for p in self.positions if p.type == Order.SELL])
return len(self.positions)
def _close_open_positions(self):
for p in self.positions:
if p.status == Position.OPEN:
p.close(
price=Quotes[-1].open,
volume=p.volume,
time=Quotes[-1].time
)
def _get_market_position(self):
p = self.positions[0] # real positions
p = Position(
symbol=p.symbol,
ptype=Order.BUY,
volume=p.volume,
price=Quotes[0].open,
open_time=Quotes[0].time,
close_price=Quotes[-1].close,
close_time=Quotes[-1].time,
id_bar_close=len(Quotes) - 1,
status=Position.CLOSED,
)
p.profit = p.calc_profit(close_price=Quotes[-1].close)
p.profit_perc = p.profit / self._initial_balance * 100
return p
def _calc_equity_curve(self):
"""Equity curve."""
self.equity_curve = np.zeros_like(Quotes.time)
for i, p in enumerate(self.positions):
balance = np.sum(self.stats['All'][:i].abs)
for ibar in range(p.id_bar_open, p.id_bar_close):
profit = p.calc_profit(close_price=Quotes[ibar].close)
self.equity_curve[ibar] = balance + profit
# taking into account the real balance after the last trade
self.equity_curve[-1] = self.balance_curve[-1]
def _calc_buy_and_hold_curve(self):
"""Buy and Hold."""
p = self._get_market_position()
self.buy_and_hold_curve = np.array(
[p.calc_profit(close_price=price) for price in Quotes.close]
)
def _calc_long_short_curves(self):
"""Only Long/Short positions curve."""
self.long_curve = np.zeros_like(Quotes.time)
self.short_curve = np.zeros_like(Quotes.time)
for i, p in enumerate(self.positions):
if p.type == Order.BUY:
name = 'Long'
curve = self.long_curve
else:
name = 'Short'
curve = self.short_curve
balance = np.sum(self.stats[name][:i].abs)
# Calculate equity for this position
for ibar in range(p.id_bar_open, p.id_bar_close):
profit = p.calc_profit(close_price=Quotes[ibar].close)
curve[ibar] = balance + profit
for name, curve in [
('Long', self.long_curve),
('Short', self.short_curve),
]:
curve[:] = fill_zeros_with_last(curve)
# taking into account the real balance after the last trade
curve[-1] = np.sum(self.stats[name].abs)
def _calc_curves(self):
self.mae_curve = np.cumsum(self.stats['All'].mae)
self.mfe_curve = np.cumsum(self.stats['All'].mfe)
self.balance_curve = np.cumsum(self.stats['All'].abs)
self._calc_equity_curve()
self._calc_buy_and_hold_curve()
self._calc_long_short_curves()
@contextmanager
def optimization_mode(self):
"""Backup and restore current balance and positions."""
# mode='general',
self.backup_balance = self.balance
self.backup_positions = self.positions.copy()
self.balance = self._initial_balance
self.positions.clear()
yield
self.balance = self.backup_balance
self.positions = self.backup_positions.copy()
self.backup_positions.clear()
@timeit
def run_optimization(self, strategy, params):
keys = list(params.keys())
vals = list(params.values())
variants = list(itertools.product(*vals))
self.brief_performance = BriefPerformance(shape=(len(variants),))
with self.optimization_mode():
for i, vals in enumerate(variants):
kwargs = {keys[n]: val for n, val in enumerate(vals)}
strategy.start(**kwargs)
self._close_open_positions()
self.brief_performance.add(
self._initial_balance, self.positions, i, kwargs
)
self.clear()
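# Hypothetical usage sketch (the strategy object and the parameter grid are
# illustrative, not part of this module):
#
# Portfolio.run_optimization(
#     my_strategy,  # any object exposing .start(**kwargs)
#     {'fast_window': [5, 10, 20], 'slow_window': [50, 100]},
# )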
@timeit
def summarize(self):
self._close_open_positions()
positions = {
'All': self.positions,
'Long': [p for p in self.positions if p.type == Order.BUY],
'Short': [p for p in self.positions if p.type == Order.SELL],
'Market': [self._get_market_position()],
}
self.stats = Stats(positions)
self.performance = Performance(
self._initial_balance, self.stats, positions
)
self._calc_curves()
Portfolio = BasePortfolio()
class PositionStatus(Enum):
OPEN = auto()
CLOSED = auto()
CANCELED = auto()
class Position:
OPEN = PositionStatus.OPEN
CLOSED = PositionStatus.CLOSED
CANCELED = PositionStatus.CANCELED
__slots__ = (
'type',
'symbol',
'ticket',
'open_price',
'close_price',
'open_time',
'close_time',
'volume',
'sl',
'tp',
'status',
'profit',
'profit_perc',
'commis',
'id_bar_open',
'id_bar_close',
'entry_name',
'exit_name',
'total_profit',
'comment',
)
def __init__(
self,
symbol,
ptype,
price,
volume,
open_time,
sl=None,
tp=None,
status=OPEN,
entry_name='',
exit_name='',
comment='',
**kwargs,
):
self.type = ptype
self.symbol = symbol
self.ticket = None
self.open_price = price
self.close_price = None
self.open_time = open_time
self.close_time = None
self.volume = volume
self.sl = sl
self.tp = tp
self.status = status
self.profit = None
self.profit_perc = None
self.commis = None
self.id_bar_open = np.where(Quotes.time == self.open_time)[0][0]
self.id_bar_close = None
self.entry_name = entry_name
self.exit_name = exit_name
self.total_profit = 0
self.comment = comment
# self.bars_on_trade = None
# self.is_profitable = False
for k, v in kwargs.items():
setattr(self, k, v)
def __repr__(self):
_type = 'LONG' if self.type == Order.BUY else 'SHORT'
time = fromtimestamp(self.open_time).strftime('%d.%m.%y %H:%M')
return '%s/%s/[%s - %.4f]' % (
self.status.name,
_type,
time,
self.open_price,
)
def close(self, price, time, volume=None):
# TODO: allow closing only part of the volume
self.close_price = price
self.close_time = time
self.id_bar_close = np.where(Quotes.time == self.close_time)[0][0]
self.profit = self.calc_profit(volume=volume or self.volume)
self.profit_perc = self.profit / Portfolio.balance * 100
Portfolio.balance += self.profit
self.total_profit = Portfolio.balance - Portfolio.initial_balance
self.status = self.CLOSED
def calc_profit(self, volume=None, close_price=None):
# TODO: rewrite it
close_price = close_price or self.close_price
volume = volume or self.volume
factor = 1 if self.type == Order.BUY else -1
price_delta = (close_price - self.open_price) * factor
if self.symbol.mode in [self.symbol.FOREX, self.symbol.CFD]:
# Margin: Lots*Contract_Size/Leverage
if (
self.symbol.mode == self.symbol.FOREX
and self.symbol.ticker[:3] == 'USD'
):
# Example: 'USD/JPY'
# profit        point      position     current
# in points  *  size    *  volume    /  rate
# 1 * 0.0001 * 100000 / 1.00770
# USD/CHF: 1*0.0001*100000/1.00770 = $9.92
# 0.01
# USD/JPY: 1*0.01*100000/121.35 = $8.24
# (1.00770-1.00595)/0.0001 = 17.5 points
# (1.00770-1.00595)/0.0001*0.0001*100000*1/1.00770*1
_points = price_delta / self.symbol.tick_size
_profit = (
_points
* self.symbol.tick_size
* self.symbol.contract_size
/ close_price
* volume
)
elif (
self.symbol.mode == self.symbol.FOREX
and self.symbol.ticker[-3:] == 'USD'
):
# Example: 'EUR/USD'
# Profit: (close_price-open_price)*Contract_Size*Lots
# EUR/USD BUY: (1.05875-1.05850)*100000*1 = +$25 (commission not included)
_profit = price_delta * self.symbol.contract_size * volume
else:
# Cross rates. Example: 'GBP/CHF'
# point value =
# position volume * point size * base currency rate vs USD / current cross rate
# GBP/CHF: 100000*0.0001*1.48140/1.48985 = $9.94
# TODO: temporary patch (same as the previous choice) -
# in the future connect to some quotes provider and get rates
_profit = price_delta * self.symbol.contract_size * volume
elif self.symbol.mode == self.symbol.FUTURES:
# Margin: Lots *InitialMargin*Percentage/100
# Profit: (close_price-open_price)*TickPrice/TickSize*Lots
# CL BUY: (46.35-46.30)*10/0.01*1 = $50 (commission not included!)
# EuroFX(6E) BUY:(1.05875-1.05850)*12.50/0.0001*1 =$31.25 (excl. commission)
# RTS (RIH5) BUY:(84510-84500)*12.26506/10*1 = @12.26506 (excl. commission)
# E-miniSP500 BUY:(2065.95-2065.25)*12.50/0.25 = $35 (excl. commission)
# http://americanclearing.ru/specifications.php
# http://www.moex.com/ru/contract.aspx?code=RTS-3.18
# http://www.cmegroup.com/trading/equity-index/us-index/e-mini-sandp500_contract_specifications.html
_profit = (
price_delta
* self.symbol.tick_value
/ self.symbol.tick_size
* volume
)
else:
# shares
_profit = price_delta * volume
return _profit
def calc_mae(self, low, high):
"""Return [MAE] Maximum Adverse Excursion."""
if self.type == Order.BUY:
return self.calc_profit(close_price=low)
return self.calc_profit(close_price=high)
def calc_mfe(self, low, high):
"""Return [MFE] Maximum Favorable Excursion."""
if self.type == Order.BUY:
return self.calc_profit(close_price=high)
return self.calc_profit(close_price=low)
class OrderType(Enum):
BUY = auto()
SELL = auto()
BUY_LIMIT = auto()
SELL_LIMIT = auto()
BUY_STOP = auto()
SELL_STOP = auto()
class Order:
BUY = OrderType.BUY
SELL = OrderType.SELL
BUY_LIMIT = OrderType.BUY_LIMIT
SELL_LIMIT = OrderType.SELL_LIMIT
BUY_STOP = OrderType.BUY_STOP
SELL_STOP = OrderType.SELL_STOP
@staticmethod
def open(symbol, otype, price, volume, time, sl=None, tp=None):
# TODO: add margin calculation
# and if the margin is not enough - do not open the position
position = Position(
symbol=symbol,
ptype=otype,
price=price,
volume=volume,
open_time=time,
sl=sl,
tp=tp,
)
Portfolio.add_position(position)
return position
@staticmethod
def close(position, price, time, volume=None):
# FIXME: only part of the volume may be closed, but
# the position status will still be changed to CLOSED
position.close(price=price, time=time, volume=volume)
def fill_zeros_with_last(arr):
"""Fill empty(zero) elements (between positions)."""
index = np.arange(len(arr))
index[arr == 0] = 0
index = np.maximum.accumulate(index)
return arr[index]
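# Worked example of the forward-fill trick above:
# fill_zeros_with_last(np.array([0, 3, 0, 0, 5, 0])) -> array([0, 3, 3, 3, 5, 5])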

View File

@ -1,3 +1,19 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import json
from collections import defaultdict

View File

@ -1,3 +1,18 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
Watchlist management commands.

View File

@ -1,3 +1,4 @@
# no pypi package for tractor (yet)
# we require the asyncio-via-guest-mode dev branch
-e git+git://github.com/goodboy/tractor.git@infect_asyncio#egg=tractor
-e git+git://github.com/pikers/pyqtgraph.git@use_qt_inverted#egg=pyqtgraph

View File

@ -1,7 +1,7 @@
#!/usr/bin/env python
# piker: trading gear for hackers
# Copyright 2018 Tyler Goodlet
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of piker0)
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -16,7 +16,7 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
from setuptools import setup
from setuptools import setup, find_packages
with open('README.rst', encoding='utf-8') as f:
readme = f.read()
@ -32,12 +32,7 @@ setup(
maintainer='Tyler Goodlet',
url='https://github.com/pikers/piker',
platforms=['linux'],
packages=[
'piker',
'piker.brokers',
'piker.ui',
'piker.testing',
],
packages=find_packages(),
entry_points={
'console_scripts': [
'piker = piker.cli:cli',
@ -47,20 +42,32 @@ setup(
install_requires=[
'click',
'colorlog',
'trio',
'attrs',
'async_generator',
'pygments',
'colorama', # numba traceback coloring
# async
'trio',
'trio-websocket',
# 'tractor', # from github currently
'async_generator',
# brokers
'asks',
'asks==2.4.8',
'ib_insync',
# numerics
'arrow', # better datetimes
'cython',
'numpy',
'numba',
'pandas',
'msgpack-numpy',
# UI
'PyQt5',
'pyqtgraph',
'qdarkstyle',
# tsdbs
'pymarketstore',
@ -71,13 +78,11 @@ setup(
keywords=["async", "trading", "finance", "quant", "charting"],
classifiers=[
'Development Status :: 3 - Alpha',
'License :: OSI Approved :: Mozilla Public License 2.0 (MPL 2.0)',
'License :: OSI Approved :: ',
'Operating System :: POSIX :: Linux',
"Programming Language :: Python :: Implementation :: CPython",
# "Programming Language :: Python :: Implementation :: PyPy",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
'Intended Audience :: Financial and Insurance Industry',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',

View File

@ -0,0 +1,102 @@
"""
Resource list for mucking with DPIs on multiple screens:
- https://stackoverflow.com/questions/42141354/convert-pixel-size-to-point-size-for-fonts-on-multiple-platforms
- https://stackoverflow.com/questions/25761556/qt5-font-rendering-different-on-various-platforms/25929628#25929628
- https://doc.qt.io/qt-5/highdpi.html
- https://stackoverflow.com/questions/20464814/changing-dpi-scaling-size-of-display-make-qt-applications-font-size-get-rendere
- https://stackoverflow.com/a/20465247
- https://doc.qt.io/archives/qt-4.8/qfontmetrics.html#width
- https://forum.qt.io/topic/54136/how-do-i-get-the-qscreen-my-widget-is-on-qapplication-desktop-screen-returns-a-qwidget-and-qobject_cast-qscreen-returns-null/3
- https://forum.qt.io/topic/43625/point-sizes-are-they-reliable/4
- https://stackoverflow.com/questions/16561879/what-is-the-difference-between-logicaldpix-and-physicaldpix-in-qt
- https://doc.qt.io/qt-5/qguiapplication.html#screenAt
"""
from pyqtgraph import QtGui
from PyQt5.QtCore import (
Qt, QCoreApplication
)
# Proper high DPI scaling is available in Qt >= 5.6.0. This attribute
# must be set before creating the application
if hasattr(Qt, 'AA_EnableHighDpiScaling'):
QCoreApplication.setAttribute(Qt.AA_EnableHighDpiScaling, True)
if hasattr(Qt, 'AA_UseHighDpiPixmaps'):
QCoreApplication.setAttribute(Qt.AA_UseHighDpiPixmaps, True)
app = QtGui.QApplication([])
window = QtGui.QMainWindow()
main_widget = QtGui.QWidget()
window.setCentralWidget(main_widget)
window.show()
pxr = main_widget.devicePixelRatioF()
# screen_num = app.desktop().screenNumber()
# screen = app.screens()[screen_num]
screen = app.screenAt(main_widget.geometry().center())
name = screen.name()
size = screen.size()
geo = screen.availableGeometry()
phydpi = screen.physicalDotsPerInch()
logdpi = screen.logicalDotsPerInch()
print(
# f'screen number: {screen_num}\n',
f'screen name: {name}\n'
f'screen size: {size}\n'
f'screen geometry: {geo}\n\n'
f'devicePixelRatioF(): {pxr}\n'
f'physical dpi: {phydpi}\n'
f'logical dpi: {logdpi}\n'
)
print('-'*50)
screen = app.primaryScreen()
name = screen.name()
size = screen.size()
geo = screen.availableGeometry()
phydpi = screen.physicalDotsPerInch()
logdpi = screen.logicalDotsPerInch()
print(
# f'screen number: {screen_num}\n',
f'screen name: {name}\n'
f'screen size: {size}\n'
f'screen geometry: {geo}\n\n'
f'devicePixelRatioF(): {pxr}\n'
f'physical dpi: {phydpi}\n'
f'logical dpi: {logdpi}\n'
)
# app-wide font
font = QtGui.QFont("Hack")
# use pixel size to be cross-resolution compatible?
font.setPixelSize(6)
fm = QtGui.QFontMetrics(font)
fontdpi = fm.fontDpi()
font_h = fm.height()
string = '10000'
str_br = fm.boundingRect(string)
str_w = str_br.width()
print(
# f'screen number: {screen_num}\n',
f'font dpi: {fontdpi}\n'
f'font height: {font_h}\n'
f'string bounding rect: {str_br}\n'
f'string width : {str_w}\n'
)
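# A follow-on sketch (not part of the scratch code above): convert a point size
# to pixels for this screen using the standard 72 points-per-inch relation and
# the logical DPI probed above.
def pt_to_px(pt: float, dpi: float) -> int:
    # 1 pt == 1/72 inch, so pixels = points * dots-per-inch / 72
    return round(pt * dpi / 72)

print(f'6pt at {logdpi} logical dpi -> {pt_to_px(6, logdpi)}px')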