Compare commits

883 Commits

binance_ws ... gitea_feat
(commit listing: 883 commits between the two branch heads; the rendered
table preserved only the SHA1 column, from 5e371f1d73 first listed to
5fdec8012d last listed, with the author and date cells empty)
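The same range can be enumerated locally with stock git; a minimal sketch,
assuming both branch heads are fetched::

    # count, then list, the commits on gitea_feat that are not on binance_ws
    git rev-list --count binance_ws..gitea_feat
    git log --oneline binance_ws..gitea_feat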
@@ -43,16 +43,21 @@ jobs:
       - name: Checkout
         uses: actions/checkout@v3
 
-      - name: Build DB container
-        run: docker build -t piker:elastic dockering/elastic
+      # elastic only
+      # - name: Build DB container
+      #   run: docker build -t piker:elastic dockering/elastic
 
       - name: Setup python
-        uses: actions/setup-python@v3
+        uses: actions/setup-python@v4
         with:
           python-version: '3.10'
 
+      # elastic only
+      # - name: Install dependencies
+      #   run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
+
       - name: Install dependencies
-        run: pip install -U .[es] -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
+        run: pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
 
       - name: Test suite
         run: pytest tests -rs
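The updated install and test steps can be mirrored outside CI; a minimal
sketch, assuming a Python 3.10 environment and a checkout of the repo
(the commands are taken verbatim from the ``run:`` lines above)::

    # plain install (the .[es] elastic extra is now commented out in CI)
    pip install -U . -r requirements-test.txt -r requirements.txt --upgrade-strategy eager
    # run the test suite with skip reasons reported
    pytest tests -rs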
README.rst | 309
			@ -1,235 +1,161 @@
 | 
			
		|||
piker
 | 
			
		||||
-----
 | 
			
		||||
trading gear for hackers.
 | 
			
		||||
trading gear for hackers
 | 
			
		||||
 | 
			
		||||
|gh_actions|
 | 
			
		||||
 | 
			
		||||
.. |gh_actions| image:: https://img.shields.io/endpoint.svg?url=https%3A%2F%2Factions-badge.atrox.dev%2Fpikers%2Fpiker%2Fbadge&style=popout-square
 | 
			
		||||
    :target: https://actions-badge.atrox.dev/piker/pikers/goto
 | 
			
		||||
 | 
			
		||||
``piker`` is a broker agnostic, next-gen FOSS toolset for real-time
 | 
			
		||||
computational trading targeted at `hardcore Linux users <comp_trader>`_ .
 | 
			
		||||
``piker`` is a broker agnostic, next-gen FOSS toolset and runtime for
 | 
			
		||||
real-time computational trading targeted at `hardcore Linux users
 | 
			
		||||
<comp_trader>`_ .
 | 
			
		||||
 | 
			
		||||
we use as much bleeding edge tech as possible including (but not limited to):
 | 
			
		||||
we use much bleeding edge tech including (but not limited to):
 | 
			
		||||
 | 
			
		||||
- latest python for glue_
 | 
			
		||||
- trio_ for `structured concurrency`_
 | 
			
		||||
- tractor_ for distributed, multi-core, real-time streaming
 | 
			
		||||
- marketstore_ for historical and real-time tick data persistence and sharing
 | 
			
		||||
- techtonicdb_ for L2 book storage
 | 
			
		||||
- Qt_ for pristine high performance UIs
 | 
			
		||||
- pyqtgraph_ for real-time charting
 | 
			
		||||
- ``numpy`` and ``numba`` for `fast numerics`_
 | 
			
		||||
- uv_ for packaging and distribution
 | 
			
		||||
- trio_ & tractor_ for our distributed `structured concurrency`_ runtime
 | 
			
		||||
- Qt_ for pristine low latency UIs
 | 
			
		||||
- pyqtgraph_ (which we've extended) for real-time charting and graphics
 | 
			
		||||
- ``polars`` ``numpy`` and ``numba`` for redic `fast numerics`_
 | 
			
		||||
- `apache arrow and parquet`_ for time-series storage
 | 
			
		||||
 | 
			
		||||
.. |travis| image:: https://img.shields.io/travis/pikers/piker/master.svg
 | 
			
		||||
    :target: https://travis-ci.org/pikers/piker
 | 
			
		||||
potential projects we might integrate with soon,
 | 
			
		||||
 | 
			
		||||
- (already prototyped in ) techtonicdb_ for L2 book storage
 | 
			
		||||
 | 
			
		||||
.. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/
 | 
			
		||||
.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
 | 
			
		||||
.. _uv: https://docs.astral.sh/uv/
 | 
			
		||||
.. _trio: https://github.com/python-trio/trio
 | 
			
		||||
.. _tractor: https://github.com/goodboy/tractor
 | 
			
		||||
.. _structured concurrency: https://trio.discourse.group/
 | 
			
		||||
.. _marketstore: https://github.com/alpacahq/marketstore
 | 
			
		||||
.. _techtonicdb: https://github.com/0b01/tectonicdb
 | 
			
		||||
.. _Qt: https://www.qt.io/
 | 
			
		||||
.. _pyqtgraph: https://github.com/pyqtgraph/pyqtgraph
 | 
			
		||||
.. _glue: https://numpy.org/doc/stable/user/c-info.python-as-glue.html#using-python-as-glue
 | 
			
		||||
.. _apache arrow and parquet: https://arrow.apache.org/faq/
 | 
			
		||||
.. _fast numerics: https://zerowithdot.com/python-numpy-and-pandas-performance/
 | 
			
		||||
.. _comp_trader: https://jfaleiro.wordpress.com/2019/10/09/computational-trader/
 | 
			
		||||
.. _techtonicdb: https://github.com/0b01/tectonicdb
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
focus and features:
 | 
			
		||||
*******************
 | 
			
		||||
- 100% federated: your code, your hardware, your data feeds, your broker fills.
 | 
			
		||||
- zero web: low latency, native software that doesn't try to re-invent the OS
 | 
			
		||||
- maximal **privacy**: prevent brokers and mms from knowing your
 | 
			
		||||
  planz; smack their spreads with dark volume.
 | 
			
		||||
- zero clutter: modal, context oriented UIs that echew minimalism, reduce
 | 
			
		||||
  thought noise and encourage un-emotion.
 | 
			
		||||
- first class parallelism: built from the ground up on next-gen structured concurrency
 | 
			
		||||
  primitives.
 | 
			
		||||
- traders first: broker/exchange/asset-class agnostic
 | 
			
		||||
- systems grounded: real-time financial signal processing that will
 | 
			
		||||
  make any queuing or DSP eng juice their shorts.
 | 
			
		||||
- non-tina UX: sleek, powerful keyboard driven interaction with expected use in tiling wms
 | 
			
		||||
- data collaboration: every process and protocol is multi-host scalable.
 | 
			
		||||
- fight club ready: zero interest in adoption by suits; no corporate friendly license, ever.
 | 
			
		||||
 | 
			
		||||
fitting with these tenets, we're always open to new framework suggestions and ideas.
 | 
			
		||||
 | 
			
		||||
building the best looking, most reliable, keyboard friendly trading
 | 
			
		||||
platform is the dream; join the cause.
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
install
 | 
			
		||||
*******
 | 
			
		||||
``piker`` is currently under heavy pre-alpha development and as such
 | 
			
		||||
should be cloned from this repo and hacked on directly.
 | 
			
		||||
 | 
			
		||||
for a development install::
 | 
			
		||||
 | 
			
		||||
    git clone git@github.com:pikers/piker.git
 | 
			
		||||
    cd piker
 | 
			
		||||
    virtualenv env
 | 
			
		||||
    source ./env/bin/activate
 | 
			
		||||
    pip install -r requirements.txt -e .
 | 
			
		||||
 | 
			
		||||
install for nixos
 | 
			
		||||
*****************
 | 
			
		||||
for users of `NixOS` we offer a development shell envoirment that can be
 | 
			
		||||
loaded with::
 | 
			
		||||
 | 
			
		||||
    nix-shell develop.nix
 | 
			
		||||
 | 
			
		||||
this will setup the required python environment to run piker, make sure to
 | 
			
		||||
run::
 | 
			
		||||
 | 
			
		||||
    pip install -r requirements.txt -e .
 | 
			
		||||
 | 
			
		||||
once after loading the shell


install for tinas
*****************
note: this guide is likely out of date; it was written some (years) ago
by a user who has since not really partaken in contributing.

for windows peeps you can start by installing all the prerequisite software:

- install git with all default settings - https://git-scm.com/download/win
- install anaconda with all default settings - https://www.anaconda.com/products/individual
- install microsoft build tools (check the box for "Desktop development with C++", you might be able to uncheck some optional downloads) - https://visualstudio.microsoft.com/visual-cpp-build-tools/
- install visual studio code with default settings - https://code.visualstudio.com/download


then, `crack a conda shell`_ and run the following commands::

    mkdir code # create code directory
    cd code # change directory to code
    git clone https://github.com/pikers/piker.git # downloads piker installation package from github
    cd piker # change directory to piker

    conda create -n pikonda # creates conda environment named pikonda
    conda activate pikonda # activates pikonda

    conda install -c conda-forge python-levenshtein # in case it is not already installed
    conda install pip # may already be installed
    pip # will show if pip is installed

    pip install -e . -r requirements.txt # install piker in editable mode

test piker to see if it is working::

    piker -b binance chart btcusdt.binance # formatting for loading a chart
    piker -b kraken -b binance chart xbtusdt.kraken
    piker -b kraken -b binance -b ib chart qqq.nasdaq.ib
    piker -b ib chart tsla.nasdaq.ib

potential error::

    FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\user\\AppData\\Roaming\\piker\\brokers.toml'

solution:

- navigate to the directory above (it may differ on your machine; the location is listed in the error message)
- copy the file from 'C:\\Users\\user\\code\\data\\brokers.toml' or create a blank one using notepad at the location above (a minimal sketch follows below)
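
if you do create it by hand, a minimal sketch of a ``brokers.toml``
(section names taken from the example config later in this changeset;
the empty values are placeholders for your own keys) might look like::

    [binance]
    accounts.paper = 'paper'

    [kraken]
    key_descr = ''
    api_key = ''
    secret = ''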

Visual Studio Code setup:

- now that piker is installed we can set up vscode as the default terminal for running piker and editing the code
- open Visual Studio Code
- file --> Add Folder to Workspace --> C:\Users\user\code\piker (adds the piker directory where all piker files are located)
- file --> Save Workspace As --> save it wherever you want and call it whatever you want, this is going to be your default workspace for running and editing piker code
- ctrl + shift + p --> start typing "Python: Select Interpreter" --> when the option comes up select it --> select at the workspace level --> select the one that shows ('pikonda')
- change the default terminal to cmd.exe instead of powershell (the default)
- now when you create a new terminal VSCode should automatically activate your conda env so that piker can be run as the first command after a new terminal is created

also, try out fancyzones as part of powertoyz for a decent tiling windows manager to manage all the cool new software you are going to be running.

.. _conda installed: https://
.. _C++ build toolz: https://
.. _crack a conda shell: https://
.. _vscode: https://

.. link to the tina guide
.. _setup a coolio tiled wm console: https://


focus and feats:
****************

- **100% federated**:
  your code, your hardware, your data feeds, your broker fills.

- **zero web**:
  low latency as a prime objective, native UIs and modern IPC
  protocols without trying to re-invent the "OS-as-an-app"..

- **maximal privacy**:
  prevent brokers and mms from knowing your planz; smack their
  spreads with dark volume from a VPN tunnel.

- **zero clutter**:
  modal, context oriented UIs that eschew minimalism, reduce thought
  noise and encourage un-emotion.

- **first class parallelism**:
  built from the ground up on a next-gen structured concurrency
  supervision sys.

- **traders first**:
  broker/exchange/venue/asset-class/money-sys agnostic

- **systems grounded**:
  real-time financial signal processing (fsp) that will make any
  queuing or DSP eng juice their shorts.

- **non-tina UX**:
  sleek, powerful keyboard driven interaction with expected use in
  tiling wms (or maybe even a DDE).

- **data collab at scale**:
  every actor-process and protocol is multi-host aware.

- **fight club ready**:
  zero interest in adoption by suits; no corporate friendly license,
  ever.

fitting with these tenets, we're always open to new
framework/lib/service interop suggestions and ideas!

building the hottest looking, fastest, most reliable, keyboard
friendly FOSS trading platform is the dream; join the cause.


provider support
****************
for live data feeds the in-progress set of supported brokers is:

- IB_ via ``ib_insync``, also see our `container docs`_
- binance_ and kraken_ for crypto over their public websocket API
- questrade_ (ish) which comes with effectively free L1

coming soon...

- webull_ via the reverse engineered public API
- yahoo via yliveticker_

if you want your broker supported and they have an API let us know.

.. _IB: https://interactivebrokers.github.io/tws-api/index.html
.. _container docs: https://github.com/pikers/piker/tree/master/dockering/ib
.. _questrade: https://www.questrade.com/api/documentation
.. _kraken: https://www.kraken.com/features/api#public-market-data
.. _binance: https://github.com/pikers/piker/pull/182
.. _webull: https://github.com/tedchou12/webull
.. _yliveticker: https://github.com/yahoofinancelive/yliveticker
.. _coinbase: https://docs.pro.coinbase.com/#websocket-feed


a sane install with `uv`
************************
bc why install with `python` when you can go faster with `rust`::

    uv lock
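
a minimal sketch of the rest of the ``uv`` workflow (assuming stock
``uv`` commands; ``uv lock`` only writes the lockfile)::

    uv sync              # create the .venv and install locked deps
    uv run piker --help  # run the CLI from inside that env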


hacky install on nixos
**********************
``NixOS`` is our core devs' distro of choice for which we offer
a stringently defined development shell environment that can be loaded with::

    nix-shell default.nix


check out our charts
********************
bet you weren't expecting this from the foss::

    piker -l info -b kraken -b binance chart btcusdt.binance --pdb

this runs the main chart (currently with 1m sampled OHLC) in debug
mode and you can practice paper trading using the following
micro-manual:

``order_mode`` (
    edge triggered activation by any of the following keys,
    ``mouse-click`` on y-level to submit at that price
    ):

    - ``f``/ ``ctl-f`` to stage buy
    - ``d``/ ``ctl-d`` to stage sell
    - ``a`` to stage alert

``search_mode`` (
    ``ctl-l`` or ``ctl-space`` to open,
    ``ctl-c`` or ``ctl-space`` to close
    ):

    - begin typing to have symbol search automatically lookup
      symbols from all loaded backend (broker) providers
    - arrow keys and mouse click to navigate selection
    - vi-like ``ctl-[hjkl]`` for navigation


start a chart
*************
run a realtime OHLCV chart stand-alone::

    piker -l info chart btcusdt.spot.binance xmrusdt.spot.kraken

this runs a chart UI (with 1m sampled OHLCV) and shows 2 spot markets
from 2 diff cexes overlaid on the same graph. Use of `piker` without
first starting a daemon (`pikerd` - see below) means there is an
implicit spawning of the multi-actor-runtime (implemented as a
`tractor` app).

For additional subsystem feats available through our chart UI see the
various sub-readmes:

- order control using a mouse-n-keyboard UX B)
- cross venue market-pair (what most call "symbol") search, select, overlay Bo
- financial-signal-processing (`piker.fsp`) write-n-reload to sub-chart BO
- src-asset derivatives scan for anal, like the infamous "max pain" XO

you can also configure your position allocation limits from the
sidepane.


spawn a daemon standalone
*************************
we call the root actor-process the ``pikerd``. it can be (and is
recommended normally to be) started separately from the ``piker
chart`` program::

    pikerd -l info --pdb

the daemon does nothing until a ``piker``-client (like ``piker
chart``) connects and requests some particular sub-system. for
a connecting chart ``pikerd`` will spawn and manage at least,

- a data-feed daemon: ``datad`` which does all the work of comms with
  the backend provider (in this case the ``binance`` cex).
- a paper-trading engine instance, ``paperboi.binance``, (if no live
  account has been configured) which allows for auto/manual order
  control against the live quote stream.

*using* an actor-service (aka micro-daemon) manager which dynamically
supervises various sub-systems-as-services throughout the ``piker``
runtime-stack.

now you can (implicitly) connect your chart::

    piker chart btcusdt.spot.binance

since ``pikerd`` was started separately you can now enjoy a persistent
real-time data stream tied to the daemon-tree's lifetime. i.e. the next
time you spawn a chart it will not only load much faster (since the
underlying ``datad.binance`` is left running with its in-memory IPC
data structures) but the data-feed and any order mgmt states should
also persist until you finally cancel ``pikerd``.


if anyone asks you what this project is about
*********************************************
you don't talk about it; just use it.


how do i get involved?
**********************
enter the matrix.

how come there ain't that many docs
***********************************
i mean we want/need them but building the core right has been higher
prio than marketing (and likely will stay that way Bp).

soo, suck it up bc,

- no one is trying to sell you on anything
- learning the code base is prolly way more valuable
- the UI/UXs are intended to be "intuitive" for any hacker..

we obviously need tonz help so if you want to start somewhere and
can't necessarily write "advanced" concurrent python/rust code, then
helping document literally anything might be the place for you!


example ``brokers.toml``:

################
# ---- CEXY ----
################
[binance]
accounts.paper = 'paper'

accounts.usdtm = 'futes'
futes.use_testnet = false
futes.api_key = ''
futes.api_secret = ''

accounts.spot = 'spot'
spot.use_testnet = false
spot.api_key = ''
spot.api_secret = ''


[deribit]
key_id = ''
key_secret = ''


[kraken]
key_descr = ''
api_key = ''
secret = ''


[kucoin]
key_id = ''
key_secret = ''
key_passphrase = ''


################
# -- BROKERZ ---
################
[questrade]
refresh_token = ''
access_token = ''
api_server = 'https://api06.iq.questrade.com/'
expires_in = 1800
token_type = 'Bearer'
expires_at = 1616095326.355846


[ib]
hosts = [
    '127.0.0.1',
]
# XXX: the order in which ports will be scanned
# (by the `brokerd` daemon-actor)
ports = [
    # ...
]

# ...
# is not supported so you have to manually download
# an XML report and put it in a location that can be
# accessed by the ``brokerd.ib`` backend code for parsing.
flex_token = ''
flex_trades_query_id = ''  # live account

# when clients are being scanned this determines
# which clients are preferred to be used for data
prefer_data_account = [
    # ...
]

# the order in which accounts will be selectable
# in the order mode UI (if found via clients during
# API-app scanning) when a new symbol is loaded.
paper = 'XX0000000'
margin = 'X0000000'
ira = 'X0000000'
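
the dotted keys above parse as nested tables; a quick sketch of reading
such a file by hand from python (stdlib ``tomllib``, python 3.11+; the
path below is a made-up example - piker resolves its real config dir at
runtime)::

    import tomllib
    from pathlib import Path

    # hypothetical location; see the FileNotFoundError note in the tina
    # guide above for where your platform actually keeps it.
    path = Path.home() / '.config' / 'piker' / 'brokers.toml'

    with path.open('rb') as f:  # tomllib requires binary mode
        conf = tomllib.load(f)

    # `accounts.usdtm = 'futes'` under [binance] lands as a nested dict:
    print(conf['binance']['accounts']['usdtm'])  # -> 'futes'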


example network/ui config:

[network]
tsdb.backend = 'marketstore'
tsdb.host = 'localhost'
tsdb.grpc_port = 5995

[ui]
# set custom font + size which will scale entire UI
# font_size = 16
# font_name = 'Monospaced'

# colorscheme = 'default'  # UNUSED
# graphics.update_throttle = 60  # Hz  # TODO


a qt6 + `uv` nix shell derivation:

with (import <nixpkgs> {});
let
  glibStorePath = lib.getLib glib;
  zlibStorePath = lib.getLib zlib;
  zstdStorePath = lib.getLib zstd;
  dbusStorePath = lib.getLib dbus;
  libGLStorePath = lib.getLib libGL;
  freetypeStorePath = lib.getLib freetype;
  qt6baseStorePath = lib.getLib qt6.qtbase;
  fontconfigStorePath = lib.getLib fontconfig;
  libxkbcommonStorePath = lib.getLib libxkbcommon;
  xcbutilcursorStorePath = lib.getLib xcb-util-cursor;

  qtpyStorePath = lib.getLib python312Packages.qtpy;
  pyqt6StorePath = lib.getLib python312Packages.pyqt6;
  pyqt6SipStorePath = lib.getLib python312Packages.pyqt6-sip;
  rapidfuzzStorePath = lib.getLib python312Packages.rapidfuzz;
  qdarkstyleStorePath = lib.getLib python312Packages.qdarkstyle;

  xorgLibX11StorePath = lib.getLib xorg.libX11;
  xorgLibxcbStorePath = lib.getLib xorg.libxcb;
  xorgxcbutilwmStorePath = lib.getLib xorg.xcbutilwm;
  xorgxcbutilimageStorePath = lib.getLib xorg.xcbutilimage;
  xorgxcbutilerrorsStorePath = lib.getLib xorg.xcbutilerrors;
  xorgxcbutilkeysymsStorePath = lib.getLib xorg.xcbutilkeysyms;
  xorgxcbutilrenderutilStorePath = lib.getLib xorg.xcbutilrenderutil;
in
stdenv.mkDerivation {
  name = "piker-qt6-uv";
  buildInputs = [
    # System requirements.
    glib
    zlib
    dbus
    zstd
    libGL
    freetype
    qt6.qtbase
    libgcc.lib
    fontconfig
    libxkbcommon

    # Xorg requirements
    xcb-util-cursor
    xorg.libxcb
    xorg.libX11
    xorg.xcbutilwm
    xorg.xcbutilimage
    xorg.xcbutilerrors
    xorg.xcbutilkeysyms
    xorg.xcbutilrenderutil

    # Python requirements.
    python312Full
    python312Packages.uv
    python312Packages.qdarkstyle
    python312Packages.rapidfuzz
    python312Packages.pyqt6
    python312Packages.qtpy
  ];
  src = null;
  shellHook = ''
    set -e

    # Set the Qt plugin path
    # export QT_DEBUG_PLUGINS=1

    QTBASE_PATH="${qt6baseStorePath}/lib"
    QT_PLUGIN_PATH="$QTBASE_PATH/qt-6/plugins"
    QT_QPA_PLATFORM_PLUGIN_PATH="$QT_PLUGIN_PATH/platforms"

    LIB_GCC_PATH="${libgcc.lib}/lib"
    GLIB_PATH="${glibStorePath}/lib"
    ZSTD_PATH="${zstdStorePath}/lib"
    ZLIB_PATH="${zlibStorePath}/lib"
    DBUS_PATH="${dbusStorePath}/lib"
    LIBGL_PATH="${libGLStorePath}/lib"
    FREETYPE_PATH="${freetypeStorePath}/lib"
    FONTCONFIG_PATH="${fontconfigStorePath}/lib"
    LIB_XKB_COMMON_PATH="${libxkbcommonStorePath}/lib"

    XCB_UTIL_CURSOR_PATH="${xcbutilcursorStorePath}/lib"
    XORG_LIB_X11_PATH="${xorgLibX11StorePath}/lib"
    XORG_LIB_XCB_PATH="${xorgLibxcbStorePath}/lib"
    XORG_XCB_UTIL_IMAGE_PATH="${xorgxcbutilimageStorePath}/lib"
    XORG_XCB_UTIL_WM_PATH="${xorgxcbutilwmStorePath}/lib"
    XORG_XCB_UTIL_RENDER_UTIL_PATH="${xorgxcbutilrenderutilStorePath}/lib"
    XORG_XCB_UTIL_KEYSYMS_PATH="${xorgxcbutilkeysymsStorePath}/lib"
    XORG_XCB_UTIL_ERRORS_PATH="${xorgxcbutilerrorsStorePath}/lib"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QTBASE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_PLUGIN_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$QT_QPA_PLATFORM_PLUGIN_PATH"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_GCC_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$DBUS_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$GLIB_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZLIB_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$ZSTD_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIBGL_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FONTCONFIG_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$FREETYPE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$LIB_XKB_COMMON_PATH"

    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XCB_UTIL_CURSOR_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_X11_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_LIB_XCB_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_IMAGE_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_WM_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_RENDER_UTIL_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_KEYSYMS_PATH"
    LD_LIBRARY_PATH="$LD_LIBRARY_PATH:$XORG_XCB_UTIL_ERRORS_PATH"

    export LD_LIBRARY_PATH

    RPDFUZZ_PATH="${rapidfuzzStorePath}/lib/python3.12/site-packages"
    QDRKSTYLE_PATH="${qdarkstyleStorePath}/lib/python3.12/site-packages"
    QTPY_PATH="${qtpyStorePath}/lib/python3.12/site-packages"
    PYQT6_PATH="${pyqt6StorePath}/lib/python3.12/site-packages"
    PYQT6_SIP_PATH="${pyqt6SipStorePath}/lib/python3.12/site-packages"

    # (presumably PYTHONPATH was intended here rather than the `PATCH`
    # var the hook originally exported; spelled PYTHONPATH so these
    # site-packages are actually importable)
    PYTHONPATH="$PYTHONPATH:$RPDFUZZ_PATH"
    PYTHONPATH="$PYTHONPATH:$QDRKSTYLE_PATH"
    PYTHONPATH="$PYTHONPATH:$QTPY_PATH"
    PYTHONPATH="$PYTHONPATH:$PYQT6_PATH"
    PYTHONPATH="$PYTHONPATH:$PYQT6_SIP_PATH"

    export PYTHONPATH

    # Install deps
    uv lock

  '';
}
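
a sketch of actually using it (assuming the derivation above is saved
as e.g. ``default.nix``; note ``uv lock`` in the hook only resolves the
lockfile, so materializing the venv is a separate ``uv sync``)::

    nix-shell default.nix
    uv sync              # create .venv from the lockfile
    uv run piker --help  # sanity check the CLI inside that env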


develop.nix:

with (import <nixpkgs> {});

stdenv.mkDerivation {
  name = "poetry-env";
  buildInputs = [
    # System requirements.
    readline

    # TODO: hacky non-poetry install stuff we need to get rid of!!
    poetry
    # virtualenv
    # setuptools
    # pip

    # Python requirements (enough to get a virtualenv going).
    python311Full

    # obviously, and see below for hacked linking
    python311Packages.pyqt5
    python311Packages.pyqt5_sip
    # python311Packages.qtpy

    # numerics deps
    python311Packages.levenshtein
    python311Packages.fastparquet
    python311Packages.polars

  ];
  # environment.sessionVariables = {
  #   LD_LIBRARY_PATH = "${pkgs.stdenv.cc.cc.lib}/lib";
  # };
  src = null;
  shellHook = ''
    # Allow the use of wheels.
    # ...

    # Augment the dynamic linker path
    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:${R}/lib/R/lib:${readline}/lib

    export QT_QPA_PLATFORM_PLUGIN_PATH="${qt5.qtbase.bin}/lib/qt-${qt5.qtbase.version}/plugins";

    if [ ! -d ".venv" ]; then
        poetry install --with uis
    fi

    poetry shell
  '';
}


ib gateway docker-compose config:

# https://github.com/waytrade/ib-gateway-docker/blob/master/docker-compose.yml
version: "3.5"

services:

  ib_gw_paper:

    # apparently java is a mega cuck:
    # https://stackoverflow.com/a/56895801
    # https://bugs.openjdk.org/browse/JDK-8150460
    ulimits:
      # nproc: 65535
      nproc: 6000
      nofile:
        soft: 2000
        hard: 3000

    # other image tags available:
    # https://github.com/waytrade/ib-gateway-docker#supported-tags
    # image: waytrade/ib-gateway:981.3j
    # image: waytrade/ib-gateway:1012.2i
    image: ghcr.io/gnzsnz/ib-gateway:latest

    # 'no' => don't auto-restart whenever there's a crash or the user exits
    restart: 'no'
    network_mode: 'host'
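
a sketch of running it (assuming a recent ``docker compose`` plugin and
that this file is the one compose picks up by default)::

    docker compose up -d ib_gw_paper    # start the paper gateway service
    docker compose logs -f ib_gw_paper  # watch it boot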


IBC config for the gateway (ini format):

# ...
SecondFactorDevice=

# If you use the IBKR Mobile app for second factor authentication,
# and you fail to complete the process before the time limit imposed
# by IBKR, this setting tells IBC whether to automatically restart
# the login sequence, giving you another opportunity to complete
# second factor authentication.
#
# Permitted values are 'yes' and 'no'.
#
# If this setting is not present or has no value, then the value
# of the deprecated ExitAfterSecondFactorAuthenticationTimeout is
# used instead. If this also has no value, then this setting defaults
# to 'no'.
#
# NB: you must be using IBC v3.14.0 or later to use this setting:
# earlier versions ignore it.

ReloginAfterSecondFactorAuthenticationTimeout=


# This setting is only relevant if
# ReloginAfterSecondFactorAuthenticationTimeout is set to 'yes',
# or if ExitAfterSecondFactorAuthenticationTimeout is set to 'yes'.
#
# It controls how long (in seconds) IBC waits for login to complete
# after the user acknowledges the second factor authentication
# alert at the IBKR Mobile app. If login has not completed after
# this time, IBC terminates.
# The default value is 60.

SecondFactorAuthenticationExitInterval=


# This setting specifies the timeout for second factor authentication
# imposed by IB. The value is in seconds. You should not change this
# setting unless you have reason to believe that IB has changed the
# timeout. The default value is 180.

SecondFactorAuthenticationTimeout=180


# DEPRECATED SETTING
# ------------------
#
# ExitAfterSecondFactorAuthenticationTimeout - THIS SETTING WILL BE
# REMOVED IN A FUTURE RELEASE. For IBC version 3.14.0 and later, see
# the notes for ReloginAfterSecondFactorAuthenticationTimeout above.
#
# For IBC versions earlier than 3.14.0: If you use the IBKR Mobile
# app for second factor authentication, and you fail to complete the
# process before the time limit imposed by IBKR, you can use this
# setting to tell IBC to exit: arrangements can then be made to
# automatically restart IBC in order to initiate the login sequence
# afresh. Otherwise, manual intervention at TWS's
# Second Factor Authentication dialog is needed to complete the
# login.
#

ExitAfterSecondFactorAuthenticationTimeout=no


# Trading Mode
# ------------
#
# This indicates whether the live account or the paper trading
# account corresponding to the supplied credentials is to be used.
# The allowed values are 'live' (the default) and 'paper'.
#
# If this is set to 'live', then the credentials for the live
# account must be supplied. If it is set to 'paper', then either
# the live or the paper-trading credentials may be supplied.

TradingMode=paper


# Paper-trading Account Warning
# -----------------------------
# ...

AcceptNonBrokerageAccountWarning=yes

# ...
#
# The default value is 60.

LoginDialogDisplayTimeout=60


# ...
# but they are acceptable.
#
# The default is the current working directory when IBC is
# started, unless the TWS_SETTINGS_PATH setting in the relevant
# start script is set.
#
# If both this setting and TWS_SETTINGS_PATH are set, then this
# setting takes priority. Note that if they have different values,
# auto-restart will not work.
#
# NB: this setting is now DEPRECATED. You should use the
# TWS_SETTINGS_PATH setting in the relevant start script.

IbDir=/root/Jts


ExistingSessionDetectedAction=primary

# ...

# Override TWS API Port Number
# ----------------------------
#
# If OverrideTwsApiPort is set to an integer, IBC changes the
# 'Socket port' in TWS's API configuration to that number shortly
# after startup (but note that for the FIX Gateway, this setting is
# actually stored in jts.ini rather than the Gateway's settings
# file). Leaving the setting blank will make no change to
# the current setting. This setting is only intended for use in
# certain specialized situations where the port number needs to
# be set dynamically at run-time, and for the FIX Gateway: most
# non-FIX users will never need it, so don't use it unless you know
# you need it.

OverrideTwsApiPort=4000


# Override TWS Master Client ID
# -----------------------------
#
# If OverrideTwsMasterClientID is set to an integer, IBC changes the
# 'Master Client ID' value in TWS's API configuration to that
# value shortly after startup. Leaving the setting blank will make
# no change to the current setting. This setting is only intended
# for use in certain specialized situations where the value needs to
# be set dynamically at run-time: most users will never need it,
# so don't use it unless you know you need it.

OverrideTwsMasterClientID=


# Read-only Login
# ---------------
#
# ...
# account security programme, the user will not be asked to perform
# the second factor authentication action, and login to TWS will
# occur automatically in read-only mode: in this mode, placing or
# managing orders is not allowed.
#
# If set to 'no', and the user is enrolled in IB's account security
# programme, the second factor authentication process is handled
# according to the Second Factor Authentication Settings described
# elsewhere in this file.
#
# If the user is not enrolled in IB's account security programme,
# this setting is ignored. The default is 'no'.

ReadOnlyLogin=no

# ...
# set the relevant checkbox (this only needs to be done once) and
# not provide a value for this setting.

ReadOnlyApi=


# API Precautions
# ---------------
#
# These settings relate to the corresponding 'Precautions' checkboxes in the
# API section of the Global Configuration dialog.
#
# For all of these, the accepted values are:
# - 'yes' sets the checkbox
# - 'no' clears the checkbox
# - if not set, the existing TWS/Gateway configuration is unchanged
#
# NB: these settings are really only supplied for the benefit of new TWS
# or Gateway instances that are being automatically installed and
# started without user intervention, or where user settings are not preserved
# between sessions (eg some Docker containers). Where a user is involved, they
# should use the Global Configuration to set the relevant checkboxes and not
# provide values for these settings.

BypassOrderPrecautions=

BypassBondWarning=

BypassNegativeYieldToWorstConfirmation=

BypassCalledBondWarning=

BypassSameActionPairTradeWarning=

BypassPriceBasedVolatilityRiskWarning=

BypassUSStocksMarketDataInSharesWarning=

BypassRedirectOrderWarning=

BypassNoOverfillProtectionPrecaution=


# Market data size for US stocks - lots or shares
# ...

AcceptBidAskLastSizeDisplayUpdateNotification=accept

SendMarketDataInLotsForUSstocks=


# Trusted API Client IPs
# ----------------------
#
# NB: THIS SETTING IS ONLY RELEVANT FOR THE GATEWAY, AND ONLY WHEN FIX=yes.
# In all other cases it is ignored.
#
# This is a list of IP addresses separated by commas. API clients with IP
# addresses in this list are able to connect to the API without Gateway
# generating the 'Incoming connection' popup.
#
# Note that 127.0.0.1 is always permitted to connect, so do not include it
# in this setting.

TrustedTwsApiClientIPs=


# Reset Order ID Sequence
# -----------------------
#
# This setting resets the order id sequence for orders submitted via the API, so
# that the next invocation of the `NextValidId` API callback will return the
# value 1. The reset occurs when TWS starts.
#
# Note that order ids are reset for all API clients, except those that have
# outstanding (ie incomplete) orders: their order id sequence carries on as
# before.
#
# Valid values are 'yes', 'true', 'false' and 'no'. The default is 'no'.

ResetOrderIdsAtStart=


# This setting specifies IBC's action when TWS displays the dialog asking for
# confirmation of a request to reset the API order id sequence.
#
# Note that the Gateway never displays this dialog, so this setting is ignored
# for a Gateway session.
#
# Valid values consist of two strings separated by a solidus '/'. The first
# value specifies the action to take when the order id reset request resulted
# from setting ResetOrderIdsAtStart=yes. The second specifies the action to
# take when the order id reset request is a result of the user clicking the
# 'Reset API order ID sequence' button in the API configuration. Each value
# must be one of the following:
#
#    'confirm'
#        order ids will be reset
#
#    'reject'
#        order ids will not be reset
#
#    'ignore'
#        IBC will ignore the dialog. The user must take action.
#
# The default setting is ignore/ignore
#
# Examples:
#
#    'confirm/reject' - confirm order id reset only if ResetOrderIdsAtStart=yes
#                       and reject any user-initiated requests
#
#    'ignore/confirm' - user must decide what to do if ResetOrderIdsAtStart=yes
#                       and confirm user-initiated requests
#
#    'reject/ignore'  - reject order id reset if ResetOrderIdsAtStart=yes but
#                       allow user to handle user-initiated requests

ConfirmOrderIdReset=



# =============================================================================
# 4.   TWS Auto-Logoff and Auto-Restart
# =============================================================================
#
# TWS and Gateway insist on being restarted every day. Two alternative
# automatic options are offered:
#
#    - Auto-Logoff: at a specified time, TWS shuts down tidily, without
#      restarting.
#
#    - Auto-Restart: at a specified time, TWS shuts down and then restarts
#      without the user having to re-authenticate.
#
# The normal way to configure the time at which this happens is via the Lock
# and Exit section of the Configuration dialog. Once this time has been
# configured in this way, the setting persists until the user changes it again.
#
# However, there are situations where there is no user available to do this
# configuration, or where there is no persistent storage (for example some
# Docker images). In such cases, the auto-restart or auto-logoff time can be
# set whenever IBC starts with the settings below.
#
# The value, if specified, must be a time in HH:MM AM/PM format, for example
# 08:00 AM or 10:00 PM. Note that there must be a single space between the
# two parts of this value; also that midnight is "12:00 AM" and midday is
# "12:00 PM".
#
# If no value is specified for either setting, the currently configured
# settings will apply. If a value is supplied for one setting, the other
# setting is cleared. If values are supplied for both settings, only the
# auto-restart time is set, and the auto-logoff time is cleared.
#
# Note that for a normal TWS/Gateway installation with persistent storage
# (for example on a desktop computer) the value will be persisted as if the
# user had set it via the configuration dialog.
#
# If you choose to auto-restart, you should take note of the considerations
# described at the link below. Note that where this information mentions
# 'manual authentication', restarting IBC will do the job (IBKR does not
# recognise the existence of IBC in its documentation).
#
#  https://www.interactivebrokers.com/en/software/tws/twsguide.htm#usersguidebook/configuretws/auto_restart_info.htm
#
# If you use the "RESTART" command via the IBC command server, and IBC is
# running any version of the Gateway (or a version of TWS earlier than 1018),
# note that this will set the Auto-Restart time in Gateway/TWS's configuration
# dialog to the time at which the restart actually happens (which may be up to
# a minute after the RESTART command is issued). To prevent future auto-
# restarts at this time, you must make sure you have set AutoLogoffTime or
# AutoRestartTime to your desired value before running IBC. NB: this does not
# apply to TWS from version 1018 onwards.

AutoLogoffTime=

AutoRestartTime=
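
# For example (an illustrative schedule, not a recommendation): restart
# every night just before midnight and leave auto-logoff unset:
#
# AutoRestartTime=11:59 PM
# AutoLogoffTime=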


# =============================================================================
# 5.   TWS Tidy Closedown Time
# =============================================================================
#
# Specifies a time at which TWS will close down tidily, with no restart.
#
# There is little reason to use this setting. It is similar to AutoLogoffTime,
# but can include a day-of-the-week, whereas AutoLogoffTime and AutoRestartTime
# apply every day. So for example you could use ClosedownAt in conjunction with
# AutoRestartTime to shut down TWS on Friday evenings after the markets
# close, without it running on Saturday as well.
#
# To tell IBC to tidily close TWS at a specified time every
# day, set this value to <hh:mm>, for example:
# ...

AcceptIncomingConnectionAction=reject

# ...
#   no    means the dialog remains on display and must be
#         handled by the user.

AllowBlindTrading=no
# Save Settings on a Schedule
# ---------------------------
# ...

SaveTwsSettingsAt=


# Confirm Crypto Currency Orders Automatically
# --------------------------------------------
#
# When you place an order for a cryptocurrency contract, a dialog is displayed
# asking you to confirm that you want to place the order, and notifying you
# that you are placing an order to trade cryptocurrency with Paxos, a New York
# limited trust company, and not at Interactive Brokers.
#
#   transmit    means that the order will be placed automatically, and the
#               dialog will then be closed
#
#   cancel      means that the order will not be placed, and the dialog will
#               then be closed
#
#   manual      means that IBC will take no action and the user must deal
#               with the dialog

ConfirmCryptoCurrencyOrders=transmit



# =============================================================================
# 7.   Settings Specific to Indian Versions of TWS
# =============================================================================
# ...

DismissNSEComplianceNotice=yes

# ...
#
# The port number that IBC listens on for commands
# such as "STOP". DO NOT set this to the port number
# used for TWS API connections.
#
# The convention is to use 7462 for this port,
# but it must be set to a different value from any other
# IBC instance that might run at the same time.
#
# The default value is 0, which tells IBC not to start
# the command server

CommandServerPort=0
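
# For example (illustration only): enable remote control on the
# conventional port; IBC's command server then accepts simple text
# commands such as "STOP" or "RESTART" from a permitted host:
#
# CommandServerPort=7462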


# Permitted Command Sources
# -------------------------
# ...
# IBC.  Commands can always be sent from the
# same host as IBC is running on.

ControlFrom=


# Address for Receiving Commands
# ------------------------------
#
# Specifies the IP address on which the Command Server
# is to listen. For a multi-homed host, this can be used
# to specify that connection requests are only to be
# accepted on the specified address. The default is to
# accept connection requests on all local addresses.

BindAddress=
# Command Prompt
# --------------
# ...

CommandPrompt=

# ...
# information is sent. The default is that such information
# is not sent.

SuppressInfoMessages=yes


# ...
#
# The LogStructureScope setting indicates which windows are
# eligible for structure logging:
#
#    - (default value) if set to 'known', only windows that
#      IBC recognizes are eligible - these are windows that
#      IBC has some interest in monitoring, usually to take
#      some action on the user's behalf;
#
#    - if set to 'unknown', only windows that IBC does not
#      recognize are eligible. Most windows displayed by
# ...
#    - if set to 'all', then every window displayed by TWS
#      is eligible.

LogStructureScope=known


# When to Log Window Structure
# ----------------------------
# ...
#       structure of an eligible window the first time it
#       is encountered;
#
#    - if set to 'openclose', the structure is logged every
#      time an eligible window is opened or closed;
#
#    - if set to 'activate', the structure is logged every
#      time an eligible window is made active;
#
#    - (default value) if set to 'never' or 'no' or 'false',
#      structure information is never logged.

LogStructureWhen=never

# ...
#LogComponents=
### NOTE this is likely out of date given it was written some
 | 
			
		||||
(years) time ago by a user that has since not really partaken in
 | 
			
		||||
contributing since.
 | 
			
		||||
 | 
			
		||||
install for tinas
 | 
			
		||||
*****************
 | 
			
		||||
for windows peeps you can start by installing all the prerequisite software:
 | 
			
		||||
 | 
			
		||||
- install git with all default settings - https://git-scm.com/download/win
 | 
			
		||||
- install anaconda all default settings - https://www.anaconda.com/products/individual
 | 
			
		||||
- install microsoft build tools (check the box for Desktop development for C++, you might be able to uncheck some optional downloads)  - https://visualstudio.microsoft.com/visual-cpp-build-tools/
 | 
			
		||||
- install visual studio code default settings - https://code.visualstudio.com/download
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
then, `crack a conda shell`_ and run the following commands::
 | 
			
		||||
 | 
			
		||||
    mkdir code # create code directory
 | 
			
		||||
    cd code # change directory to code
 | 
			
		||||
    git clone https://github.com/pikers/piker.git # downloads piker installation package from github
 | 
			
		||||
    cd piker # change directory to piker
 | 
			
		||||
    
 | 
			
		||||
    conda create -n pikonda # creates conda environment named pikonda
 | 
			
		||||
    conda activate pikonda # activates pikonda
 | 
			
		||||
    
 | 
			
		||||
    conda install -c conda-forge python-levenshtein # in case it is not already installed
 | 
			
		||||
    conda install pip # may already be installed
 | 
			
		||||
    pip # will show if pip is installed
 | 
			
		||||
    
 | 
			
		||||
    pip install -e . -r requirements.txt # install piker in editable mode
 | 
			
		||||
 | 
			
		||||
test Piker to see if it is working::
 | 
			
		||||
 | 
			
		||||
    piker -b binance chart btcusdt.binance # formatting for loading a chart
 | 
			
		||||
    piker -b kraken -b binance chart xbtusdt.kraken
 | 
			
		||||
    piker -b kraken -b binance -b ib chart qqq.nasdaq.ib
 | 
			
		||||
    piker -b ib chart tsla.nasdaq.ib
 | 
			
		||||
 | 
			
		||||
potential error::
 | 
			
		||||
    
 | 
			
		||||
    FileNotFoundError: [Errno 2] No such file or directory: 'C:\\Users\\user\\AppData\\Roaming\\piker\\brokers.toml'
 | 
			
		||||
    
 | 
			
		||||
solution:
 | 
			
		||||
 | 
			
		||||
- navigate to file directory above (may be different on your machine, location should be listed in the error code)
 | 
			
		||||
- copy and paste file from 'C:\\Users\\user\\code\\data/brokers.toml' or create a blank file using notepad at the location above
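
if you'd rather skip notepad, a one-liner like this (an untested
sketch which assumes the default ``Roaming`` path from the error
above) run from your conda shell should also create the blank file::

    python -c "from pathlib import Path; p = Path.home() / 'AppData/Roaming/piker/brokers.toml'; p.parent.mkdir(parents=True, exist_ok=True); p.touch()"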

Visual Studio Code setup:

- now that piker is installed we can set up vscode as the default terminal for running piker and editing the code
- open Visual Studio Code
- file --> Add Folder to Workspace --> C:\Users\user\code\piker (adds piker directory where all piker files are located)
- file --> Save Workspace As --> save it wherever you want and call it whatever you want, this is going to be your default workspace for running and editing piker code
- ctrl + shift + p --> start typing Python: Select Interpreter --> when the option comes up select it --> Select at the workspace level --> select the one that shows ('pikonda')
- change the default terminal to cmd.exe instead of powershell (default)
- now when you create a new terminal VScode should automatically activate your conda env so that piker can be run as the first command after a new terminal is created

also, try out fancyzones as part of powertoyz for a decent tiling windows manager to manage all the cool new software you are going to be running.

.. _conda installed: https://
.. _C++ build toolz: https://
.. _crack a conda shell: https://
.. _vscode: https://

.. link to the tina guide
.. _setup a coolio tiled wm console: https://

provider support
****************
for live data feeds the in-progress set of supported brokers is:

- IB_ via ``ib_insync``, also see our `container docs`_
- binance_ and kraken_ for crypto over their public websocket API
- questrade_ (ish) which comes with effectively free L1

coming soon...

- webull_ via the reverse engineered public API
- yahoo via yliveticker_

if you want your broker supported and they have an API let us know.

.. _IB: https://interactivebrokers.github.io/tws-api/index.html
.. _container docs: https://github.com/pikers/piker/tree/master/dockering/ib
.. _questrade: https://www.questrade.com/api/documentation
.. _kraken: https://www.kraken.com/features/api#public-market-data
.. _binance: https://github.com/pikers/piker/pull/182
.. _webull: https://github.com/tedchou12/webull
.. _yliveticker: https://github.com/yahoofinancelive/yliveticker
.. _coinbase: https://docs.pro.coinbase.com/#websocket-feed
@ -0,0 +1,263 @@
# from pprint import pformat
from functools import partial
from decimal import Decimal
from typing import Callable

import tractor
import trio
from uuid import uuid4

from piker.service import maybe_open_pikerd
from piker.accounting import dec_digits
from piker.clearing import (
    open_ems,
    OrderClient,
)
# TODO: we should probably expose these top level in this subsys?
from piker.clearing._messages import (
    Order,
    Status,
    BrokerdPosition,
)
from piker.data import (
    iterticks,
    Flume,
    open_feed,
    Feed,
    # ShmArray,
)


# TODO: handle other statuses:
# - fills, errors, and position tracking
async def wait_for_order_status(
    trades_stream: tractor.MsgStream,
    oid: str,
    expect_status: str,

) -> tuple[
    list[Status],
    list[BrokerdPosition],
]:
    '''
    Wait for a specific order status for a given dialog, return msg flow
    up to that msg and any position update msgs in a tuple.

    '''
    # Wait for position message before moving on to verify flow(s)
    # for the multi-order position entry/exit.
    status_msgs: list[Status] = []
    pp_msgs: list[BrokerdPosition] = []

    async for msg in trades_stream:
        match msg:
            case {'name': 'position'}:
                ppmsg = BrokerdPosition(**msg)
                pp_msgs.append(ppmsg)

            case {
                'name': 'status',
            }:
                msg = Status(**msg)
                status_msgs.append(msg)

                # if we get the status we expect then return all
                # collected msgs from the brokerd dialog up to the
                # expected msg B)
                if (
                    msg.resp == expect_status
                    and msg.oid == oid
                ):
                    return status_msgs, pp_msgs


async def bot_main():
    '''
    Boot the piker runtime, open an ems connection, submit
    and process order statuses in real-time.

    '''
    ll: str = 'info'

    # open an order ctl client, live data feed, trio nursery for
    # spawning an order trailer task
    client: OrderClient
    trades_stream: tractor.MsgStream
    feed: Feed
    accounts: list[str]

    fqme: str = 'btcusdt.usdtm.perp.binance'

    async with (

        # TODO: do this implicitly inside `open_ems()` ep below?
        # init and sync actor-service runtime
        maybe_open_pikerd(
            loglevel=ll,
            debug_mode=True,

        ),
        open_ems(
            fqme,
            mode='paper',  # {'live', 'paper'}
            # mode='live',  # for real-brokerd submissions
            loglevel=ll,

        ) as (
            client,  # OrderClient
            trades_stream,  # tractor.MsgStream
            _,  # positions
            accounts,
            _,  # dialogs
        ),

        open_feed(
            fqmes=[fqme],
            loglevel=ll,

            # TODO: if you want to throttle via downsampling
            # how many tick updates your feed received on
            # quote streams B)
            # tick_throttle=10,
        ) as feed,

        trio.open_nursery() as tn,
    ):
        assert accounts
        print(f'Loaded binance accounts: {accounts}')

        flume: Flume = feed.flumes[fqme]
        min_tick = Decimal(flume.mkt.price_tick)
        min_tick_digits: int = dec_digits(min_tick)
        price_round: Callable = partial(
            round,
            ndigits=min_tick_digits,
        )

        quote_stream: trio.abc.ReceiveChannel = feed.streams['binance']


        # always keep the live limit 0.03% below the last
        # clearing price
        clear_margin: float = 0.9997

        async def trailer(
            order: Order,
        ):
            # ref shm OHLCV array history, if you want
            # s_shm: ShmArray = flume.rt_shm
            # m_shm: ShmArray = flume.hist_shm

            # NOTE: if you wanted to frame ticks by type like the
            # quote throttler does.. and this is probably
            # faster in terms of getting the latest tick type
            # embedded value of interest?
            # from piker.data._sampling import frame_ticks

            async for quotes in quote_stream:
                for fqme, quote in quotes.items():
                    # print(
                    #     f'{quote["symbol"]} -> {quote["ticks"]}\n'
                    #     f'last 1s OHLC:\n{s_shm.array[-1]}\n'
                    #     f'last 1m OHLC:\n{m_shm.array[-1]}\n'
                    # )

                    for tick in iterticks(
                        quote,
                        reverse=True,
                        # types=('trade', 'dark_trade'), # defaults
                    ):

                        await client.update(
                            uuid=order.oid,
                            price=price_round(
                                clear_margin
                                *
                                tick['price']
                            ),
                        )
                        msgs, pps = await wait_for_order_status(
                            trades_stream,
                            order.oid,
                            'open'
                        )
                        # if multiple clears per quote just
                        # skip to the next quote?
                        break


        # get first live quote to be sure we submit the initial
        # live buy limit low enough that it doesn't clear due to
        # a stale initial price from the data feed layer!
        first_ask_price: float | None = None
        async for quotes in quote_stream:
            for fqme, quote in quotes.items():
                # print(quote['symbol'])
                for tick in iterticks(quote, types=('ask',)):
                    first_ask_price: float = tick['price']
                    break

            if first_ask_price:
                break

        # setup order dialog via first msg
        price: float = price_round(
            clear_margin
            *
            first_ask_price,
        )

        # compute a 1k USD sized pos
        size: float = round(1e3/price, ndigits=3)

        order = Order(

            # TODO: docs on how this all works, bc even i'm not entirely
            # clear XD. also we probably want to figure out how to
            # offer both the paper engine running and the brokerd
            # order ctl tasks with the ems choosing which stream to
            # route msgs on given the account value!
            account='paper',  # use built-in paper clearing engine and .accounting
            # account='binance.usdtm',  # for live binance futes

            oid=str(uuid4()),
            exec_mode='live',  # {'dark', 'live', 'alert'}

            action='buy',  # TODO: remove this from our schema?

            size=size,
            symbol=fqme,
            price=price,
            brokers=['binance'],
        )
        await client.send(order)

        msgs, pps = await wait_for_order_status(
            trades_stream,
            order.oid,
            'open',
        )

        assert not pps
        assert msgs[-1].oid == order.oid

        # start "trailer task" which tracks rt quote stream
        tn.start_soon(trailer, order)

        try:
            # wait for ctl-c from user..
            await trio.sleep_forever()
        except KeyboardInterrupt:
            # cancel the open order
            await client.cancel(order.oid)

            msgs, pps = await wait_for_order_status(
                trades_stream,
                order.oid,
                'canceled'
            )
            raise


if __name__ == '__main__':
    trio.run(bot_main)
@ -0,0 +1,138 @@
{
  "nodes": {
    "flake-utils": {
      "inputs": {
        "systems": "systems"
      },
      "locked": {
        "lastModified": 1689068808,
        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "flake-utils_2": {
      "inputs": {
        "systems": "systems_2"
      },
      "locked": {
        "lastModified": 1689068808,
        "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
        "owner": "numtide",
        "repo": "flake-utils",
        "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
        "type": "github"
      },
      "original": {
        "owner": "numtide",
        "repo": "flake-utils",
        "type": "github"
      }
    },
    "nix-github-actions": {
      "inputs": {
        "nixpkgs": [
          "poetry2nix",
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1688870561,
        "narHash": "sha256-4UYkifnPEw1nAzqqPOTL2MvWtm3sNGw1UTYTalkTcGY=",
        "owner": "nix-community",
        "repo": "nix-github-actions",
        "rev": "165b1650b753316aa7f1787f3005a8d2da0f5301",
        "type": "github"
      },
      "original": {
        "owner": "nix-community",
        "repo": "nix-github-actions",
        "type": "github"
      }
    },
    "nixpkgs": {
      "locked": {
        "lastModified": 1692174805,
        "narHash": "sha256-xmNPFDi/AUMIxwgOH/IVom55Dks34u1g7sFKKebxUm0=",
        "owner": "NixOS",
        "repo": "nixpkgs",
        "rev": "caac0eb6bdcad0b32cb2522e03e4002c8975c62e",
        "type": "github"
      },
      "original": {
        "owner": "NixOS",
        "ref": "nixos-unstable",
        "repo": "nixpkgs",
        "type": "github"
      }
    },
    "poetry2nix": {
      "inputs": {
        "flake-utils": "flake-utils_2",
        "nix-github-actions": "nix-github-actions",
        "nixpkgs": [
          "nixpkgs"
        ]
      },
      "locked": {
        "lastModified": 1692048894,
        "narHash": "sha256-cDw03rso2V4CDc3Mll0cHN+ztzysAvdI8pJ7ybbz714=",
        "ref": "refs/heads/pyqt6",
        "rev": "b059ad4c3051f45d6c912e17747aae37a9ec1544",
        "revCount": 2276,
        "type": "git",
        "url": "file:///home/lord_fomo/repos/poetry2nix"
      },
      "original": {
        "type": "git",
        "url": "file:///home/lord_fomo/repos/poetry2nix"
      }
    },
    "root": {
      "inputs": {
        "flake-utils": "flake-utils",
        "nixpkgs": "nixpkgs",
        "poetry2nix": "poetry2nix"
      }
    },
    "systems": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    },
    "systems_2": {
      "locked": {
        "lastModified": 1681028828,
        "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
        "owner": "nix-systems",
        "repo": "default",
        "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
        "type": "github"
      },
      "original": {
        "owner": "nix-systems",
        "repo": "default",
        "type": "github"
      }
    }
  },
  "root": "root",
  "version": 7
}
@ -0,0 +1,180 @@
# NOTE: to convert to a poetry2nix env like this here are the
# steps:
# - install poetry in your system nix config
# - convert the repo to use poetry using `poetry init`:
#   https://python-poetry.org/docs/basic-usage/#initialising-a-pre-existing-project
# - then manually ensure all deps are converted over:
# - add this file to the repo and commit it
# -

# GROKin tips:
# - CLI eps are (ostensibly) added via an `entry_points.txt`:
#   - https://packaging.python.org/en/latest/specifications/entry-points/#file-format
#   - https://github.com/nix-community/poetry2nix/blob/master/editable.nix#L49
{
  description = "piker: trading gear for hackers (pkged with poetry2nix)";

  inputs.flake-utils.url = "github:numtide/flake-utils";
  inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";

  # see https://github.com/nix-community/poetry2nix/tree/master#api
  inputs.poetry2nix = {
    # url = "github:nix-community/poetry2nix";
    # url = "github:K900/poetry2nix/qt5-explicit-deps";
    url = "/home/lord_fomo/repos/poetry2nix";

    inputs.nixpkgs.follows = "nixpkgs";
  };

  outputs = {
    self,
    nixpkgs,
    flake-utils,
    poetry2nix,
  }:
    # TODO: build cross-OS and use the `${system}` var thingy..
    flake-utils.lib.eachDefaultSystem (system:
      let
        # use PWD as sources
        projectDir = ./.;
        pyproject = ./pyproject.toml;
        poetrylock = ./poetry.lock;

        # TODO: port to 3.11 and support both versions?
        python = "python3.10";

        # for more functions and examples.
        # inherit
        # (poetry2nix.legacyPackages.${system})
        # mkPoetryApplication;
        # pkgs = nixpkgs.legacyPackages.${system};

        pkgs = nixpkgs.legacyPackages.x86_64-linux;
        lib = pkgs.lib;
        p2npkgs = poetry2nix.legacyPackages.x86_64-linux;

        # define all pkg overrides per dep, see edgecases.md:
        # https://github.com/nix-community/poetry2nix/blob/master/docs/edgecases.md
        # TODO: add these into the json file:
        # https://github.com/nix-community/poetry2nix/blob/master/overrides/build-systems.json
        pypkgs-build-requirements = {
          asyncvnc = [ "setuptools" ];
          eventkit = [ "setuptools" ];
          ib-insync = [ "setuptools" "flake8" ];
          msgspec = [ "setuptools" ];
          pdbp = [ "setuptools" ];
          pyqt6-sip = [ "setuptools" ];
          tabcompleter = [ "setuptools" ];
          tractor = [ "setuptools" ];
          tricycle = [ "setuptools" ];
          trio-typing = [ "setuptools" ];
          trio-util = [ "setuptools" ];
          xonsh = [ "setuptools" ];
        };

        # auto-generate override entries
        p2n-overrides = p2npkgs.defaultPoetryOverrides.extend (self: super:
          builtins.mapAttrs (package: build-requirements:
            (builtins.getAttr package super).overridePythonAttrs (old: {
              buildInputs = (
                old.buildInputs or [ ]
              ) ++ (
                builtins.map (
                  pkg: if builtins.isString pkg then builtins.getAttr pkg super else pkg
                  ) build-requirements
              );
            })
          ) pypkgs-build-requirements
        );

        # override some ahead-of-time compiled extensions
        # to be built with their wheels.
        ahot_overrides = p2n-overrides.extend(
          final: prev: {

            # llvmlite = prev.llvmlite.override {
            #   preferWheel = false;
            # };

            # TODO: get this workin with p2n and nixpkgs..
            # pyqt6 = prev.pyqt6.override {
            #   preferWheel = true;
            # };

            # NOTE: this DOESN'T work atm but after a fix
            # to poetry2nix, it will and actually this line
            # won't be needed - thanks @k900:
            # https://github.com/nix-community/poetry2nix/pull/1257
            pyqt5 = prev.pyqt5.override {
              # withWebkit = false;
              preferWheel = true;
            };

            # see PR from @k900:
            # https://github.com/nix-community/poetry2nix/pull/1257
            # pyqt5-qt5 = prev.pyqt5-qt5.override {
            #   withWebkit = false;
            #   preferWheel = true;
            # };

            # TODO: patch in an override for polars to build
            # from src! See the details likely needed from
            # the cryptography entry:
            # https://github.com/nix-community/poetry2nix/blob/master/overrides/default.nix#L426-L435
            polars = prev.polars.override {
              preferWheel = true;
            };
          }
      );

      # WHY!? -> output-attrs that `nix develop` scans for:
      # https://nixos.org/manual/nix/stable/command-ref/new-cli/nix3-develop.html#flake-output-attributes
      in
      rec {
        packages = {
          # piker = poetry2nix.legacyPackages.x86_64-linux.mkPoetryEditablePackage {
          #   editablePackageSources = { piker = ./piker; };

          piker = p2npkgs.mkPoetryApplication {
            projectDir = projectDir;

            # SEE ABOVE for auto-genned input set, override
            # buncha deps with extras.. like `setuptools` mostly.
            # TODO: maybe propose a patch to p2n to show that you
            # can even do this in the edgecases docs?
            overrides = ahot_overrides;

            # XXX: won't work on llvmlite..
            # preferWheels = true;
          };
        };

        # devShells.default = pkgs.mkShell {
        #   projectDir = projectDir;
        #   python = "python3.10";
        #   overrides = ahot_overrides;
        #   inputsFrom = [ self.packages.x86_64-linux.piker ];
        #   packages = packages;
        #   # packages = [ poetry2nix.packages.${system}.poetry ];
        # };

        # TODO: grok the difference here..
        # - avoid re-cloning git repos on every develop entry..
        # - ideally allow hacking on the src code of some deps
        #   (tractor, pyqtgraph, tomlkit, etc.) WITHOUT having to
        #   re-install them every time a change is made.
        # - boot a usable xonsh inside the poetry virtualenv when
        #   defined via a custom entry point?
        devShells.default = p2npkgs.mkPoetryEnv {
        # env = p2npkgs.mkPoetryEnv {
            projectDir = projectDir;
            python = pkgs.python310;
            overrides = ahot_overrides;
            editablePackageSources = packages;
              # piker = "./";
              # tractor = "../tractor/";
            # };  # wut?
        };
      }
    );  # end of .outputs scope
}
@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@ -14,37 +14,71 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
'''
Cacheing apis and toolz.

"""
'''

from collections import OrderedDict
from contextlib import (
    asynccontextmanager,
)
from typing import (
    Awaitable,
    Callable,
    ParamSpec,
    TypeVar,
)

from tractor.trionics import maybe_open_context

from .brokers import get_brokermod
from .log import get_logger


log = get_logger(__name__)

T = TypeVar("T")
P = ParamSpec("P")

def async_lifo_cache(maxsize=128):
    """Async ``cache`` with a LIFO policy.

# TODO: move this to `tractor.trionics`..
# - egs. to replicate for tests: https://github.com/aio-libs/async-lru#usage
# - their suite as well:
#   https://github.com/aio-libs/async-lru/tree/master/tests
# - asked trio_util about it too:
#   https://github.com/groove-x/trio-util/issues/21
def async_lifo_cache(
    maxsize=128,

    # NOTE: typing style was learned from:
    # https://stackoverflow.com/a/71132186
) -> Callable[
    Callable[P, Awaitable[T]],
    Callable[
        Callable[P, Awaitable[T]],
        Callable[P, Awaitable[T]],
    ],
]:
    '''
    Async ``cache`` with a LIFO policy.

    Implemented my own since no one else seems to have
    a standard. I'll wait for the smarter people to come
    up with one, but until then...
    """

    NOTE: when decorating, due to this simple/naive implementation, you
    MUST call the decorator like,

    .. code:: python

        @async_lifo_cache()
        async def cache_target():

    '''
    cache = OrderedDict()

    def decorator(fn):
    def decorator(
        fn: Callable[P, Awaitable[T]],
    ) -> Callable[P, Awaitable[T]]:

        async def wrapper(*args):
        async def decorated(
            *args: P.args,
            **kwargs: P.kwargs,
        ) -> T:
            key = args
            try:
                return cache[key]

@ -53,27 +87,13 @@ def async_lifo_cache(maxsize=128):
                    # discard last added new entry
                    cache.popitem()

                # do it
                cache[key] = await fn(*args)
                # call underlying
                cache[key] = await fn(
                    *args,
                    **kwargs,
                )
                return cache[key]

        return wrapper
        return decorated

    return decorator
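

# (illustrative usage sketch, not part of this diff: `fetch_info` is
# a made-up target fn. per the docstring NOTE above, the decorator
# MUST be called with parens:
#
#   @async_lifo_cache(maxsize=2)
#   async def fetch_info(symbol: str) -> str:
#       ...  # expensive lookup, awaited once per unique arg set
#
# repeat calls with the same args return the cached value; once
# `maxsize` entries exist, the most recently added one is discarded
# via `cache.popitem()`, hence the LIFO policy.)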


@asynccontextmanager
async def open_cached_client(
    brokername: str,
) -> 'Client':  # noqa
    '''
    Get a cached broker client from the current actor's local vars.

    If one has not been setup do it and cache it.

    '''
    brokermod = get_brokermod(brokername)
    async with maybe_open_context(
        acm_func=brokermod.get_client,
    ) as (cache_hit, client):
        yield client
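

# (illustrative usage sketch, not part of this diff: every task in the
# current actor shares one lazily-created client per broker name,
#
#   async with open_cached_client('binance') as client:
#       ...  # re-entry from other tasks is a cache hit
# )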


@ -0,0 +1,16 @@
.accounting
-----------
A subsystem for transaction processing, storage and historical
measurement.


.pnl
----
BEP, the break even price: the price at which liquidating
a remaining position results in a zero PnL since the position was
"opened" in the destination asset.

PPU: price-per-unit: the "average cost" (in cumulative mean terms)
of the "entry" transactions which "make a position larger"; taking
a profit relative to this price means that you will "make more
profit than made prior" since the position was opened.
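
A minimal numeric sketch (illustrative only, this is *not* the actual
calc code; all fees are folded into the BEP)::

    def ppu(entries: list[tuple[float, float]]) -> float:
        # (size, price) pairs for the clears which "make the
        # position larger"; the size-weighted mean entry price.
        cum_size: float = sum(size for size, _ in entries)
        return sum(size * price for size, price in entries) / cum_size

    def bep(cum_size: float, ppu: float, fees: float) -> float:
        # the price at which closing `cum_size` nets exactly zero PnL
        return ppu + (fees / cum_size)

    avg: float = ppu([(1, 100.0), (1, 110.0)])  # -> 105.0
    bep(2, avg, fees=1.0)                       # -> 105.5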

@ -0,0 +1,107 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
"Accounting for degens": count dem numberz that tracks how much you got
for tendiez.

'''
from ..log import get_logger

from .calc import (
    iter_by_dt,
)
from ._ledger import (
    Transaction,
    TransactionLedger,
    open_trade_ledger,
)
from ._pos import (
    Account,
    load_account,
    load_account_from_ledger,
    open_pps,
    open_account,
    Position,
)
from ._mktinfo import (
    Asset,
    dec_digits,
    digits_to_dec,
    MktPair,
    Symbol,
    unpack_fqme,
    _derivs as DerivTypes,
)
from ._allocate import (
    mk_allocator,
    Allocator,
)


log = get_logger(__name__)

__all__ = [
    'Account',
    'Allocator',
    'Asset',
    'MktPair',
    'Position',
    'Symbol',
    'Transaction',
    'TransactionLedger',
    'dec_digits',
    'digits_to_dec',
    'iter_by_dt',
    'load_account',
    'load_account_from_ledger',
    'mk_allocator',
    'open_account',
    'open_pps',
    'open_trade_ledger',
    'unpack_fqme',
    'DerivTypes',
]


def get_likely_pair(
    src: str,
    dst: str,
    bs_mktid: str,

) -> str | None:
    '''
    Attempt to get the likely trading pair matching a given destination
    asset `dst: str`.

    '''
    try:
        src_name_start: int = bs_mktid.rindex(src)
    except (
        ValueError,   # substr not found
    ):
        # TODO: handle nested positions..(i.e.
        # positions where the src fiat was used to
        # buy some other dst which was further used
        # to buy another dst..)
        # log.warning(
        #     f'No src fiat {src} found in {bs_mktid}?'
        # )
        return None

    likely_dst: str = bs_mktid[:src_name_start]
    if likely_dst == dst:
        return bs_mktid
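

# (illustrative sketch, not part of this diff: for a binance-style
# market id,
#
#   get_likely_pair('usdt', 'btc', 'btcusdt')  # -> 'btcusdt'
#   get_likely_pair('usdt', 'eth', 'btcusdt')  # -> None
# )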


@ -23,9 +23,9 @@ from typing import Optional

from bidict import bidict

from ..data._source import Symbol
from ..data.types import Struct
from ..pp import Position
from ._pos import Position
from . import MktPair
from piker.types import Struct


_size_units = bidict({

@ -42,7 +42,7 @@ SizeUnit = Enum(

class Allocator(Struct):

    symbol: Symbol
    mkt: MktPair

    # TODO: if we ever want to support non-uniform entry-slot-proportion
    # "sizes"

@ -114,24 +114,24 @@ class Allocator(Struct):
        depending on position / order entry config.

        '''
        sym = self.symbol
        ld = sym.lot_size_digits
        mkt: MktPair = self.mkt
        ld: int = mkt.size_tick_digits

        size_unit = self.size_unit
        live_size = live_pp.size
        live_size = live_pp.cumsize
        abs_live_size = abs(live_size)
        abs_startup_size = abs(startup_pp.size)
        abs_startup_size = abs(startup_pp.cumsize)

        u_per_slot, currency_per_slot = self.step_sizes()

        if size_unit == 'units':
            slot_size = u_per_slot
            l_sub_pp = self.units_limit - abs_live_size
            slot_size: float = u_per_slot
            l_sub_pp: float = self.units_limit - abs_live_size

        elif size_unit == 'currency':
            live_cost_basis = abs_live_size * live_pp.ppu
            slot_size = currency_per_slot / price
            l_sub_pp = (self.currency_limit - live_cost_basis) / price
            live_cost_basis: float = abs_live_size * live_pp.ppu
            slot_size: float = currency_per_slot / price
            l_sub_pp: float = (self.currency_limit - live_cost_basis) / price

        else:
            raise ValueError(

@ -141,8 +141,14 @@ class Allocator(Struct):
        # an entry (adding-to or starting a pp)
        if (
            live_size == 0
            or (action == 'buy' and live_size > 0)
            or action == 'sell' and live_size < 0
            or (
                action == 'buy'
                and live_size > 0
            )
            or (
                action == 'sell'
                and live_size < 0
            )
        ):
            order_size = min(
                slot_size,

@ -178,7 +184,7 @@ class Allocator(Struct):
            order_size = max(slotted_pp, slot_size)

            if (
                abs_live_size < slot_size or
                abs_live_size < slot_size

                # NOTE: front/back "loading" heuristic:
                # if the remaining pp is in between 0-1.5x a slot's

@ -187,14 +193,17 @@ class Allocator(Struct):
                # **without** going past a net-zero pp. if the pp is
                # > 1.5x a slot size, then front load: exit a slot's and
                # expect net-zero to be acquired on the final exit.
                slot_size < pp_size < round((1.5*slot_size), ndigits=ld) or
                or slot_size < pp_size < round((1.5*slot_size), ndigits=ld)
                or (

                # underlying requires discrete (int) units (eg. stocks)
                # and thus our slot size (based on our limit) would
                # exit a fractional unit's worth so, presuming we aren't
                # supporting a fractional-units-style broker, we need
                # exit the final unit.
                ld == 0 and abs_live_size == 1
                    # underlying requires discrete (int) units (eg. stocks)
                    # and thus our slot size (based on our limit) would
                    # exit a fractional unit's worth so, presuming we aren't
                    # supporting a fractional-units-style broker, we need
                    # exit the final unit.
                    ld == 0
                    and abs_live_size == 1
                )
            ):
                order_size = abs_live_size

@ -203,13 +212,12 @@ class Allocator(Struct):
            # compute a fractional slots size to display
            slots_used = self.slots_used(
                Position(
                    symbol=sym,
                    size=order_size,
                    ppu=price,
                    bsuid=sym,
                    mkt=mkt,
                    bs_mktid=mkt.bs_mktid,
                )
            )

        # TODO: render an actual ``Executable`` type here?
        return {
            'size': abs(round(order_size, ndigits=ld)),
            'size_digits': ld,

@ -231,7 +239,7 @@ class Allocator(Struct):
        Calc and return the number of slots used by this ``Position``.

        '''
        abs_pp_size = abs(pp.size)
        abs_pp_size = abs(pp.cumsize)

        if self.size_unit == 'currency':
            # live_currency_size = size or (abs_pp_size * pp.ppu)

@ -249,7 +257,7 @@ class Allocator(Struct):

def mk_allocator(

    symbol: Symbol,
    mkt: MktPair,
    startup_pp: Position,

    # default allocation settings

@ -276,6 +284,6 @@ def mk_allocator(
    defaults.update(user_def)

    return Allocator(
        symbol=symbol,
        mkt=mkt,
        **defaults,
    )
@ -0,0 +1,421 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Trade and transaction ledger processing.

'''
from __future__ import annotations
from collections import UserDict
from contextlib import contextmanager as cm
from functools import partial
from pathlib import Path
from pprint import pformat
from types import ModuleType
from typing import (
    Any,
    Callable,
    Generator,
    Literal,
    TYPE_CHECKING,
)

from pendulum import (
    DateTime,
)
import tomli_w  # for fast ledger writing

from piker.types import Struct
from piker import config
from ..log import get_logger
from .calc import (
    iter_by_dt,
)

if TYPE_CHECKING:
    from ..data._symcache import (
        SymbologyCache,
    )

log = get_logger(__name__)


TxnType = Literal[
    'clear',
    'transfer',

    # TODO: see https://github.com/pikers/piker/issues/510
    # 'split',
    # 'rename',
    # 'resize',
    # 'removal',
]


class Transaction(Struct, frozen=True):

    # NOTE: this is a unified acronym also used in our `MktPair`
    # and can stand for any of a
    # "fully qualified <blank> endpoint":
    # - "market" in the case of financial trades
    #   (btcusdt.spot.binance).
    # - "merkle (tree)" aka a blockchain system "wallet transfers"
    #   (btc.blockchain)
    # - "money" for traditional (digital databases)
    #   *bank accounts* (usd.swift, eur.sepa)
    fqme: str

    tid: str | int  # unique transaction id
    size: float
    price: float
    cost: float  # commissions or other additional costs
    dt: DateTime

    # the "event type" in terms of "market events" see above and
    # https://github.com/pikers/piker/issues/510
    etype: TxnType = 'clear'

    # TODO: we can drop this right since we
    # can instead expect the backend to provide this
    # via the `MktPair`?
    expiry: DateTime | None = None

    # (optional) key-id defined by the broker-service backend which
    # ensures the instrument-symbol market key for this record is unique
    # in the "their backend/system" sense; i.e. this uid for the market
    # as defined (internally) in some namespace defined by the broker
    # service.
    bs_mktid: str | int | None = None

    def to_dict(
        self,
        **kwargs,
    ) -> dict:
        dct: dict[str, Any] = super().to_dict(**kwargs)

        # ensure we use a pendulum formatted
        # ISO style str here!
        dct['dt'] = str(self.dt)

        return dct
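
    # (illustrative sketch, not part of this diff: a single spot
    # market clear,
    #
    #   from pendulum import now
    #   txn = Transaction(
    #       fqme='btcusdt.spot.binance',
    #       tid='deadbeef',
    #       size=0.1,
    #       price=26_000.0,
    #       cost=0.25,  # fees
    #       dt=now(),
    #   )
    #   assert txn.etype == 'clear'  # the default event type
    # )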
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class TransactionLedger(UserDict):
 | 
			
		||||
    '''
 | 
			
		||||
    Very simple ``dict`` wrapper + ``pathlib.Path`` handle to
 | 
			
		||||
    a TOML formatted transaction file for enabling file writes
 | 
			
		||||
    dynamically whilst still looking exactly like a ``dict`` from the
 | 
			
		||||
    outside.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    # NOTE: see `open_trade_ledger()` for defaults, this should
 | 
			
		||||
    # never be constructed manually!
 | 
			
		||||
    def __init__(
 | 
			
		||||
        self,
 | 
			
		||||
        ledger_dict: dict,
 | 
			
		||||
        file_path: Path,
 | 
			
		||||
        account: str,
 | 
			
		||||
        mod: ModuleType,  # broker mod
 | 
			
		||||
        tx_sort: Callable,
 | 
			
		||||
        symcache: SymbologyCache,
 | 
			
		||||
 | 
			
		||||
    ) -> None:
 | 
			
		||||
        self.account: str = account
 | 
			
		||||
        self.file_path: Path = file_path
 | 
			
		||||
        self.mod: ModuleType = mod
 | 
			
		||||
        self.tx_sort: Callable = tx_sort
 | 
			
		||||
 | 
			
		||||
        self._symcache: SymbologyCache = symcache
 | 
			
		||||
 | 
			
		||||
        # any added txns we keep in that form for meta-data
 | 
			
		||||
        # gathering purposes
 | 
			
		||||
        self._txns: dict[str, Transaction] = {}
 | 
			
		||||
 | 
			
		||||
        super().__init__(ledger_dict)
 | 
			
		||||
 | 
			
		||||
    def __repr__(self) -> str:
 | 
			
		||||
        return (
 | 
			
		||||
            f'TransactionLedger: {len(self)}\n'
 | 
			
		||||
            f'{pformat(list(self.data))}'
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    @property
 | 
			
		||||
    def symcache(self) -> SymbologyCache:
 | 
			
		||||
        '''
 | 
			
		||||
        Read-only ref to backend's ``SymbologyCache``.
 | 
			
		||||
 | 
			
		||||
        '''
 | 
			
		||||
        return self._symcache
 | 
			
		||||
 | 
			
		||||
    def update_from_t(
 | 
			
		||||
        self,
 | 
			
		||||
        t: Transaction,
 | 
			
		||||
    ) -> None:
 | 
			
		||||
        '''
 | 
			
		||||
        Given an input `Transaction`, cast to `dict` and update
 | 
			
		||||
        from it's transaction id.
 | 
			
		||||
 | 
			
		||||
        '''
 | 
			
		||||
        self.data[t.tid] = t.to_dict()
 | 
			
		||||
        self._txns[t.tid] = t

    def iter_txns(
        self,
        symcache: SymbologyCache | None = None,

    ) -> Generator[
        Transaction,
        None,
        None,
    ]:
        '''
        Deliver trade records as `Transaction`s via generator.

        '''
        symcache = symcache or self._symcache

        if self.account == 'paper':
            from piker.clearing import _paper_engine
            norm_trade: Callable = partial(
                _paper_engine.norm_trade,
                brokermod=self.mod,
            )

        else:
            norm_trade: Callable = self.mod.norm_trade

        # datetime-sort and pack into txs
        for tid, txdict in self.tx_sort(self.data.items()):
            txn: Transaction = norm_trade(
                tid,
                txdict,
                pairs=symcache.pairs,
                symcache=symcache,
            )
            yield txn

    def to_txns(
        self,
        symcache: SymbologyCache | None = None,

    ) -> dict[str, Transaction]:
        '''
        Return the entire output from ``.iter_txns()`` in a ``dict``.

        '''
        txns: dict[str, Transaction] = {}
        for t in self.iter_txns(symcache=symcache):

            if not t:
                log.warning(f'{self.mod.name}:{self.account} TXN is -> {t}')
                continue

            txns[t.tid] = t

        return txns

    def write_config(self) -> None:
        '''
        Render the self.data ledger dict to its TOML file form.

        ALWAYS order datetime sorted!

        '''
        is_paper: bool = self.account == 'paper'

        symcache: SymbologyCache = self._symcache
        towrite: dict[str, Any] = {}
        for tid, txdict in self.tx_sort(self.data.copy()):
            # write blank-str expiry for non-expiring assets
            if (
                'expiry' in txdict
                and txdict['expiry'] is None
            ):
                txdict['expiry'] = ''

            # (maybe) re-write old acro-key
            if (
                is_paper
                # if symcache is empty/not supported (yet), don't
                # bother xD
                and symcache.mktmaps
            ):
                fqme: str = txdict.pop('fqsn', None) or txdict['fqme']
                bs_mktid: str | None = txdict.get('bs_mktid')

                if (
                    fqme not in symcache.mktmaps
                    or (
                        # also try to see if this is maybe a paper
                        # engine ledger in which case the bs_mktid
                        # should be the fqme as well!
                        bs_mktid
                        and fqme != bs_mktid
                    )
                ):
                    # always take any (paper) bs_mktid if defined and
                    # in the backend's cache key set.
                    if bs_mktid in symcache.mktmaps:
                        fqme: str = bs_mktid
                    else:
                        best_fqme: str = list(symcache.search(fqme))[0]
                        log.warning(
                            f'Could not find FQME: {fqme} in qualified set?\n'
                            f'Qualifying and expanding {fqme} -> {best_fqme}'
                        )
                        fqme = best_fqme

                if (
                    bs_mktid
                    and bs_mktid != fqme
                ):
                    # in the paper account case always make sure both the
                    # fqme and bs_mktid are fully qualified..
                    txdict['bs_mktid'] = fqme

                # in paper ledgers always write the latest
                # symbology key field: an FQME.
                txdict['fqme'] = fqme

            towrite[tid] = txdict

        with self.file_path.open(mode='wb') as fp:
            tomli_w.dump(towrite, fp)


def load_ledger(
    brokername: str,
    acctid: str,

    # for testing or manual load from file
    dirpath: Path | None = None,

) -> tuple[dict, Path]:
    '''
    Load a ledger (TOML) file from the user's config directory:
    $CONFIG_DIR/accounting/ledgers/trades_<brokername>_<acctid>.toml

    Return its `dict`-content and file path.

    '''
    import time
    try:
        import tomllib
    except ModuleNotFoundError:
        import tomli as tomllib

    ldir: Path = (
        dirpath
        or
        config._config_dir / 'accounting' / 'ledgers'
    )
    if not ldir.is_dir():
        ldir.mkdir()

    fname = f'trades_{brokername}_{acctid}.toml'
    fpath: Path = ldir / fname

    if not fpath.is_file():
        log.info(
            f'Creating new local trades ledger: {fpath}'
        )
        fpath.touch()

    with fpath.open(mode='rb') as cf:
        start = time.time()
        ledger_dict = tomllib.load(cf)
        log.debug(f'Ledger load took {time.time() - start}s')

    return ledger_dict, fpath
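

# Usage sketch (illustrative only; the `binance`/`paper` names are
# assumptions and must exist in the user's config tree):
#
#   ledger_dict, path = load_ledger('binance', 'paper')
#   print(f'{len(ledger_dict)} txns loaded from {path}')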


@cm
def open_trade_ledger(
    broker: str,
    account: str,

    allow_from_sync_code: bool = False,
    symcache: SymbologyCache | None = None,

    # default is to sort by detected datetime-ish field
    tx_sort: Callable = iter_by_dt,
    rewrite: bool = False,

    # for testing or manual load from file
    _fp: Path | None = None,

) -> Generator[TransactionLedger, None, None]:
    '''
    Idempotently create and read in a trade log file from the
    ``<configuration_dir>/ledgers/`` directory.

    Files are named per broker account of the form
    ``trades_<brokername>_<accountname>.toml``. The ``accountname``
    here is the name as defined in the user's ``brokers.toml`` config.

    '''
    from ..brokers import get_brokermod
    mod: ModuleType = get_brokermod(broker)

    ledger_dict, fpath = load_ledger(
        broker,
        account,
        dirpath=_fp,
    )
    cpy = ledger_dict.copy()

    # XXX NOTE: if not provided presume we are being called from
    # sync code and need to maybe run `trio` to generate..
    if symcache is None:

        # XXX: be mega pedantic and ensure the caller knows what
        # they're doing!
        if not allow_from_sync_code:
            raise RuntimeError(
                'You MUST set `allow_from_sync_code=True` when '
                'calling `open_trade_ledger()` from sync code! '
                'If you are calling from async code you MUST '
                'instead pass a `symcache: SymbologyCache`!'
            )

        from ..data._symcache import (
            get_symcache,
        )
        symcache: SymbologyCache = get_symcache(broker)

    assert symcache

    ledger = TransactionLedger(
        ledger_dict=cpy,
        file_path=fpath,
        account=account,
        mod=mod,
        symcache=symcache,
        tx_sort=getattr(mod, 'tx_sort', tx_sort),
    )
    try:
        yield ledger
    finally:
        if (
            ledger.data != ledger_dict
            or rewrite
        ):
            # TODO: show diff output?
            # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
            log.info(f'Updating ledger for {fpath}:\n')
            ledger.write_config()
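

# Sync-code usage sketch (illustrative only; `binance`/`paper` are
# assumed names). Mutations made inside the block are flushed back
# to the TOML file on exit:
#
#   with open_trade_ledger(
#       'binance',
#       'paper',
#       allow_from_sync_code=True,
#   ) as ledger:
#       for txn in ledger.iter_txns():
#           print(txn.tid)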

@@ -0,0 +1,766 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Market (pair) meta-info layer: sane addressing semantics and meta-data
for cross-provider marketplaces.

We introduce the concept of,

- a FQMA: fully qualified market address,
- a sane schema for FQMAs including derivatives,
- a msg-serializable description of markets for
  easy sharing with other pikers B)

'''
from __future__ import annotations
from decimal import (
    Decimal,
    ROUND_HALF_EVEN,
)
from typing import (
    Any,
    Literal,
)

from piker.types import Struct


# TODO: make these literals..
_underlyings: list[str] = [
    'stock',
    'bond',
    'crypto',
    'fiat',
    'commodity',
]

_crypto_derivs: list[str] = [
    'perpetual_future',
    'crypto_future',
]

_derivs: list[str] = [
    'swap',
    'future',
    'continuous_future',
    'option',
    'futures_option',

    # if we can't figure it out, presume the worst XD
    'unknown',
]

# NOTE: a tag for other subsystems to try
# and do default settings for certain things:
# - allocator does unit vs. dolla size limiting.
AssetTypeName: Literal[
    _underlyings
    +
    _derivs
    +
    _crypto_derivs
]

# eg. stock, future, option, bond etc.


def dec_digits(
    value: float | str | Decimal,

) -> int:
    '''
    Return the number of precision digits read from a decimal or float
    value.

    '''
    if value == 0:
        return 0

    return int(
        -Decimal(str(value)).as_tuple().exponent
    )


float_digits = dec_digits


def digits_to_dec(
    ndigits: int,
) -> Decimal:
    '''
    Return the minimum ``Decimal`` step for an input digit count.

    eg. 3 -> Decimal('0.001')

    '''
    if ndigits == 0:
        return Decimal('0')

    return Decimal('0.' + '0'*(ndigits-1) + '1')
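

# Doctest-style sanity checks derived from the two helpers above
# (illustrative only):
#
#   >>> dec_digits('0.001')
#   3
#   >>> digits_to_dec(3)
#   Decimal('0.001')
#   >>> dec_digits(0)
#   0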


class Asset(Struct, frozen=True):
    '''
    Container type describing any transactable asset and its
    contract-like and/or underlying technology meta-info.

    '''
    name: str
    atype: str  # AssetTypeName

    # minimum transaction size / precision.
    # eg. for buttcoin this is a "satoshi".
    tx_tick: Decimal

    # NOTE: additional info optionally packed in by the backend, but
    # should not be explicitly required in our generic API.
    info: dict | None = None

    # `None` is not toml-compat so drop info
    # if no extra data added..
    def to_dict(
        self,
        **kwargs,
    ) -> dict:
        dct = super().to_dict(**kwargs)
        if (info := dct.pop('info', None)):
            dct['info'] = info

        assert dct['tx_tick']
        return dct

    @classmethod
    def from_msg(
        cls,
        msg: dict[str, Any],
    ) -> Asset:
        return cls(
            tx_tick=Decimal(str(msg.pop('tx_tick'))),
            info=msg.pop('info', None),
            **msg,
        )

    def __str__(self) -> str:
        return self.name

    def quantize(
        self,
        size: float,

    ) -> Decimal:
        '''
        Truncate input ``size: float`` using ``Decimal``
        quantized form of the digit precision defined
        by ``self.tx_tick``.

        '''
        digits = float_digits(self.tx_tick)
        return Decimal(size).quantize(
            Decimal(f'1.{"0".ljust(digits, "0")}'),
            rounding=ROUND_HALF_EVEN
        )
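
    # Illustrative usage sketch (assuming a BTC-like asset with
    # satoshi precision, ie. ``tx_tick=Decimal('0.00000001')``):
    #
    #   btc = Asset(
    #       name='btc',
    #       atype='crypto',
    #       tx_tick=Decimal('0.00000001'),
    #   )
    #   btc.quantize(0.123456789123)  # -> Decimal('0.12345679')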

    @classmethod
    def guess_from_mkt_ep_key(
        cls,
        mkt_ep_key: str,
        atype: str | None = None,

    ) -> Asset:
        '''
        A hacky guess method for presuming a (target) asset's properties
        based on either the actual market endpoint key, or config settings
        from the user.

        '''
        # attempt to strip off any source asset
        # via presumed syntax of:
        # - <dst>/<src>
        # - <dst>.<src>
        # - etc.
        for char in ['/', '.']:
            dst, _, src = mkt_ep_key.partition(char)
            if src:
                # a detected source asset implies a currency pair;
                # presume fiat if no explicit type was given.
                if atype is None:
                    atype = 'fiat'
                break

        atype = atype or 'unknown'

        return Asset(
            name=dst,
            atype=atype,
            tx_tick=Decimal('0.01'),
        )


def maybe_cons_tokens(
    tokens: list[Any],
    delim_char: str = '.',
) -> str:
    '''
    Construct `str` output from a maybe-concatenation of the input
    sequence of elements in ``tokens``.

    '''
    return delim_char.join(filter(bool, tokens)).lower()
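

# eg. (empty tokens are filtered out before joining and the result
# is always lower-cased):
#
#   >>> maybe_cons_tokens(['MNQ/USD', '', '20230616', 'ib'])
#   'mnq/usd.20230616.ib'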


class MktPair(Struct, frozen=True):
    '''
    Market description for a pair of assets which are tradeable:
    a market which enables transactions of the form,
        buy: source asset -> destination asset
        sell: destination asset -> source asset

    The main intention of this type is for a **simple** cross-asset
    venue/broker normalized description type from which all
    market-auctions can be mapped from FQME identifiers.

    TODO: our eventual target fqme format/schema is:
    <dst>/<src>.<expiry>.<con_info_1>.<con_info_2>. -> .<venue>.<broker>
          ^ -- optional tokens ------------------------------- ^


    Notes:
    ------

    Some venues provide a different semantic (which we frankly find
    confusing and non-general) such as "base" and "quote" asset.
    For example this is how `binance` defines the terms:

    https://binance-docs.github.io/apidocs/websocket_api/en/#public-api-definitions
    https://binance-docs.github.io/apidocs/futures/en/#public-endpoints-info

    - *base* asset refers to the asset that is the *quantity* of a symbol.
    - *quote* asset refers to the asset that is the *price* of a symbol.

    In other words the "quote" asset is the asset that the market
    is pricing "buys" *in*, and the *base* asset is the one that the
    market allows you to "buy" an *amount of*. Put more simply the
    *quote* asset is our "source" asset and the *base* asset is our
    "destination" asset.

    This definition can be further understood reading our
    `.brokers.binance.api.Pair` type wherein the
    `Pair.[quote/base]AssetPrecision` field determines the (transfer)
    transaction precision available per asset; i.e. the satoshis
    unit in bitcoin for representing the minimum size of a
    transaction that can take place on the blockchain.

    '''
    dst: str | Asset
    # "destination asset" (name) used to buy *to*
    # (or used to sell *from*)

    price_tick: Decimal  # minimum price increment
    size_tick: Decimal  # minimum size (aka vlm) increment
    # the tick size is the number describing the smallest step in value
    # available in this market between the source and destination
    # assets.
    # https://en.wikipedia.org/wiki/Tick_size
    # https://en.wikipedia.org/wiki/Commodity_tick
    # https://en.wikipedia.org/wiki/Percentage_in_point

    # unique "broker id" since every market endpoint provider
    # has their own nomenclature and schema for market maps.
    bs_mktid: str
    broker: str  # the middle man giving access

    # NOTE: to start this field is optional but should eventually be
    # required; the reason is for backward compat since more positioning
    # calculations were not originally stored with a src asset..

    src: str | Asset = ''
    # "source asset" (name) used to buy *from*
    # (or used to sell *to*).

    venue: str = ''  # market venue provider name
    expiry: str = ''  # for derivs, expiry datetime parseable str

    # destination asset's financial type/classification name
    # NOTE: this is required for the order size allocator system,
    # since we use different default settings based on the type
    # of the destination asset, eg. futes use a units limits vs.
    # equities a $limit.
    # dst_type: AssetTypeName | None = None

    # source asset's financial type/classification name
    # TODO: is a src type required for trading?
    # there's no reason to need any more then the one-way alloc-limiter
    # config right?
    # src_type: AssetTypeName

    # for derivs, info describing contract, egs.
    # strike price, call or put, swap type, exercise model, etc.
    contract_info: list[str] | None = None

    # TODO: rename to sectype since all of these can
    # be considered "securities"?
    _atype: str = ''

    # allow explicit disable of the src part of the market
    # pair name -> useful for legacy markets like qqq.nasdaq.ib
    _fqme_without_src: bool = False

    # NOTE: when cast to `str` return fqme
    def __str__(self) -> str:
        return self.fqme

    def to_dict(
        self,
        **kwargs,
    ) -> dict:
        d = super().to_dict(**kwargs)

        if not isinstance(self.src, str):
            d['src'] = self.src.to_dict(**kwargs)
        else:
            d['src'] = str(self.src)

        if not isinstance(self.dst, str):
            d['dst'] = self.dst.to_dict(**kwargs)
        else:
            d['dst'] = str(self.dst)

        d['price_tick'] = str(self.price_tick)
        d['size_tick'] = str(self.size_tick)

        if self.contract_info is None:
            d.pop('contract_info')

        # d.pop('_fqme_without_src')

        return d

    @classmethod
    def from_msg(
        cls,
        msg: dict[str, Any],

    ) -> MktPair:
        '''
        Constructor for a received msg-dict normally received over IPC.

        '''
        if not isinstance(
            dst_asset_msg := msg.pop('dst'),
            str,
        ):
            dst: Asset = Asset.from_msg(dst_asset_msg)  # .copy()
        else:
            dst: str = dst_asset_msg

        src_asset_msg: dict = msg.pop('src')
        src: Asset = Asset.from_msg(src_asset_msg)  # .copy()

        # XXX NOTE: ``msgspec`` can encode `Decimal` but it doesn't
        # decode it by default since we aren't spec-cing these
        # msgs as structs proper to get them to decode implicitly
        # (yet) as per,
        # - https://github.com/pikers/piker/pull/354
        # - https://github.com/goodboy/tractor/pull/311
        # SO we have to ensure we do a struct type
        # cast (which `.copy()` does) to ensure we get the right
        # type!
        return cls(
            dst=dst,
            src=src,
            price_tick=Decimal(msg.pop('price_tick')),
            size_tick=Decimal(msg.pop('size_tick')),
            **msg,
        ).copy()

    @property
    def resolved(self) -> bool:
        return isinstance(self.dst, Asset)

    @classmethod
    def from_fqme(
        cls,
        fqme: str,

        price_tick: float | str,
        size_tick: float | str,
        bs_mktid: str,

        broker: str | None = None,
        **kwargs,

    ) -> MktPair:

        _fqme: str = fqme
        if (
            broker
            and broker not in fqme
        ):
            _fqme = f'{fqme}.{broker}'

        broker, mkt_ep_key, venue, expiry = unpack_fqme(_fqme)

        kven: str = kwargs.pop('venue', venue)
        if venue:
            assert venue == kven
        else:
            venue = kven

        exp: str = kwargs.pop('expiry', expiry)
        if expiry:
            assert exp == expiry
        else:
            expiry = exp

        dst: Asset = Asset.guess_from_mkt_ep_key(
            mkt_ep_key,
            atype=kwargs.get('_atype'),
        )

        # XXX: loading from a fqme string will
        # leave this pair as "unresolved" meaning
        # we don't yet have `.dst` set as an `Asset`
        # which we expect to be filled in by some
        # backend client with access to that data-info.
        return cls(
            dst=dst,
            # XXX: not resolved to ``Asset`` :(
            #src=src,

            broker=broker,
            venue=venue,
            # XXX NOTE: we presume this token
            # is the expiry for now!
            expiry=expiry,

            price_tick=price_tick,
            size_tick=size_tick,
            bs_mktid=bs_mktid,

            **kwargs,

        ).copy()
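
    # Illustrative `.from_fqme()` sketch (field values are made up;
    # real tick sizes come from the backend's symbology info):
    #
    #   mkt = MktPair.from_fqme(
    #       'mnq.cme.20230616.ib',
    #       price_tick='0.25',
    #       size_tick='1',
    #       bs_mktid='12345',
    #   )
    #   mkt.fqme  # -> 'mnq.cme.20230616.ib'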

    @property
    def key(self) -> str:
        '''
        The "endpoint key" for this market.

        '''
        return self.pair()

    def pair(
        self,
        delim_char: str | None = None,
    ) -> str:
        '''
        The "endpoint asset pair key" for this market.
        Eg. mnq/usd or btc/usdt or xmr/btc

        In most other tina platforms this is referred to as the
        "symbol".

        '''
        return maybe_cons_tokens(
            [str(self.dst),
             str(self.src)],
            # TODO: make the default '/'
            delim_char=delim_char or '',
        )

    @property
    def suffix(self) -> str:
        '''
        The "contract suffix" for this market.

        Eg. mnq/usd.20230616.cme.ib
                    ^ ----- ^
        or tsla/usd.20230324.200c.cboe.ib
                    ^ ---------- ^

        In most other tina platforms they only show you these details in
        some kinda "meta data" format, we have FQMEs so we do this up
        front and explicit.

        '''
        field_strs = [self.expiry]
        con_info = self.contract_info
        if con_info is not None:
            field_strs.extend(con_info)

        return maybe_cons_tokens(field_strs)

    def get_fqme(
        self,

        # NOTE: allow dropping the source asset from the
        # market endpoint's pair key. Eg. to change
        # mnq/usd.<> -> mnq.<> which is useful when
        # searching (legacy) stock exchanges.
        without_src: bool = False,
        delim_char: str | None = None,

    ) -> str:
        '''
        Return the fully qualified market endpoint-address for the
        pair of transacting assets.

        fqme = "fully qualified market endpoint"

        And yes, you pronounce it colloquially as read..

        Basically the idea here is that all client code (consumers of
        piker's APIs which query the data/broker-provider agnostic
        layer(s)) should be able to tell which backend / venue /
        derivative each data feed/flow is from by an explicit
        string-key of the current form:

        <market-instrument-name>
            .<venue>
            .<expiry>
            .<derivative-suffix-info>
            .<brokerbackendname>

        eg. for an explicit daq mini futes contract: mnq.cme.20230317.ib

        TODO: I have thoughts that we should actually change this to be
        more like an "attr lookup" (like how the web should have done
        urls, but marketing peeps ruined it etc. etc.)

        <broker>.<venue>.<instrumentname>.<suffixwithmetadata>

        TODO:
        See community discussion on naming and nomenclature, order
        of addressing hierarchy, general schema, internal representation:

        https://github.com/pikers/piker/issues/467

        '''
        key: str = (
            self.pair(delim_char=delim_char)
            if not (without_src or self._fqme_without_src)
            else str(self.dst)
        )

        return maybe_cons_tokens([
            key,  # final "pair name" (eg. qqq[/usd], btcusdt)
            self.venue,
            self.suffix,  # includes expiry and other con info
            self.broker,
        ])

    # NOTE: the main idea behind an fqme is to map a "market address"
    # to some endpoint from a transaction provider (eg. a broker) such
    # that we build a table of `fqme: str -> bs_mktid: Any` where any "piker
    # market address" maps 1-to-1 to some broker trading endpoint.
    # @cached_property
    fqme = property(get_fqme)
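
    # eg. (sketch; continuing the assumed `mnq` market from above,
    # `bs_fqme` is defined just below and drops the broker token):
    #
    #   mkt.fqme     # -> 'mnq.cme.20230616.ib'
    #   mkt.bs_fqme  # -> 'mnq.cme.20230616'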

    def get_bs_fqme(
        self,
        **kwargs,
    ) -> str:
        '''
        FQME sin broker part XD

        '''
        sin_broker, *_ = self.get_fqme(**kwargs).rpartition('.')
        return sin_broker

    bs_fqme = property(get_bs_fqme)

    @property
    def fqsn(self) -> str:
        return self.fqme

    def quantize(
        self,
        size: float,

        quantity_type: Literal['price', 'size'] = 'size',

    ) -> Decimal:
        '''
        Truncate input ``size: float`` using ``Decimal``
        and ``.size_tick``'s # of digits.

        '''
        match quantity_type:
            case 'price':
                digits = float_digits(self.price_tick)
            case 'size':
                digits = float_digits(self.size_tick)

        return Decimal(size).quantize(
            Decimal(f'1.{"0".ljust(digits, "0")}'),
            rounding=ROUND_HALF_EVEN
        )

    # TODO: BACKWARD COMPAT, TO REMOVE?
    @property
    def type_key(self) -> str:

        # if set explicitly then use it!
        if self._atype:
            return self._atype

        if isinstance(self.dst, Asset):
            return str(self.dst.atype)

        return 'UNKNOWN'

    @property
    def price_tick_digits(self) -> int:
        return float_digits(self.price_tick)

    @property
    def size_tick_digits(self) -> int:
        return float_digits(self.size_tick)


def unpack_fqme(
    fqme: str,

    broker: str | None = None

) -> tuple[str, ...]:
    '''
    Unpack a fully-qualified-symbol-name to ``tuple``.

    '''
    venue = ''
    suffix = ''

    # TODO: probably reverse the order of all this XD
    tokens = fqme.split('.')

    match tokens:
        case [mkt_ep, broker]:
            # probably crypto
            return (
                broker,
                mkt_ep,
                '',
                '',
            )

        # TODO: swap venue and suffix/deriv-info here?
        case [mkt_ep, venue, suffix, broker]:
            pass

        # handle `bs_mktid` + `broker` input case
        case [
            mkt_ep, venue, suffix
        ] if (
            broker
            and suffix != broker
        ):
            pass

        case [mkt_ep, venue, broker]:
            suffix = ''

        case _:
            raise ValueError(f'Invalid fqme: {fqme}')

    return (
        broker,
        mkt_ep,
        venue,
        # '.'.join([mkt_ep, venue]),
        suffix,
    )
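

# eg. (sketch of the match-arm outputs above):
#
#   >>> unpack_fqme('btcusdt.binance')
#   ('binance', 'btcusdt', '', '')
#   >>> unpack_fqme('mnq.cme.20230616.ib')
#   ('ib', 'mnq', 'cme', '20230616')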


class Symbol(Struct):
    '''
    I guess this is some kinda container thing for dealing with
    all the different meta-data formats from brokers?

    '''
    key: str

    broker: str = ''
    venue: str = ''

    # precision descriptors for price and vlm
    tick_size: Decimal = Decimal('0.01')
    lot_tick_size: Decimal = Decimal('0.0')

    suffix: str = ''
    broker_info: dict[str, dict[str, Any]] = {}

    @classmethod
    def from_fqme(
        cls,
        fqsn: str,
        info: dict[str, Any],

    ) -> Symbol:
        broker, mktep, venue, suffix = unpack_fqme(fqsn)
        tick_size = info.get('price_tick_size', 0.01)
        lot_size = info.get('lot_tick_size', 0.0)

        return Symbol(
            broker=broker,
            key=mktep,
            tick_size=tick_size,
            lot_tick_size=lot_size,
            venue=venue,
            suffix=suffix,
            broker_info={broker: info},
        )

    @property
    def type_key(self) -> str:
        return list(self.broker_info.values())[0]['asset_type']

    @property
    def tick_size_digits(self) -> int:
        return float_digits(self.tick_size)

    @property
    def lot_size_digits(self) -> int:
        return float_digits(self.lot_tick_size)

    @property
    def price_tick(self) -> Decimal:
        return Decimal(str(self.tick_size))

    @property
    def size_tick(self) -> Decimal:
        return Decimal(str(self.lot_tick_size))

    @property
    def broker(self) -> str:
        return list(self.broker_info.keys())[0]

    @property
    def fqme(self) -> str:
        return maybe_cons_tokens([
            self.key,  # final "pair name" (eg. qqq[/usd], btcusdt)
            self.venue,
            self.suffix,  # includes expiry and other con info
            self.broker,
        ])

    def quantize(
        self,
        size: float,
    ) -> Decimal:
        digits = float_digits(self.lot_tick_size)
        return Decimal(size).quantize(
            Decimal(f'1.{"0".ljust(digits, "0")}'),
            rounding=ROUND_HALF_EVEN
        )

    # NOTE: when cast to `str` return fqme
    def __str__(self) -> str:
        return self.fqme

@@ -0,0 +1,983 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Personal/Private position parsing, calculating, summarizing in a way
that doesn't try to cuk most humans who prefer to not lose their moneys..

(looking at you `ib` and dirt-bird friends)

'''
from __future__ import annotations
from contextlib import contextmanager as cm
from decimal import Decimal
from pprint import pformat
from pathlib import Path
from types import ModuleType
from typing import (
    Any,
    Iterator,
    Generator,
)

import pendulum
from pendulum import (
    datetime,
    now,
)
import polars as pl
import tomlkit

from ._ledger import (
    Transaction,
    TransactionLedger,
)
from ._mktinfo import (
    MktPair,
    Asset,
    unpack_fqme,
)
from .calc import (
    ppu,
    # iter_by_dt,
)
from .. import config
from ..clearing._messages import (
    BrokerdPosition,
)
from piker.types import Struct
from piker.data._symcache import SymbologyCache
from ..log import get_logger

log = get_logger(__name__)


class Position(Struct):
    '''
    An asset "position" model with attached clearing transaction history.

    A financial "position" in `piker` terms is a summary of accounting
    metrics computed from a transaction ledger; generally it describes
    some accumulative "size" and "average price" from the summarized
    underlying transaction set.

    In piker we focus on the `.ppu` (price per unit) and the `.bep`
    (break even price) including all transaction entries and exits since
    the last "net-zero" size of the destination asset's holding.

    This interface serves as an object API for computing and
    tracking positions as well as supports serialization for
    storage in the local file system (in TOML) and to interchange
    as a msg over IPC.

    '''
    mkt: MktPair

    # can be +ve or -ve for long/short
    # size: float

    # "price-per-unit price" above or below which pnl moves above and
    # below zero for the entirety of the current "trade state". The ppu
    # is only modified on "increases of" the absolute size of a position
    # in one of a long/short "direction" (i.e. abs(.size_i) > 0 after
    # the next transaction given .size was > 0 before that tx, and vice
    # versa for -ve sized positions).
    # ppu: float

    # TODO: break-even-price support!
    # bep: float

    # unique "backend system market id"
    bs_mktid: str

    split_ratio: int | None = None

    # TODO: use a `pl.DataFrame` instead?
    _events: dict[str, Transaction | dict] = {}

    @property
    def expiry(self) -> datetime | None:
        '''
        Security expiry if it has a limited lifetime.

        For non-derivative markets this is normally `None`.

        '''
        exp: str | None = self.mkt.expiry
        if exp is None:
            return None

        match exp.lower():
            # empty str, 'perp' (contract) or simply a null
            # signifies instrument with NO expiry.
            case 'perp' | '' | None:
                return None

            case str():
                return pendulum.parse(exp)

            case _:
                raise ValueError(
                    f'Unhandled `MktPair.expiry`: `{exp}`'
                )

    # TODO: idea: "real LIFO" dynamic positioning.
    # - when a trade takes place where the pnl for
    # the (set of) trade(s) is below the breakeven price
    # it may be that the trader took a +ve pnl on a short(er)
    # term trade in the same account.
    # - in this case we could recalc the be price to
    # be reverted back to its prior value before the nearest term
    # trade was opened.?
    # def bep() -> float:
    #     ...
    def clears_df(self) -> pl.DataFrame:
        ...

    def clearsitems(self) -> list[tuple[str, dict]]:
        return ppu(
            self.iter_by_type('clear'),
            as_ledger=True,
        )

    def iter_by_type(
        self,
        etype: str,

    ) -> Iterator[dict | Transaction]:
        '''
        Iterate the internally managed ``._events: dict`` table in
        datetime-stamped order.

        '''
        # sort on the expected datetime field
        # for event in iter_by_dt(
        for event in sorted(
            self._events.values(),
            key=lambda entry: entry.dt,
        ):
            # if event.etype == etype:
            match event:
                case (
                    {'etype': _etype} |
                    Transaction(etype=str(_etype))
                ):
                    assert _etype == etype
                    yield event

    def minimized_clears(self) -> list[dict]:
        '''
        Minimize the position's clears entries by removing
        all transactions before the last net-zero size except for when
        a clear event causes a position "side" change (i.e. long to short
        after a single fill) wherein we store the transaction prior to the
        net-zero pass.

        This avoids unnecessary history irrelevant to the current
        non-net-zero size state when serializing for offline storage.

        '''
        # scan for the last "net zero" position by iterating
        # transactions until the next net-zero cumsize, rinse,
        # repeat.
        cumsize: float = 0
        clears_since_zero: list[dict] = []

        for tid, cleardict in self.clearsitems():
            cumsize = float(
                # self.mkt.quantize(cumsize + cleardict['tx'].size
                self.mkt.quantize(cleardict['cumsize'])
            )
            clears_since_zero.append(cleardict)

            # NOTE: always pop sign change since we just use it to
            # determine which entry to clear "up to".
            sign_change: bool = cleardict.pop('sign_change')
            if cumsize == 0:
                clears_since_zero = clears_since_zero[:-2]
                # clears_since_zero.clear()

            elif sign_change:
                clears_since_zero = clears_since_zero[:-1]

        return clears_since_zero

    def to_pretoml(self) -> tuple[str, dict]:
        '''
        Prep this position's data contents for export as an entry
        in a TOML "account file" (such as
        `account.binance.paper.toml`) including re-structuring of
        the ``._events`` entries as an array of inline-subtables
        for better ``pps.toml`` compactness.

        '''
        mkt: MktPair = self.mkt
        assert isinstance(mkt, MktPair)

        # TODO: we need to figure out how to have one top level
        # listing venue here even when the backend isn't providing
        # it via the trades ledger..
        # drop symbol obj in serialized form
        fqme: str = mkt.fqme
        broker, mktep, venue, suffix = unpack_fqme(fqme)

        # an asset resolved mkt where we have ``Asset`` info about
        # each tradeable asset in the market.
        asset_type: str = 'n/a'
        if mkt.resolved:
            dst: Asset = mkt.dst
            asset_type = dst.atype

        asdict: dict[str, Any] = {
            'bs_mktid': self.bs_mktid,
            # 'expiry': self.expiry or '',
            'asset_type': asset_type,
            'price_tick': mkt.price_tick,
            'size_tick': mkt.size_tick,
        }
        if exp := self.expiry:
            asdict['expiry'] = exp

        clears_since_zero: list[dict] = self.minimized_clears()

        # setup a "multi-line array of inline tables" which we call
        # the "clears table", contained by each position entry in
        # an "account file".
        clears_table: tomlkit.Array = tomlkit.array()
        clears_table.multiline(
            multiline=True,
            indent='',
        )

        for entry in clears_since_zero:
            inline_table = tomlkit.inline_table()

            # insert optional clear fields in column order
            for k in ['ppu', 'cumsize']:
                if val := entry.get(k):
                    inline_table[k] = val

            # insert required fields
            for k in ['price', 'size', 'cost']:
                inline_table[k] = entry[k]

            # NOTE: we don't actually need to serialize the datetime
            # to a parsable `str` since `tomlkit` supports a native
            # `DateTime`, but seems like we're not doing it entirely
            # in clearing tables yet?
            inline_table['dt'] = entry['dt']  # .isoformat('T')

            tid: str = entry['tid']
            inline_table['tid'] = tid
            clears_table.append(inline_table)

        # assert not events
        asdict['clears'] = clears_table

        return fqme, asdict
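
    # Sketch of the rendered "clears table" TOML shape (values made
    # up for illustration; the enclosing table path depends on the
    # account-file writer and is not shown here):
    #
    #   clears = [
    #       {ppu = 27000.0, cumsize = 0.1, price = 27000.0, size = 0.1, cost = 0.5, dt = 2023-06-16T00:00:00Z, tid = "abc123"},
    #   ]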
 | 
			
		||||
 | 
			
		||||
    def update_from_msg(
 | 
			
		||||
        self,
 | 
			
		||||
        msg: BrokerdPosition,
 | 
			
		||||
 | 
			
		||||
    ) -> None:
 | 
			
		||||
        '''
 | 
			
		||||
        Hard-set the current position from a remotely-received
 | 
			
		||||
        (normally via IPC) msg by applying the msg as the one (and
 | 
			
		||||
        only) txn in the `._events` table thus forcing the current
 | 
			
		||||
        asset allocation blindly.
 | 
			
		||||
 | 
			
		||||
        '''
 | 
			
		||||
        mkt: MktPair = self.mkt
 | 
			
		||||
        now_dt: pendulum.DateTime = now()
 | 
			
		||||
        now_str: str = str(now_dt)
 | 
			
		||||
 | 
			
		||||
        # XXX: wipe all prior txn history since we wanted it we wouldn't
 | 
			
		||||
        # be using this method to compute our state!
 | 
			
		||||
        self._events.clear()
 | 
			
		||||
 | 
			
		||||
        # NOTE WARNING XXX: we summarize the pos with a single
 | 
			
		||||
        # summary transaction (for now) until we either pass THIS
 | 
			
		||||
        # type as msg directly from emsd or come up with a better
 | 
			
		||||
        # way?
 | 
			
		||||
        t = Transaction(
 | 
			
		||||
            fqme=mkt.fqme,
 | 
			
		||||
            bs_mktid=mkt.bs_mktid,
 | 
			
		||||
            size=msg['size'],
 | 
			
		||||
            price=msg['avg_price'],
 | 
			
		||||
            cost=0,
 | 
			
		||||
 | 
			
		||||
            # NOTE: special provisions required!
 | 
			
		||||
            # - tid needs to be unique or this txn will be ignored!!
 | 
			
		||||
            tid=now_str,
 | 
			
		||||
 | 
			
		||||
            # TODO: also figure out how to avoid this!
 | 
			
		||||
            dt=now_dt,
 | 
			
		||||
        )
 | 
			
		||||
        self.add_clear(t)
 | 
			
		||||
 | 
			
		||||
    @property
    def dsize(self) -> float:
        '''
        The "dollar" size of the pp, normally in source asset
        (fiat) units.

        '''
        return self.ppu * self.cumsize

    def expired(self) -> bool:
        '''
        Predicate which checks if the contract/instrument is past
        its expiry.

        '''
        return bool(self.expiry) and self.expiry < now()

    def add_clear(
        self,
        t: Transaction,
    ) -> bool:
        '''
        Update clearing table by calculating the rolling ppu and
        (accumulative) size in both the clears entry and local
        attrs state.

        Inserts are always done in datetime sorted order.

        '''
        # added: bool = False
        tid: str = t.tid
        if tid in self._events:
            log.warning(f'{t} is already added?!')
            # return added

        # TODO: apparently this IS possible with a dict but not
        # common and probably not that beneficial unless we're also
        # going to do cum-calcs on each insert?
        # https://stackoverflow.com/questions/38079171/python-insert-new-element-into-sorted-list-of-dictionaries
        # from bisect import insort
        # insort(
        #     self._clears,
        #     clear,
        #     key=lambda entry: entry['dt']
        # )
        self._events[tid] = t
        return True

    # TODO: compute these incrementally instead
    # of re-looping through each time resulting in O(n**2)
    # behaviour..? Can we have some kinda clears-len-to-cached
    # output subsys?
    def calc_ppu(self) -> float:
        return ppu(self.iter_by_type('clear'))

        # # return self.clearsdict()
        # # )
        # return list(self.clearsdict())[-1][1]['ppu']

    @property
    def ppu(self) -> float:
        return round(
            self.calc_ppu(),
            ndigits=self.mkt.price_tick_digits,
        )

    def calc_size(self) -> float:
        '''
        Calculate the unit size of this position in the destination
        asset using the clears/trade event table; zero if expired.

        '''
        # time-expired pps (normally derivatives) are "closed"
        # and have a zero size.
        if self.expired():
            return 0.

        clears: list[tuple[str, dict]] = self.clearsitems()
        if clears:
            return clears[-1][1]['cumsize']
        else:
            return 0.

        # if self.split_ratio is not None:
        #     size = round(size * self.split_ratio)

        # return float(
        #     self.mkt.quantize(size),
        # )

    # TODO: ideally we don't implicitly recompute the
    # full sequence from `.clearsdict()` every read..
    # the writer-updates-local-attr-state was actually kinda nice
    # before, but sometimes led to hard to detect bugs when
    # state was de-synced.
    @property
    def cumsize(self) -> float:

        if (
            self.expiry
            and self.expiry < now()
        ):
            return 0

        return round(
            self.calc_size(),
            ndigits=self.mkt.size_tick_digits,
        )

    # TODO: once we have an `.events` table with diff
    # mkt event types..?
    # def suggest_split(self) -> float:
    #     ...


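def _dedup_by_tid_sketch() -> int:
    # NOTE: a hypothetical sketch (not part of the original
    # changeset) of the dict-keyed idempotency that
    # `Position.add_clear()` above relies on: re-adding a txn with
    # the same `tid` overwrites instead of double-counting.
    events: dict[str, dict] = {}
    txn = {'tid': 'fill-1', 'size': 1., 'price': 10.}
    events[txn['tid']] = txn
    events[txn['tid']] = txn  # re-reported fill: overwrite, not append
    return len(events)  # -> 1

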
class Account(Struct):
    '''
    The real-time (double-entry accounting) state of
    a given **asset ownership tracking system**, normally offered
    or measured from some brokerage, CEX or (implied virtual)
    summary crypto$ "wallets" aggregated and tracked over some set
    of DEX-es.

    Both market-mapped and ledger-system-native (aka inter-account
    "transfers") transactions are accounted and they pertain to
    (implied) PnL relative to any other accountable asset.

    More specifically in piker terms, an account tracks all of:

    - the *balances* of all assets currently available for use either
      in (future) market or (inter-account/wallet) transfer
      transactions.
    - a transaction *ledger* from a given brokerd backend which
      is a recording of all (known) such transactions from the past.
    - a set of financial *positions* as measured from the current
      ledger state.

    See the semantic origins from double-entry bookkeeping:
    https://en.wikipedia.org/wiki/Double-entry_bookkeeping

    '''
    mod: ModuleType
    acctid: str
    pps: dict[str, Position]

    conf_path: Path
    conf: dict | None = {}

    # TODO: track a table of asset balances as `.balances:
    # dict[Asset, float]`?

    @property
    def brokername(self) -> str:
        return self.mod.name

    def update_from_ledger(
        self,
        ledger: TransactionLedger | dict[str, Transaction],
        cost_scalar: float = 2,
        symcache: SymbologyCache | None = None,

        _mktmap_table: dict[str, MktPair] | None = None,

    ) -> dict[str, Position]:
        '''
        Update the internal `.pps[str, Position]` table from input
        transactions recomputing the price-per-unit (ppu) and
        accumulative size for each entry.

        '''
        if (
            not isinstance(ledger, TransactionLedger)
        ):
            if symcache is None:
                raise RuntimeError(
                    'No ledger provided!\n'
                    'We cannot determine the `MktPair`s without a symcache..\n'
                    'Please provide `symcache: SymbologyCache` when '
                    'processing NEW positions!'
                )
            itertxns = sorted(
                ledger.values(),
                key=lambda t: t.dt,
            )
        else:
            itertxns = ledger.iter_txns()
            symcache = ledger.symcache

        pps = self.pps
        updated: dict[str, Position] = {}

        # lifo update all pps from records, ensuring
        # we compute the PPU and size sorted in time!
        for txn in itertxns:
            fqme: str = txn.fqme
            bs_mktid: str = txn.bs_mktid

            # template the mkt-info presuming legacy market ticks
            # if no info exists in the transactions..
            try:
                mkt: MktPair = symcache.mktmaps[fqme]
            except KeyError:
                if _mktmap_table is None:
                    raise

                # XXX: caller is allowed to provide a fallback
                # mktmap table for the case where a new position is
                # being added and the preloaded symcache didn't
                # have this entry prior (eg. with frickin IB..)
                mkt = _mktmap_table[fqme]

            if not (pos := pps.get(bs_mktid)):

                assert isinstance(
                    mkt,
                    MktPair,
                )

                # if no existing pos, allocate a fresh one.
                pos = pps[bs_mktid] = Position(
                    mkt=mkt,
                    bs_mktid=bs_mktid,
                )
            else:
                # NOTE: if for some reason a "less resolved" mkt pair
                # info has been set (based on the `.fqme` being
                # a shorter string), instead use the one from the
                # transaction since it likely has (more) full
                # information from the provider.
                if len(pos.mkt.fqme) < len(fqme):
                    pos.mkt = mkt

            # update clearing acnt!
            # NOTE: likely you'll see repeats of the same
            # ``Transaction`` passed in here if/when you are
            # restarting a ``brokerd.ib`` where the API will
            # re-report trades from the current session, so we need
            # to make sure we don't "double count" these in pp
            # calculations; `Position.add_clear()` stores txs in
            # a `._events: dict[tid, tx]` which should always
            # ensure this is true!
            pos.add_clear(txn)
            updated[txn.bs_mktid] = pos

        # NOTE: deliver only the position entries that were
        # actually updated (modified the state) from the input
        # transaction set.
        return updated

    def dump_active(
        self,
    ) -> tuple[
        dict[str, Position],
        dict[str, Position]
    ]:
        '''
        Iterate all tabulated positions, render active positions to
        a ``dict`` format amenable to serialization (via TOML) and drop
        from state (``.pps``) as well as return in a ``dict`` all
        ``Position``s which have recently closed.

        '''
        # NOTE: newly closed positions are also important to report/return
        # since a consumer, like an order mode UI ;), might want to react
        # based on the closure (for example removing the breakeven line
        # and clearing the entry from any lists/monitors).
        closed_pp_objs: dict[str, Position] = {}
        open_pp_objs: dict[str, Position] = {}

        pp_objs = self.pps
        for bs_mktid in list(pp_objs):
            pos = pp_objs[bs_mktid]
            # pos.ensure_state()

            # "net-zero" is a "closed" position
            if pos.cumsize == 0:
                # NOTE: we DO NOT pop the pos here since it can still be
                # used to check for duplicate clears that may come in as
                # new transactions from some backend API and need to be
                # ignored; the closed positions won't be written to the
                # ``pps.toml`` since ``pp_active_entries`` above is what's
                # written.
                closed_pp_objs[bs_mktid] = pos

            else:
                open_pp_objs[bs_mktid] = pos

        return open_pp_objs, closed_pp_objs

    def prep_toml(
        self,
        active: dict[str, Position] | None = None,

    ) -> dict[str, Any]:

        if active is None:
            active, _ = self.dump_active()

        # ONLY dict-serialize all active positions; those that are
        # closed we don't store in the ``pps.toml``.
        to_toml_dict: dict[str, Any] = {}

        pos: Position
        for bs_mktid, pos in active.items():
            # pos.ensure_state()

            # serialize to pre-toml form
            # NOTE: we only store the minimal amount of clears that
            # make up this position since the last net-zero state,
            # see `Position.to_pretoml()` for details
            fqme, asdict = pos.to_pretoml()

            # clears: list[dict] = asdict['clears']
            # assert 'Datetime' not in clears[0]['dt']
            log.info(f'Updating active pp: {fqme}')

            # XXX: ugh, it's cuz we push the section under
            # the broker name.. maybe we need to rethink this?
            brokerless_key = fqme.removeprefix(f'{self.brokername}.')
            to_toml_dict[brokerless_key] = asdict

        return to_toml_dict

    def write_config(self) -> None:
        '''
        Write the current account state to the user's account TOML file,
        normally something like ``pps.toml``.

        '''
        # TODO: show diff output?
        # https://stackoverflow.com/questions/12956957/print-diff-of-python-dictionaries
        # active, closed_pp_objs = acnt.dump_active()

        active, closed = self.dump_active()
        pp_entries = self.prep_toml(active=active)
        if pp_entries:
            log.info(
                f'Updating positions in ``{self.conf_path}``:\n'
                f'{pformat(pp_entries)}'
            )

            if self.brokername in self.conf:
                log.warning(
                    f'Rewriting {self.conf_path} keys to drop <broker.acct>!'
                )
                # legacy key schema including <brokername.account>, so
                # rewrite all entries to drop those tables since we now
                # put that in the filename!
                accounts = self.conf.pop(self.brokername)
                assert len(accounts) == 1
                entries = accounts.pop(self.acctid)
                self.conf.update(entries)

            self.conf.update(pp_entries)

            # drop any entries that are computed as net-zero since
            # we don't care about storing them in the pps file.
            if closed:
                bs_mktid: str
                for bs_mktid, pos in closed.items():
                    fqme: str = pos.mkt.fqme
                    if fqme in self.conf:
                        self.conf.pop(fqme)
                    else:
                        # TODO: we really need a diff set of
                        # loglevels/colors per subsys.
                        log.warning(
                            f'Recent position for {fqme} was closed!'
                        )

        # if there are no active position entries according
        # to the toml dump output above, then clear the config
        # file of all entries.
        elif self.conf:
            for entry in list(self.conf):
                del self.conf[entry]

        # XXX WTF: if we use a tomlkit.Integer here we get this
        # super weird --1 thing going on for cumsize!?1!
        # NOTE: the fix was to always float() the size value loaded
        # in open_pps() below!
        config.write(
            config=self.conf,
            path=self.conf_path,
            fail_empty=False,
        )


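def _legacy_key_migration_sketch() -> None:
    # NOTE: a hypothetical sketch (not part of the original
    # changeset) of the `<brokername>.<acctid>` table flattening
    # that `Account.write_config()` above (and `open_account()`
    # below) applies, shown on a plain dict.
    conf = {'binance': {'paper': {'xbtusdt': {'clears': []}}}}
    accounts = conf.pop('binance')
    conf.update(accounts.pop('paper'))
    assert conf == {'xbtusdt': {'clears': []}}

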
def load_account(
    brokername: str,
    acctid: str,

    dirpath: Path | None = None,

) -> tuple[dict, Path]:
    '''
    Load an accounting (with positions) file from
    $CONFIG_DIR/accounting/account.<brokername>.<acctid>.toml

    Where normally $CONFIG_DIR = ~/.config/piker/
    and we implicitly create an accounting subdir which should
    normally be linked to a git repo managed by the user B)

    '''
    legacy_fn: str = f'pps.{brokername}.{acctid}.toml'
    fn: str = f'account.{brokername}.{acctid}.toml'

    dirpath: Path = dirpath or (config._config_dir / 'accounting')
    if not dirpath.is_dir():
        dirpath.mkdir()

    conf, path = config.load(
        path=dirpath / fn,
        decode=tomlkit.parse,
        touch_if_dne=True,
    )

    if not conf:
        legacypath = dirpath / legacy_fn
        log.warning(
            f'Your account file is using the legacy `pps.` prefix..\n'
            f'Rewriting contents to new name -> {path}\n'
            'Please delete the old file!\n'
            f'|-> {legacypath}\n'
        )
        if legacypath.is_file():
            legacy_config, _ = config.load(
                path=legacypath,

                # TODO: move to tomlkit:
                # - needs to be fixed to support bidict?
                #   https://github.com/sdispater/tomlkit/issues/289
                # - we need to use our fork's fix to do multiline array
                #   indenting.
                decode=tomlkit.parse,
            )
            conf.update(legacy_config)

            # XXX: override the presumably previously non-existent
            # file with legacy's contents.
            config.write(
                conf,
                path=path,
                fail_empty=False,
            )

    return conf, path


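def _load_account_sketch() -> None:
    # NOTE: hypothetical broker/account names (not part of the
    # original changeset); loads (or touches) the file at
    # ~/.config/piker/accounting/account.binance.paper.toml
    conf, path = load_account('binance', 'paper')
    print(path, list(conf))

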
# TODO: make this async and offer a `get_account()` that
# can be used from sync code which does the same thing as
# open_trade_ledger()!
@cm
def open_account(
    brokername: str,
    acctid: str,
    write_on_exit: bool = False,

    # for testing or manual load from file
    _fp: Path | None = None,

) -> Generator[Account, None, None]:
    '''
    Read out broker-specific position entries from
    incremental update file: ``pps.toml``.

    '''
    conf: dict
    conf_path: Path
    conf, conf_path = load_account(
        brokername,
        acctid,
        dirpath=_fp,
    )

    if brokername in conf:
        log.warning(
            f'Rewriting {conf_path} keys to drop <broker.acct>!'
        )
        # legacy key schema including <brokername.account>, so
        # rewrite all entries to drop those tables since we now
        # put that in the filename!
        accounts = conf.pop(brokername)
        for acctid in accounts.copy():
            entries = accounts.pop(acctid)
            conf.update(entries)

    # TODO: ideally we can pass in an existing
    # pps state to this right? such that we
    # don't have to do a ledger reload all the
    # time.. a couple ideas I can think of,
    # - mirror this in some client side actor which
    #   does the actual ledger updates (say the paper
    #   engine proc if we decide to always spawn it?),
    # - do diffs against updates from the ledger writer
    #   actor and the in-mem state here?
    from ..brokers import get_brokermod
    mod: ModuleType = get_brokermod(brokername)

    pp_objs: dict[str, Position] = {}
    acnt = Account(
        mod,
        acctid,
        pp_objs,
        conf_path,
        conf=conf,
    )

    # unmarshal/load ``pps.toml`` config entries into object form
    # and update `Account` obj entries.
    for fqme, entry in conf.items():

        # unique broker-backend-system market id
        bs_mktid = str(
            entry.get('bsuid')
            or entry.get('bs_mktid')
        )
        price_tick = Decimal(str(
            entry.get('price_tick_size')
            or entry.get('price_tick')
            or '0.01'
        ))
        size_tick = Decimal(str(
            entry.get('lot_tick_size')
            or entry.get('size_tick')
            or '0.0'
        ))

        # load the pair using the fqme which
        # will make the pair "unresolved" until
        # the backend broker actually loads
        # the market and position info.
        mkt = MktPair.from_fqme(
            fqme,
            price_tick=price_tick,
            size_tick=size_tick,
            bs_mktid=bs_mktid,
        )

        # TODO: RE: general "events" instead of just "clears":
        # - make this an `events` field and support more event types
        #   such as 'split', 'name_change', 'mkt_info', etc..
        # - should we make a ``Struct`` for clear/event entries? convert
        #   the "clear events table" from the toml config (list of dicts)
        #   and load it into object form for use in position processing of
        #   new clear events.

        # convert clears sub-tables (only in this form
        # for toml re-presentation) back into a master table.
        toml_clears_list: list[dict[str, Any]] = entry['clears']
        trans: list[Transaction] = []

        for clears_table in toml_clears_list:
            tid = clears_table['tid']
            dt: tomlkit.items.DateTime | str = clears_table['dt']

            # woa cool, `tomlkit` will actually load datetimes into
            # native form B)
            if isinstance(dt, str):
                dt = pendulum.parse(dt)

            clears_table['dt'] = dt
            trans.append(Transaction(
                fqme=bs_mktid,
                # sym=mkt,
                bs_mktid=bs_mktid,
                tid=tid,
                # XXX: not sure why sometimes these are loaded as
                # `tomlkit.Integer` and are eventually written with
                # an extra `-` in front like `--1`?
                size=float(clears_table['size']),
                price=float(clears_table['price']),
                cost=clears_table['cost'],
                dt=dt,
            ))

        split_ratio = entry.get('split_ratio')

        # if a string-ified expiry field is loaded we try to parse
        # it, THO, they should normally be serialized as native
        # TOML datetimes, since that's supported.
        if (
            (expiry := entry.get('expiry'))
            and isinstance(expiry, str)
        ):
            expiry: pendulum.DateTime = pendulum.parse(expiry)

        pp = pp_objs[bs_mktid] = Position(
            mkt,
            split_ratio=split_ratio,
            bs_mktid=bs_mktid,
        )

        # XXX: super critical, we need to be sure to include
        # all pps.toml clears to avoid reusing clears that were
        # already included in the current incremental update
        # state, since today's records may have already been
        # processed!
        for t in trans:
            pp.add_clear(t)

    try:
        yield acnt
    finally:
        if write_on_exit:
            acnt.write_config()


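def _open_account_sketch() -> None:
    # NOTE: a hypothetical usage sketch (not part of the original
    # changeset); pass `write_on_exit=True` if you want the TOML
    # state re-written on context exit.
    with open_account('binance', 'paper') as acnt:
        for bs_mktid, pos in acnt.pps.items():
            print(bs_mktid, pos.cumsize, pos.ppu)

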
# TODO: drop the old name and THIS!
@cm
def open_pps(
    *args,
    **kwargs,
) -> Generator[Account, None, None]:
    log.warning(
        '`open_pps()` is now deprecated!\n'
        'Please use `with open_account() as acnt:`'
    )
    with open_account(*args, **kwargs) as acnt:
        yield acnt


def load_account_from_ledger(

    brokername: str,
    acctname: str,

    # post normalization filter on ledger entries to be processed
    filter_by_ids: dict[str, list[str]] | None = None,

    ledger: TransactionLedger | None = None,
    **kwargs,

) -> Account:
    '''
    Open a ledger file by broker name and account, read in and
    process any trade records into our normalized ``Transaction``
    form, then update the equivalent ``Account`` with the
    `bs_mktid`-mapped set of positions and deliver it.

    '''
    acnt: Account
    with open_account(
        brokername,
        acctname,
        **kwargs,
    ) as acnt:
        if ledger is not None:
            acnt.update_from_ledger(ledger)

    return acnt

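def _ledger_reload_sketch() -> None:
    # NOTE: hypothetical ids (not part of the original changeset);
    # with no `ledger` passed this just loads the on-disk account
    # state without recomputing positions.
    acnt = load_account_from_ledger('binance', 'paper')
    print(acnt.pps)
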

@ -0,0 +1,698 @@

# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Calculation routines for balance and position tracking such that
you know when you're losing money (if possible) XD

'''
from __future__ import annotations
from collections.abc import ValuesView
from contextlib import contextmanager as cm
from math import copysign
from typing import (
    Any,
    Callable,
    Iterator,
    TYPE_CHECKING,
)

import polars as pl
from pendulum import (
    DateTime,
    from_timestamp,
    parse,
)

if TYPE_CHECKING:
    from ._ledger import (
        Transaction,
        TransactionLedger,
    )


def ppu(
    clears: Iterator[Transaction],

    # include transaction cost in breakeven price
    # and presume the worst case of the same cost
    # to exit this transaction (even though in reality
    # it will be dynamic based on exit strategy).
    cost_scalar: float = 2,

    # return the ledger of clears as a (now dt sorted) dict with
    # new position fields inserted alongside each entry.
    as_ledger: bool = False,

) -> float | list[tuple[str, dict]]:
    '''
    Compute the "price-per-unit" price for the given non-zero sized
    rolling position.

    The recurrence relation which computes this (exponential) mean
    per new clear which **increases** the accumulative position size
    is:

    ppu[-1] = (
        ppu[-2] * accum_size[-2]
        +
        price[-1] * size[-1]
    ) / accum_size[-1]

    where the `cost_basis` for the current step is simply the price
    * size of the most recent clearing transaction.

    -----
    TODO: get the BEP computed and working similarly!
    -----
    the equivalent "break even price" or bep at each new clear
    event step conversely only changes on a "position exiting"
    clear which **decreases** the cumulative dst asset size:

    bep[-1] = ppu[-1] - (cum_pnl[-1] / cumsize[-1])

    '''
    asize_h: list[float] = []  # historical accumulative size
    ppu_h: list[float] = []  # historical price-per-unit
    # ledger: dict[str, dict] = {}
    ledger: list[tuple[str, dict]] = []

    t: Transaction
    for t in clears:
        clear_size: float = t.size
        clear_price: str | float = t.price
        is_clear: bool = not isinstance(clear_price, str)

        last_accum_size = asize_h[-1] if asize_h else 0
        accum_size: float = last_accum_size + clear_size
        accum_sign = copysign(1, accum_size)
        sign_change: bool = False

        # on transfers we normally write some non-valid
        # price since withdrawal to another account/wallet
        # has nothing to do with inter-asset-market prices.
        # TODO: this should be better handled via a `type: 'tx'`
        # field as per existing issue surrounding all this:
        # https://github.com/pikers/piker/issues/510
        if isinstance(clear_price, str):
            # TODO: we can't necessarily have this commit to
            # the overall pos size since we also need to
            # include other positions contributions to this
            # balance or we might end up with a -ve balance for
            # the position..
            continue

        # test if the pp somehow went "past" a net zero size state
        # resulting in a change of the "sign" of the size (+ve for
        # long, -ve for short).
        sign_change = (
            copysign(1, last_accum_size) + accum_sign == 0
            and last_accum_size != 0
        )

        # since we passed the net-zero-size state the new size
        # after sum should be the remaining size in the new
        # "direction" (aka, long vs. short) for this clear.
        if sign_change:
            clear_size: float = accum_size
            abs_diff: float = abs(accum_size)
            asize_h.append(0)
            ppu_h.append(0)

        else:
            # old size minus the new size gives us size diff with
            # +ve -> increase in pp size
            # -ve -> decrease in pp size
            abs_diff = abs(accum_size) - abs(last_accum_size)

        # XXX: LIFO breakeven price update. only an increase in size
        # of the position contributes to the breakeven price,
        # a decrease does not (i.e. the position is being made
        # smaller).
        # abs_clear_size = abs(clear_size)
        abs_new_size: float | int = abs(accum_size)

        if (
            abs_diff > 0
            and is_clear
        ):
            cost_basis = (
                # cost basis for this clear
                clear_price * abs(clear_size)
                +
                # transaction cost
                accum_sign * cost_scalar * t.cost
            )

            if asize_h:
                size_last: float = abs(asize_h[-1])
                cb_last: float = ppu_h[-1] * size_last
                ppu: float = (cost_basis + cb_last) / abs_new_size

            else:
                ppu: float = cost_basis / abs_new_size

        else:
            # TODO: for PPU we should probably handle txs out
            # (aka withdrawals) similarly by simply not having
            # them contrib to the running PPU calc and only
            # when the next entry clear comes in (which will
            # then have a higher weighting on the PPU).

            # on "exit" clears from a given direction,
            # only the size changes not the price-per-unit
            # need to be updated since the ppu remains constant
            # and gets weighted by the new size.
            ppu: float = ppu_h[-1] if ppu_h else 0  # set to previous value

        # extend with new rolling metric for this step
        ppu_h.append(ppu)
        asize_h.append(accum_size)

        # ledger[t.tid] = {
            # 'txn': t,
        # ledger[t.tid] = t.to_dict() | {
        ledger.append((
            t.tid,
            t.to_dict() | {
                'ppu': ppu,
                'cumsize': accum_size,
                'sign_change': sign_change,

                # TODO: cum_pnl, bep
            }
        ))

    final_ppu = ppu_h[-1] if ppu_h else 0
    # TODO: once we have etypes in all ledger entries..
    # handle any split info entered (for now) manually by user
    # if self.split_ratio is not None:
    #     final_ppu /= self.split_ratio

    if as_ledger:
        return ledger

    else:
        return final_ppu


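def _ppu_example() -> None:
    # NOTE: a hypothetical sanity-check sketch (not part of the
    # original changeset) for the `ppu()` recurrence above; the txn
    # values are made up and `Transaction` is imported lazily since
    # this module only type-checks it via `TYPE_CHECKING`.
    from pendulum import now as _now
    from ._ledger import Transaction

    txns = [
        Transaction(fqme='xyz.sim', bs_mktid='xyz', tid='t1',
                    size=10., price=100., cost=1., dt=_now()),
        Transaction(fqme='xyz.sim', bs_mktid='xyz', tid='t2',
                    size=10., price=110., cost=1., dt=_now()),
    ]
    # size-weighted mean of the entries plus 2x-scaled txn costs:
    # ((100*10 + 2*1) + (110*10 + 2*1)) / 20 = 105.2
    avg: float = ppu(txns)
    assert abs(avg - 105.2) < 1e-9

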
def iter_by_dt(
    records: (
        dict[str, dict[str, Any]]
        | ValuesView[dict]  # eg. `Position._events.values()`
        | list[dict]
        | list[Transaction]  # XXX preferred!
    ),

    # NOTE: parsers are looked up in the insert order
    # so if you know that the record stats show some field
    # is more common than others, stick it at the top B)
    parsers: dict[str, Callable | None] = {
        'dt': parse,  # parity case
        'datetime': parse,  # datetime-str
        'time': from_timestamp,  # float epoch
    },
    key: Callable | None = None,

) -> Iterator[tuple[str, dict]]:
    '''
    Iterate entries of a transaction table sorted by entry recorded
    datetime presumably set at the ``'dt'`` field in each entry.

    '''
    if isinstance(records, dict):
        records: list[tuple[str, dict]] = list(records.items())

    def dyn_parse_to_dt(
        tx: tuple[str, dict[str, Any]] | Transaction,
    ) -> DateTime:

        # handle `.items()` inputs
        if isinstance(tx, tuple):
            tx = tx[1]

        # dict or tx object?
        isdict: bool = isinstance(tx, dict)

        # get best parser for this record..
        for k in parsers:
            if (
                isdict and k in tx
                or getattr(tx, k, None)
            ):
                v = tx[k] if isdict else tx.dt
                assert v is not None, f'No valid value for `{k}`!?'

                # only call parser on the value if not None from
                # the `parsers` table above (when NOT using
                # `.get()`), otherwise pass through the value and
                # sort on it directly
                if (
                    not isinstance(v, DateTime)
                    and (parser := parsers.get(k))
                ):
                    return parser(v)
                else:
                    return v

        else:
            # XXX: should never get here..
            breakpoint()

    entry: tuple[str, dict] | Transaction
    for entry in sorted(
        records,
        key=key or dyn_parse_to_dt,
    ):
        # NOTE the type sig above; either pairs or txns B)
        yield entry


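def _iter_by_dt_example() -> list[str]:
    # NOTE: a tiny, hypothetical usage sketch (not part of the
    # original changeset) showing mixed-key records sorted via the
    # default `parsers` table above.
    records = {
        'a': {'dt': '2023-01-02T00:00:00+00:00'},
        'b': {'time': 1672531200.0},  # 2023-01-01T00:00:00 UTC
    }
    # -> ['b', 'a'] since the epoch stamp parses to the earlier dt
    return [tid for tid, _entry in iter_by_dt(records)]

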
# TODO: probably just move this into the test suite or
# keep it here for use as such?
# def ensure_state(self) -> None:
#     '''
#     Audit both the `.cumsize` and `.ppu` local instance vars against
#     the clears table calculations and return the calc-ed values if
#     they differ and log warnings to console.

#     '''
#     # clears: list[dict] = self._clears

#     # self.first_clear_dt = min(clears, key=lambda e: e['dt'])['dt']
#     last_clear: dict = clears[-1]
#     csize: float = self.calc_size()
#     accum: float = last_clear['accum_size']

#     if not self.expired():
#         if (
#             csize != accum
#             and csize != round(accum * (self.split_ratio or 1))
#         ):
#             raise ValueError(f'Size mismatch: {csize}')
#     else:
#         assert csize == 0, 'Contract is expired but non-zero size?'

#     if self.cumsize != csize:
#         log.warning(
#             'Position state mismatch:\n'
#             f'{self.cumsize} => {csize}'
#         )
#         self.cumsize = csize

#     cppu: float = self.calc_ppu()
#     ppu: float = last_clear['ppu']
#     if (
#         cppu != ppu
#         and self.split_ratio is not None

#         # handle any split info entered (for now) manually by user
#         and cppu != (ppu / self.split_ratio)
#     ):
#         raise ValueError(f'PPU mismatch: {cppu}')

#     if self.ppu != cppu:
#         log.warning(
#             'Position state mismatch:\n'
#             f'{self.ppu} => {cppu}'
#         )
#         self.ppu = cppu


@cm
def open_ledger_dfs(

    brokername: str,
    acctname: str,

    ledger: TransactionLedger | None = None,

    **kwargs,

) -> tuple[
    dict[str, pl.DataFrame],
    TransactionLedger,
]:
    '''
    Open a ledger of trade records (presumably from some broker
    backend), normalize the records into `Transaction`s via the
    backend's declared endpoint, cast to `polars.DataFrame`s, and
    allow the ledger to be updated on exit.

    '''
    from piker.toolz import open_crash_handler
    with open_crash_handler():
        if not ledger:
            import time
            from ._ledger import open_trade_ledger

            now = time.time()

            with open_trade_ledger(
                    brokername,
                    acctname,
                    rewrite=True,
                    allow_from_sync_code=True,

                    # proxied through from caller
                    **kwargs,

            ) as ledger:
                if not ledger:
                    raise ValueError(f'No ledger for {acctname}@{brokername} exists?')

                print(f'LEDGER LOAD TIME: {time.time() - now}')

        yield ledger_to_dfs(ledger), ledger


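def _ledger_dfs_example() -> None:
    # NOTE: a hypothetical usage sketch (not part of the original
    # changeset); the broker/account names are made up and an
    # on-disk ledger file must already exist for them.
    with open_ledger_dfs('binance', 'paper') as (dfs, ledger):
        for bs_mktid, df in dfs.items():
            print(bs_mktid, df.tail(1))

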
def ledger_to_dfs(
 | 
			
		||||
    ledger: TransactionLedger,
 | 
			
		||||
 | 
			
		||||
) -> dict[str, pl.DataFrame]:
 | 
			
		||||
 | 
			
		||||
    txns: dict[str, Transaction] = ledger.to_txns()
 | 
			
		||||
 | 
			
		||||
    # ldf = pl.DataFrame(
 | 
			
		||||
    #     list(txn.to_dict() for txn in txns.values()),
 | 
			
		||||
    ldf = pl.from_dicts(
 | 
			
		||||
        list(txn.to_dict() for txn in txns.values()),
 | 
			
		||||
 | 
			
		||||
        # only for ordering the cols
 | 
			
		||||
        schema=[
 | 
			
		||||
            ('fqme', str),
 | 
			
		||||
            ('tid', str),
 | 
			
		||||
            ('bs_mktid', str),
 | 
			
		||||
            ('expiry', str),
 | 
			
		||||
            ('etype', str),
 | 
			
		||||
            ('dt', str),
 | 
			
		||||
            ('size', pl.Float64),
 | 
			
		||||
            ('price', pl.Float64),
 | 
			
		||||
            ('cost', pl.Float64),
 | 
			
		||||
        ],
 | 
			
		||||
    ).sort(  # chronological order
 | 
			
		||||
        'dt'
 | 
			
		||||
    ).with_columns([
 | 
			
		||||
        pl.col('dt').str.to_datetime(),
 | 
			
		||||
        # pl.col('expiry').str.to_datetime(),
 | 
			
		||||
        # pl.col('expiry').dt.date(),
 | 
			
		||||
    ])
 | 
			
		||||
 | 
			
		||||
    # filter out to the columns matching values filter passed
 | 
			
		||||
    # as input.
 | 
			
		||||
    # if filter_by_ids:
 | 
			
		||||
    #     for col, vals in filter_by_ids.items():
 | 
			
		||||
    #         str_vals = set(map(str, vals))
 | 
			
		||||
    #         pred: pl.Expr = pl.col(col).eq(str_vals.pop())
 | 
			
		||||
    #         for val in str_vals:
 | 
			
		||||
    #             pred |= pl.col(col).eq(val)
 | 
			
		||||
 | 
			
		||||
    #     fdf = df.filter(pred)
 | 
			
		||||
 | 
			
		||||
    # TODO: originally i had tried just using a plain ol' groupby
 | 
			
		||||
    # + agg here but the issue was re-inserting to the src frame.
 | 
			
		||||
    # however, learning more about `polars` seems like maybe we can
 | 
			
		||||
    # use `.over()`?
 | 
			
		||||
    # https://pola-rs.github.io/polars/py-polars/html/reference/expressions/api/polars.Expr.over.html#polars.Expr.over
 | 
			
		||||
    # => CURRENTLY we break up into a frame per mkt / fqme
 | 
			
		||||
    dfs: dict[str, pl.DataFrame] = ldf.partition_by(
 | 
			
		||||
        'bs_mktid',
 | 
			
		||||
        as_dict=True,
 | 
			
		||||
    )
 | 
			
		||||
 | 
			
		||||
    # TODO: not sure if this is even possible but..
 | 
			
		||||
    # - it'd be more ideal to use `ppt = df.groupby('fqme').agg([`
 | 
			
		||||
    # - ppu and bep calcs!
 | 
			
		||||
    for key in dfs:
 | 
			
		||||
 | 
			
		||||
        # covert to lazy form (since apparently we might need it
 | 
			
		||||
        # eventually ...)
 | 
			
		||||
        df: pl.DataFrame = dfs[key]
 | 
			
		||||
 | 
			
		||||
        ldf: pl.LazyFrame = df.lazy()
 | 
			
		||||
 | 
			
		||||
        df = dfs[key] = ldf.with_columns([
 | 
			
		||||
 | 
			
		||||
            pl.cumsum('size').alias('cumsize'),
 | 
			
		||||
 | 
			
		||||
            # amount of source asset "sent" (via buy txns in
 | 
			
		||||
            # the market) to acquire the dst asset, PER txn.
 | 
			
		||||
            # when this value is -ve (i.e. a sell operation) then
 | 
			
		||||
            # the amount sent is actually "returned".
 | 
			
		||||
            (
 | 
			
		||||
                (pl.col('price') * pl.col('size'))
 | 
			
		||||
                +
 | 
			
		||||
                (pl.col('cost')) # * pl.col('size').sign())
 | 
			
		||||
            ).alias('dst_bot'),
 | 
			
		||||
 | 
			
		||||
        ]).with_columns([
 | 
			
		||||
 | 
			
		||||
            # rolling balance in src asset units
 | 
			
		||||
            (pl.col('dst_bot').cumsum() * -1).alias('src_balance'),
 | 
			
		||||
 | 
			
		||||
            # "position operation type" in terms of increasing the
 | 
			
		||||
            # amount in the dst asset (entering) or decreasing the
 | 
			
		||||
            # amount in the dst asset (exiting).
 | 
			
		||||
            pl.when(
 | 
			
		||||
                pl.col('size').sign() == pl.col('cumsize').sign()
 | 
			
		||||
 | 
			
		||||
            ).then(
 | 
			
		||||
                pl.lit('enter')  # see above, but is just price * size per txn
 | 
			
		||||
 | 
			
		||||
            ).otherwise(
 | 
			
		||||
                pl.when(pl.col('cumsize') == 0)
 | 
			
		||||
                .then(pl.lit('exit_to_zero'))
 | 
			
		||||
                .otherwise(pl.lit('exit'))
 | 
			
		||||
            ).alias('descr'),
 | 
			
		||||
 | 
			
		||||
            (pl.col('cumsize').sign() == pl.col('size').sign())
 | 
			
		||||
            .alias('is_enter'),
 | 
			
		||||
 | 
			
		||||
        ]).with_columns([
 | 
			
		||||
 | 
			
		||||
            # pl.lit(0, dtype=pl.Utf8).alias('virt_cost'),
 | 
			
		||||
            pl.lit(0, dtype=pl.Float64).alias('applied_cost'),
 | 
			
		||||
            pl.lit(0, dtype=pl.Float64).alias('pos_ppu'),
 | 
			
		||||
            pl.lit(0, dtype=pl.Float64).alias('per_txn_pnl'),
 | 
			
		||||
            pl.lit(0, dtype=pl.Float64).alias('cum_pos_pnl'),
 | 
			
		||||
            pl.lit(0, dtype=pl.Float64).alias('pos_bep'),
 | 
			
		||||
            pl.lit(0, dtype=pl.Float64).alias('cum_ledger_pnl'),
 | 
			
		||||
            pl.lit(None, dtype=pl.Float64).alias('ledger_bep'),
 | 
			
		||||
 | 
			
		||||
            # TODO: instead of the iterative loop below i guess we
 | 
			
		||||
            # could try using embedded lists to track which txns
 | 
			
		||||
            # are part of which ppu / bep calcs? Not sure this will
 | 
			
		||||
            # look any better nor be any more performant though xD
 | 
			
		||||
            # pl.lit([[0]], dtype=pl.List(pl.Float64)).alias('list'),

        # choose fields to emit for accounting purposes
        ]).select([
            pl.exclude([
                'tid',
                # 'dt',
                'expiry',
                'bs_mktid',
                'etype',
                # 'is_enter',
            ]),
        ]).collect()

        # compute recurrence relations for ppu and bep
        last_ppu: float = 0
        last_cumsize: float = 0
        last_ledger_pnl: float = 0
        last_pos_pnl: float = 0
        virt_costs: list[float] = [0., 0.]

        # imperatively compute the PPU (price per unit) and BEP
        # (break even price) iteratively over the ledger, oriented
        # around each position state: a state of split balances in
        # > 1 asset.
        for i, row in enumerate(df.iter_rows(named=True)):

            cumsize: float = row['cumsize']
            is_enter: bool = row['is_enter']
            price: float = row['price']
            size: float = row['size']

            # the profit is ALWAYS decreased, aka made a "loss"
            # by the constant fee charged by the txn provider!
            # see below in final PnL calculation and row element
            # set.
            txn_cost: float = row['cost']
            pnl: float = 0

            # ALWAYS reset per-position cum PnL
            if last_cumsize == 0:
                last_pos_pnl: float = 0

            # a "position size INCREASING" or ENTER transaction
            # which "makes larger", in src asset unit terms, the
            # trade's side-size of the destination asset:
            # - "buying" (more) units of the dst asset
            # - "selling" (more short) units of the dst asset
            if is_enter:

                # Naively include transaction cost in breakeven
                # price and presume the worst case of the
                # exact-same-cost-to-exit this transaction's worth
                # of size even though in reality it will be dynamic
                # based on exit strategy, price, liquidity, etc..
                virt_cost: float = txn_cost

                # cpu: float = cost / size
                # cummean of the cost-per-unit used for modelling
                # a projected future exit cost which we immediately
                # include in the costs incorporated to BEP on enters
                last_cum_costs_size, last_cpu = virt_costs
                cum_costs_size: float = last_cum_costs_size + abs(size)
                cumcpu = (
                    (last_cpu * last_cum_costs_size)
                    +
                    txn_cost
                ) / cum_costs_size
                virt_costs = [cum_costs_size, cumcpu]

                txn_cost = txn_cost + virt_cost
                # df[i, 'virt_cost'] = f'{-virt_cost} FROM {cumcpu}@{cum_costs_size}'

                # a cumulative mean of the price-per-unit acquired
                # in the destination asset:
                # https://en.wikipedia.org/wiki/Moving_average#Cumulative_average
                # You could also think of this measure more
                # generally as an exponential mean with `alpha
                # = 1/N` where `N` is the current number of txns
                # included in the "position" defining set:
                # https://en.wikipedia.org/wiki/Exponential_smoothing
                ppu: float = (
                    (
                        (last_ppu * last_cumsize)
                        +
                        (price * size)
                    ) /
                    cumsize
                )

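                # A quick worked example of the cummean update
                # above (illustrative numbers only): two enters of
                # 10u @ $100 then 5u @ $130 give,
                #
                #   ppu = ((100 * 10) + (130 * 5)) / 15
                #       = 1650 / 15
                #       = 110.0
                #
                # i.e. identical to `sum(price_i * size_i) /
                # cumsize` recomputed from scratch, but derived
                # incrementally from just the prior (ppu, cumsize)
                # pair.
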
            # a "position size DECREASING" or EXIT transaction
 | 
			
		||||
            # which "makes smaller" the trade's side-size of the
 | 
			
		||||
            # destination asset:
 | 
			
		||||
            # - selling previously bought units of the dst asset
 | 
			
		||||
            #   (aka 'closing' a long position).
 | 
			
		||||
            # - buying previously borrowed and sold (short) units
 | 
			
		||||
            #   of the dst asset (aka 'covering'/'closing' a short
 | 
			
		||||
            #   position).
 | 
			
		||||
            else:
 | 
			
		||||
                # only changes on position size increasing txns
 | 
			
		||||
                ppu: float = last_ppu
 | 
			
		||||
 | 
			
		||||
                # UNWIND IMPLIED COSTS FROM ENTRIES
 | 
			
		||||
                # => Reverse the virtual/modelled (2x predicted) txn
 | 
			
		||||
                # cost that was included in the least-recently
 | 
			
		||||
                # entered txn that is still part of the current CSi
 | 
			
		||||
                # set.
 | 
			
		||||
                # => we look up the cost-per-unit cumsum and apply
 | 
			
		||||
                # if over the current txn size (by multiplication)
 | 
			
		||||
                # and then reverse that previusly applied cost on
 | 
			
		||||
                # the txn_cost for this record.
 | 
			
		||||
                #
 | 
			
		||||
                # NOTE: current "model" is just to previously assumed 2x
 | 
			
		||||
                # the txn cost for a matching enter-txn's
 | 
			
		||||
                # cost-per-unit; we then immediately reverse this
 | 
			
		||||
                # prediction and apply the real cost received here.
 | 
			
		||||
                last_cum_costs_size, last_cpu = virt_costs
 | 
			
		||||
                prev_virt_cost: float = last_cpu * abs(size)
 | 
			
		||||
                txn_cost: float = txn_cost - prev_virt_cost  # +ve thus a "reversal"
 | 
			
		||||
                cum_costs_size: float = last_cum_costs_size - abs(size)
 | 
			
		||||
                virt_costs = [cum_costs_size, last_cpu]
 | 
			
		||||
 | 
			
		||||
                # df[i, 'virt_cost'] = (
 | 
			
		||||
                #     f'{-prev_virt_cost} FROM {last_cpu}@{cum_costs_size}'
 | 
			
		||||
                # )
 | 
			
		||||
 | 
			
		||||
                # the per-txn profit or loss (PnL) given we are
 | 
			
		||||
                # (partially) "closing"/"exiting" the position via
 | 
			
		||||
                # this txn.
 | 
			
		||||
                pnl: float = (last_ppu - price) * size
 | 
			
		||||
 | 
			
		||||
            # always subtract txn cost from total txn pnl
 | 
			
		||||
            txn_pnl: float = pnl - txn_cost
 | 
			
		||||
 | 
			
		||||
            # cumulative PnLs per txn
 | 
			
		||||
            last_ledger_pnl = (
 | 
			
		||||
                last_ledger_pnl + txn_pnl
 | 
			
		||||
            )
 | 
			
		||||
            last_pos_pnl = df[i, 'cum_pos_pnl'] = (
 | 
			
		||||
                last_pos_pnl + txn_pnl
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
            if cumsize == 0:
 | 
			
		||||
                last_ppu = ppu = 0
 | 
			
		||||
 | 
			
		||||
            # compute the BEP: "break even price", a value that
 | 
			
		||||
            # determines at what price the remaining cumsize can be
 | 
			
		||||
            # liquidated such that the net-PnL on the current
 | 
			
		||||
            # position will result in ZERO gain or loss from open
 | 
			
		||||
            # to close including all txn costs B)
 | 
			
		||||
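            # A small worked example of the relation below
            # (made-up numbers, purely illustrative): long
            # cumsize=10 @ ppu=110 with cumulative pnl=-50 (all
            # costs so far) gives,
            #
            #   bep = ((110 * 10) - (-50 * 1)) / 10
            #       = 115.0
            #
            # i.e. the remaining 10u must liquidate at 115 to walk
            # away flat; a short position flips the sign via
            # `cumsize_sign`.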
            if (
                abs(cumsize) > 0  # non-exit-to-zero position txn
            ):
                cumsize_sign: float = copysign(1, cumsize)
                ledger_bep: float = (
                    (
                        (ppu * cumsize)
                        -
                        (last_ledger_pnl * cumsize_sign)
                    ) / cumsize
                )

                # NOTE: when we "enter more" dst asset units (aka
                # increase position state) AFTER having exited some
                # units (aka decreasing the pos size some) the bep
                # needs to be RECOMPUTED based on new ppu such that
                # liquidation of the cumsize at the bep price
                # results in a zero-pnl for the existing position
                # (since the last one).
                # for position lifetime BEP we can never have
                # a valid value once the position is "closed"
                # / fully exited Bo
                pos_bep: float = (
                    (
                        (ppu * cumsize)
                        -
                        (last_pos_pnl * cumsize_sign)
                    ) / cumsize
                )

            # inject DF row with all values
            df[i, 'pos_ppu'] = ppu
            df[i, 'per_txn_pnl'] = txn_pnl
            df[i, 'applied_cost'] = -txn_cost
            df[i, 'cum_pos_pnl'] = last_pos_pnl
            df[i, 'pos_bep'] = pos_bep
            df[i, 'cum_ledger_pnl'] = last_ledger_pnl
            df[i, 'ledger_bep'] = ledger_bep

            # keep backrefs to satisfy the recurrence relation
            last_ppu: float = ppu
            last_cumsize: float = cumsize

    # TODO?: pass back the current `Position` object loaded from
    # the account as well? Would provide incentive to do all
    # this ledger loading inside a new async open_account().
    # bs_mktid: str = df[0]['bs_mktid']
    # pos: Position = acnt.pps[bs_mktid]

    return dfs

@@ -0,0 +1,311 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
CLI front end for trades ledger and position tracking management.

'''
from __future__ import annotations
from pprint import pformat


from rich.console import Console
from rich.markdown import Markdown
import polars as pl
import tractor
import trio
import typer

from ..log import get_logger
from ..service import (
    open_piker_runtime,
)
from ..clearing._messages import BrokerdPosition
from ..calc import humanize
from ..brokers._daemon import broker_init
from ._ledger import (
    load_ledger,
    TransactionLedger,
    # open_trade_ledger,
)
from .calc import (
    open_ledger_dfs,
)


ledger = typer.Typer()


def unpack_fqan(
    fully_qualified_account_name: str,
    console: Console | None = None,
) -> tuple | bool:
    try:
        brokername, account = fully_qualified_account_name.split('.')
        return brokername, account
    except ValueError:
        if console is not None:
            md = Markdown(
                f'=> `{fully_qualified_account_name}` <=\n\n'
                'is not a valid '
                '__fully qualified account name?__\n\n'
                'Your account name needs to be of the form '
                '`<brokername>.<account_name>`\n'
            )
            console.print(md)
        return False

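
# Quick usage sketch for `unpack_fqan()` above (hypothetical
# values; with no `Console` passed a parse failure just returns
# `False`):
#
#   >>> unpack_fqan('binance.paper')
#   ('binance', 'paper')
#   >>> unpack_fqan('not-an-fqan')
#   False
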
@ledger.command()
def sync(
    fully_qualified_account_name: str,
    pdb: bool = False,

    loglevel: str = typer.Option(
        'error',
        "-l",
    ),
):
    log = get_logger(loglevel)
    console = Console()

    pair: tuple[str, str]
    if not (pair := unpack_fqan(
        fully_qualified_account_name,
        console,
    )):
        return

    brokername, account = pair

    brokermod, start_kwargs, daemon_ep = broker_init(
        brokername,
        loglevel=loglevel,
    )
    brokername: str = brokermod.name

    async def main():

        async with (
            open_piker_runtime(
                name='ledger_cli',
                loglevel=loglevel,
                debug_mode=pdb,

            ) as (actor, sockaddr),

            tractor.open_nursery() as an,
        ):
            try:
                log.info(
                    f'Piker runtime up as {actor.uid}@{sockaddr}'
                )

                portal = await an.start_actor(
                    loglevel=loglevel,
                    debug_mode=pdb,
                    **start_kwargs,
                )

                from ..clearing import (
                    open_brokerd_dialog,
                )
                brokerd_stream: tractor.MsgStream

                async with (
                    # engage the brokerd daemon context
                    portal.open_context(
                        daemon_ep,
                        brokername=brokername,
                        loglevel=loglevel,
                    ),

                    # manually open the brokerd trade dialog EP
                    # (what the EMS normally does internally) B)
                    open_brokerd_dialog(
                        brokermod,
                        portal,
                        exec_mode=(
                            'paper'
                            if account == 'paper'
                            else 'live'
                        ),
                        loglevel=loglevel,
                    ) as (
                        brokerd_stream,
                        pp_msg_table,
                        accounts,
                    ),
                ):
                    try:
                        assert len(accounts) == 1
                        if not pp_msg_table:
                            ld, fpath = load_ledger(brokername, account)
                            assert not ld, f'WTF did we fail to parse ledger:\n{ld}'

                            console.print(
                                '[yellow]'
                                'No pps found for '
                                f'`{brokername}.{account}` '
                                'account!\n\n'
                                '[/][underline]'
                                'None of the following ledger files exist:\n\n[/]'
                                f'{fpath.as_uri()}\n'
                            )
                            return

                        pps_by_symbol: dict[str, BrokerdPosition] = pp_msg_table[
                            brokername,
                            account,
                        ]

                        summary: str = (
                            '[dim underline]Piker Position Summary[/] '
                            f'[dim blue underline]{brokername}[/]'
                            '[dim].[/]'
                            f'[blue underline]{account}[/]'
                            f'[dim underline] -> total pps: [/]'
                            f'[green]{len(pps_by_symbol)}[/]\n'
                        )
                        # for ppdict in positions:
                        for fqme, ppmsg in pps_by_symbol.items():
                            # ppmsg = BrokerdPosition(**ppdict)
                            size = ppmsg.size
                            if size:
                                ppu: float = round(
                                    ppmsg.avg_price,
                                    ndigits=2,
                                )
                                cost_basis: str = humanize(size * ppu)
                                h_size: str = humanize(size)

                                if size < 0:
                                    pcolor = 'red'
                                else:
                                    pcolor = 'green'

                                # semantic-highlight of fqme
                                fqme = ppmsg.symbol
                                tokens = fqme.split('.')
                                styled_fqme = f'[blue underline]{tokens[0]}[/]'
                                for tok in tokens[1:]:
                                    styled_fqme += '[dim].[/]'
                                    styled_fqme += f'[dim blue underline]{tok}[/]'

                                # TODO: instead display in a ``rich.Table``?
                                summary += (
                                    styled_fqme +
                                    '[dim]: [/]'
                                    f'[{pcolor}]{h_size}[/]'
                                    '[dim blue]u @[/]'
                                    f'[{pcolor}]{ppu}[/]'
                                    '[dim blue] = [/]'
                                    f'[{pcolor}]$ {cost_basis}\n[/]'
                                )

                        console.print(summary)

                    finally:
                        # exit via ctx cancellation.
                        brokerd_ctx: tractor.Context = brokerd_stream._ctx
                        await brokerd_ctx.cancel(timeout=1)

                    # TODO: once ported to newer tractor branch we should
                    # be able to do a loop like this:
                    # while brokerd_ctx.cancel_called_remote is None:
                    #     await trio.sleep(0.01)
                    #     await brokerd_ctx.cancel()

            finally:
                await portal.cancel_actor()

    trio.run(main)

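
# Example invocation of the `sync` sub-cmd above, assuming this
# `ledger` typer app is mounted under the main `piker` CLI entry
# point (account name is illustrative):
#
#   $ piker ledger sync binance.paper -l info
#
# which boots the runtime, opens the brokerd trade dialog and
# prints a styled position summary before tearing down.
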
@ledger.command()
def disect(
    # "fully_qualified_account_name"
    fqan: str,
    fqme: str,  # for ib

    # TODO: in tractor we should really have
    # a debug_mode ctx for wrapping any kind of code no?
    pdb: bool = False,
    bs_mktid: str = typer.Option(
        None,
        "-bid",
    ),
    loglevel: str = typer.Option(
        'error',
        "-l",
    ),
):
    from piker.log import get_console_log
    from piker.toolz import open_crash_handler
    get_console_log(loglevel)

    pair: tuple[str, str]
    if not (pair := unpack_fqan(fqan)):
        raise ValueError(f'{fqan} malformed!?')

    brokername, account = pair

    # ledger dfs groupby-partitioned by fqme
    dfs: dict[str, pl.DataFrame]
    # actual ledger instance
    ldgr: TransactionLedger

    pl.Config.set_tbl_cols(-1)
    pl.Config.set_tbl_rows(-1)
    with (
        open_crash_handler(),
        open_ledger_dfs(
            brokername,
            account,
        ) as (dfs, ldgr),
    ):

        # look up specific frame for fqme-selected asset
        if (df := dfs.get(fqme)) is None:
            mktids2fqmes: dict[str, list[str]] = {}
            for bs_mktid in dfs:
                df: pl.DataFrame = dfs[bs_mktid]
                fqmes: pl.Series[str] = df['fqme']
                uniques: list[str] = fqmes.unique()
                mktids2fqmes[bs_mktid] = set(uniques)
                if fqme in uniques:
                    break
            print(
                f'No specific ledger for fqme={fqme} could be found in\n'
                f'{pformat(mktids2fqmes)}?\n'
                f'Maybe the `{brokername}` backend uses something '
                'else for its `bs_mktid` than the `fqme`?\n'
                'Scanning for matches in unique fqmes per frame..\n'
            )

        # :pray:
        assert not df.is_empty()

        # muck around in pdbp REPL
        breakpoint()

        # TODO: we REALLY need a better console REPL for this
        # kinda thing..
        # - `xonsh` is an obvious option (and it looks amazing) but
        # we need to figure out how to embed it better than just:
        # from xonsh.main import main
        # main(argv=[])
        # which will not actually inject the `df` to globals?

@@ -17,13 +17,40 @@
"""
 | 
			
		||||
Broker clients, daemons and general back end machinery.
 | 
			
		||||
"""
 | 
			
		||||
from contextlib import (
 | 
			
		||||
    asynccontextmanager as acm,
 | 
			
		||||
)
 | 
			
		||||
from importlib import import_module
 | 
			
		||||
from types import ModuleType
 | 
			
		||||
 | 
			
		||||
__brokers__ = [
 | 
			
		||||
from tractor.trionics import maybe_open_context
 | 
			
		||||
 | 
			
		||||
from ._util import (
 | 
			
		||||
    log,
 | 
			
		||||
    BrokerError,
 | 
			
		||||
    SymbolNotFound,
 | 
			
		||||
    NoData,
 | 
			
		||||
    DataUnavailable,
 | 
			
		||||
    DataThrottle,
 | 
			
		||||
    resproc,
 | 
			
		||||
    get_logger,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
__all__: list[str] = [
 | 
			
		||||
    'BrokerError',
 | 
			
		||||
    'SymbolNotFound',
 | 
			
		||||
    'NoData',
 | 
			
		||||
    'DataUnavailable',
 | 
			
		||||
    'DataThrottle',
 | 
			
		||||
    'resproc',
 | 
			
		||||
    'get_logger',
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
__brokers__: list[str] = [
 | 
			
		||||
    'binance',
 | 
			
		||||
    'ib',
 | 
			
		||||
    'kraken',
 | 
			
		||||
    'kucoin',
 | 
			
		||||
 | 
			
		||||
    # broken but used to work
 | 
			
		||||
    # 'questrade',
 | 
			
		||||

@@ -35,7 +62,6 @@ __brokers__ = [
    # iex

    # deribit
    # kucoin
    # bitso
]


@@ -45,7 +71,7 @@ def get_brokermod(brokername: str) -> ModuleType:
    Return the imported broker module by name.

    '''
    module = import_module('.' + brokername, 'piker.brokers')
    module: ModuleType = import_module('.' + brokername, 'piker.brokers')
    # we only allow monkeying because it's for internal keying
    module.name = module.__name__.split('.')[-1]
    return module
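
# Usage sketch for the patched `get_brokermod()` (backend name is
# illustrative and must be an importable `piker.brokers` submod):
#
#   brokermod = get_brokermod('kraken')
#   assert brokermod.name == 'kraken'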

@@ -58,3 +84,28 @@ def iter_brokermods():
    '''
    for name in __brokers__:
        yield get_brokermod(name)


@acm
async def open_cached_client(
    brokername: str,
    **kwargs,

) -> 'Client':  # noqa
    '''
    Get a cached broker client from the current actor's local vars.

    If one has not been set up, do it and cache it.

    '''
    brokermod = get_brokermod(brokername)
    async with maybe_open_context(
        acm_func=brokermod.get_client,
        kwargs=kwargs,

    ) as (cache_hit, client):

        if cache_hit:
            log.runtime(f'Reusing existing {client}')

        yield client
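
# Hedged usage sketch for `open_cached_client()` (backend name is
# illustrative; it must appear in `__brokers__` and expose a
# `get_client()` acm):
#
#   async with open_cached_client('binance') as client:
#       pairs = await client.cache_symbols()
#
# a second acquisition from the same actor should hit the cache
# and reuse the underlying client instance.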

@@ -0,0 +1,276 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Broker-daemon-actor "endpoint-hooks": the service task entry points for
``brokerd``.

'''
from __future__ import annotations
from contextlib import (
    asynccontextmanager as acm,
)
from types import ModuleType
from typing import (
    TYPE_CHECKING,
    AsyncContextManager,
)
import exceptiongroup as eg

import tractor
import trio

from . import _util
from . import get_brokermod

if TYPE_CHECKING:
    from ..data import _FeedsBus

# `brokerd` enabled modules
# TODO: move this def to the `.data` subpkg..
# NOTE: keeping this list as small as possible is part of our caps-sec
# model and should be treated with utmost care!
_data_mods: list[str] = [
    'piker.brokers.core',
    'piker.brokers.data',
    'piker.brokers._daemon',
    'piker.data',
    'piker.data.feed',
    'piker.data._sampling'
]


# TODO: we should rename the daemon to datad prolly once we split up
# broker vs. data tasks into separate actors?
@tractor.context
async def _setup_persistent_brokerd(
    ctx: tractor.Context,
    brokername: str,
    loglevel: str | None = None,

) -> None:
    '''
    Allocate an actor-wide service nursery in ``brokerd``
    such that feeds can be run in the background persistently by
    the broker backend as needed.

    '''
    # NOTE: we only need to setup logging once (and only) here
    # since all hosted daemon tasks will reference this same
    # log instance's (actor local) state and thus don't require
    # any further (level) configuration on their own B)
    log = _util.get_console_log(
        loglevel or tractor.current_actor().loglevel,
        name=f'{_util.subsys}.{brokername}',
    )

    # set global for this actor to this new process-wide instance B)
    _util.log = log

    # further, set the log level on any broker-specific
    # logger instance.

    from piker.data import feed
    assert not feed._bus

    # allocate a nursery to the bus for spawning background
    # tasks to service client IPC requests, normally
    # `tractor.Context` connections to explicitly required
    # `brokerd` endpoints such as:
    # - `stream_quotes()`,
    # - `manage_history()`,
    # - `allocate_persistent_feed()`,
    # - `open_symbol_search()`
    # NOTE: see ep invocation details inside `.data.feed`.
    try:
        async with trio.open_nursery() as service_nursery:
            bus: _FeedsBus = feed.get_feed_bus(
                brokername,
                service_nursery,
            )
            assert bus is feed._bus

            # unblock caller
            await ctx.started()

            # we pin this task to keep the feeds manager active until the
            # parent actor decides to tear it down
            await trio.sleep_forever()

    except eg.ExceptionGroup:
        # TODO: likely some underlying `brokerd` IPC connection
        # broke so here we handle a respawn and re-connect attempt!
        # This likely should pair with development of the OCO task
        # nursery in dev over @ `tractor` B)
        # https://github.com/goodboy/tractor/pull/363
        raise


def broker_init(
    brokername: str,
    loglevel: str | None = None,

    **start_actor_kwargs,

) -> tuple[
    ModuleType,
    dict,
    AsyncContextManager,
]:
    '''
    Given an input broker name, load all named arguments
    which can be passed for daemon endpoint + context spawn
    as required in every `brokerd` (actor) service.

    This includes:
    - load the appropriate <brokername>.py pkg module,
    - read any declared `__enable_modules__: list[str]` which will be
      passed to `tractor.ActorNursery.start_actor(enabled_modules=<this>)`
      at actor start time,
    - deliver a reference to the daemon lifetime fixture, which
      for now is always the `_setup_persistent_brokerd()` context defined
      above.

    '''
    from ..brokers import get_brokermod
    brokermod = get_brokermod(brokername)
    modpath: str = brokermod.__name__

    start_actor_kwargs['name'] = f'brokerd.{brokername}'
    start_actor_kwargs.update(
        getattr(
            brokermod,
            '_spawn_kwargs',
            {},
        )
    )

    # XXX TODO: make this not so hacky/monkeypatched..
    # -> we need a sane way to configure the logging level for all
    # code running in brokerd.
    # if utilmod := getattr(brokermod, '_util', False):
    #     utilmod.log.setLevel(loglevel.upper())

    # lookup actor-enabled modules declared by the backend offering the
    # `brokerd` endpoint(s).
    enabled: list[str]
    enabled = start_actor_kwargs['enable_modules'] = [
        __name__,  # so that eps from THIS mod can be invoked
        modpath,
    ]
    for submodname in getattr(
        brokermod,
        '__enable_modules__',
        [],
    ):
        subpath: str = f'{modpath}.{submodname}'
        enabled.append(subpath)

    return (
        brokermod,
        start_actor_kwargs,  # to `ActorNursery.start_actor()`

        # XXX see impl above; contains all (actor global)
        # setup/teardown expected in all `brokerd` actor instances.
        _setup_persistent_brokerd,
    )

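# A rough consumption sketch for `broker_init()`'s return triple
# (this mirrors what `spawn_brokerd()` below does; the backend
# name is illustrative):
#
#   brokermod, start_kwargs, daemon_ep = broker_init(
#       'kraken',
#       loglevel='info',
#   )
#   # `start_kwargs` feeds `ActorNursery.start_actor()` and
#   # `daemon_ep` is the ctx ep opened for the daemon's lifetime.

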
async def spawn_brokerd(

    brokername: str,
    loglevel: str | None = None,

    **tractor_kwargs,

) -> bool:

    from piker.service._util import log  # use service mngr log
    log.info(f'Spawning {brokername} broker daemon')

    (
        brokermod,
        tractor_kwargs,
        daemon_fixture_ep,
    ) = broker_init(
        brokername,
        loglevel,
        **tractor_kwargs,
    )

    brokermod = get_brokermod(brokername)
    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
    tractor_kwargs.update(extra_tractor_kwargs)

    # ask `pikerd` to spawn a new sub-actor and manage it under its
    # actor nursery
    from piker.service import Services

    dname: str = tractor_kwargs.pop('name')  # f'brokerd.{brokername}'
    portal = await Services.actor_n.start_actor(
        dname,
        enable_modules=_data_mods + tractor_kwargs.pop('enable_modules'),
        debug_mode=Services.debug_mode,
        **tractor_kwargs
    )

    # NOTE: the service mngr expects an already spawned actor + its
    # portal ref in order to do non-blocking setup of brokerd
    # service nursery.
    await Services.start_service_task(
        dname,
        portal,

        # signature of target root-task endpoint
        daemon_fixture_ep,
        brokername=brokername,
        loglevel=loglevel,
    )
    return True

@acm
async def maybe_spawn_brokerd(

    brokername: str,
    loglevel: str | None = None,

    **pikerd_kwargs,

) -> tractor.Portal:
    '''
    Helper to spawn a brokerd service *from* a client who wishes to
    use the sub-actor-daemon but is fine with re-using any existing
    and contactable `brokerd`.

    Mas o menos, acts as a cached-actor-getter factory.

    '''
    from piker.service import maybe_spawn_daemon

    async with maybe_spawn_daemon(

        f'brokerd.{brokername}',
        service_task_target=spawn_brokerd,
        spawn_args={
            'brokername': brokername,
        },
        loglevel=loglevel,

        **pikerd_kwargs,

    ) as portal:
        yield portal
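
# Hedged usage sketch (presumes an already running `pikerd` tree;
# backend name is illustrative):
#
#   async with maybe_spawn_brokerd('kraken', loglevel='info') as portal:
#       ...  # open contexts/streams against the daemon's eps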

@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)
# Copyright (C) 2018-present Tyler Goodlet (in stewardship of pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@@ -15,13 +15,32 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Handy utils.
Handy cross-broker utils.

"""
from __future__ import annotations
from functools import partial

import json
import asks
import httpx
import logging

from ..log import colorize_json
from ..log import (
    get_logger,
    get_console_log,
    colorize_json,
)
subsys: str = 'piker.brokers'

# NOTE: level should be reset by any actor that is spawned
# as well as given a (more) explicit name/key such
# as `piker.brokers.binance` matching the subpkg.
log = get_logger(subsys)

get_console_log = partial(
    get_console_log,
    name=subsys,
)


class BrokerError(Exception):

@@ -32,6 +51,7 @@ class SymbolNotFound(BrokerError):
    "Symbol not found by broker search"
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# TODO: these should probably be moved to `.tsp/.data`?
 | 
			
		||||
class NoData(BrokerError):
 | 
			
		||||
    '''
 | 
			
		||||
    Symbol data not permitted or no data
 | 
			
		||||

@@ -41,14 +61,15 @@ class NoData(BrokerError):
    def __init__(
        self,
        *args,
        frame_size: int = 1000,
        info: dict|None = None,

    ) -> None:
        super().__init__(*args)
        self.info: dict|None = info

        # when raised, machinery can check if the backend
        # set a "frame size" for doing datetime calcs.
        self.frame_size: int = 1000
        # self.frame_size: int = 1000


class DataUnavailable(BrokerError):

@@ -69,18 +90,19 @@ class DataThrottle(BrokerError):
    # TODO: add in throttle metrics/feedback



def resproc(
    resp: asks.response_objects.Response,
    resp: httpx.Response,
    log: logging.Logger,
    return_json: bool = True,
    log_resp: bool = False,

) -> asks.response_objects.Response:
    """Process response and return its json content.
) -> httpx.Response:
    '''
    Process response and return its json content.

    Raise the appropriate error on non-200 OK responses.
    """

    '''
    if not resp.status_code == 200:
        raise BrokerError(resp.body)
    try:

@@ -1,599 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Binance backend

"""
from contextlib import asynccontextmanager as acm
from datetime import datetime
from typing import (
    Any, Union, Optional,
    AsyncGenerator, Callable,
)
import time

from trio_util import trio_async_generator
import trio
from trio_typing import TaskStatus
import pendulum
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import tractor
import wsproto

from .._cacheables import open_cached_client
from ._util import (
    resproc,
    SymbolNotFound,
    DataUnavailable,
)
from ..log import (
    get_logger,
    get_console_log,
)
from ..data.types import Struct
from ..data._web_bs import (
    open_autorecon_ws,
    NoBsWs,
)

log = get_logger(__name__)


_url = 'https://api.binance.com'


# Broker specific ohlc schema (rest)
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),  # will be zeroed by sampler if not filled

    # XXX: some additional fields are defined in the docs:
    # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data

    # ('close_time', int),
    # ('quote_vol', float),
    # ('num_trades', int),
    # ('buy_base_vol', float),
    # ('buy_quote_vol', float),
    # ('ignore', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

_show_wap_in_history = False


# https://binance-docs.github.io/apidocs/spot/en/#exchange-information
class Pair(Struct, frozen=True):
    symbol: str
    status: str

    baseAsset: str
    baseAssetPrecision: int
    cancelReplaceAllowed: bool
    allowTrailingStop: bool
    quoteAsset: str
    quotePrecision: int
    quoteAssetPrecision: int

    baseCommissionPrecision: int
    quoteCommissionPrecision: int

    orderTypes: list[str]

    icebergAllowed: bool
    ocoAllowed: bool
    quoteOrderQtyMarketAllowed: bool
    isSpotTradingAllowed: bool
    isMarginTradingAllowed: bool

    defaultSelfTradePreventionMode: str
    allowedSelfTradePreventionModes: list[str]

    filters: list[dict[str, Union[str, int, float]]]
    permissions: list[str]

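# Hedged sketch of validating a `Pair` from a raw `exchangeInfo`
# entry; since `Struct` wraps `msgspec.Struct`, type validation
# only happens on decode/encode (the `raw_entry_bytes` payload is
# hypothetical):
#
#   import msgspec
#   pair = msgspec.json.decode(raw_entry_bytes, type=Pair)
#   assert pair.symbol == 'BTCUSDT'

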
class OHLC(Struct):
    '''
    Description of the flattened OHLC quote format.

    For schema details see:
    https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-streams

    '''
    time: int

    open: float
    high: float
    low: float
    close: float
    volume: float

    close_time: int

    quote_vol: float
    num_trades: int
    buy_base_vol: float
    buy_quote_vol: float
    ignore: int

    # null the place holder for `bar_wap` until we
    # figure out what to extract for this.
    bar_wap: float = 0.0


# convert datetime obj timestamp to unixtime in milliseconds
def binance_timestamp(
    when: datetime
) -> int:
    return int((when.timestamp() * 1000) + (when.microsecond / 1000))
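
# Worked example for `binance_timestamp()` (illustrative value):
#
#   >>> from datetime import datetime, timezone
#   >>> binance_timestamp(datetime(2021, 1, 1, tzinfo=timezone.utc))
#   1609459200000
#
# i.e. epoch milliseconds as binance's kline API expects.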


class Client:

    def __init__(self) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._pairs: dict[str, Any] = {}

    async def _api(
        self,
        method: str,
        params: dict,
    ) -> dict[str, Any]:
        resp = await self._sesh.get(
            path=f'/api/v3/{method}',
            params=params,
            timeout=float('inf')
        )
        return resproc(resp, log)

    async def symbol_info(

        self,
        sym: Optional[str] = None,

    ) -> dict[str, Any]:
        '''Get symbol info for the exchange.

        '''
        # TODO: we can load from our self._pairs cache
        # on repeat calls...

        # will retrieve all symbols by default
        params = {}

        if sym is not None:
            sym = sym.lower()
            params = {'symbol': sym}

        resp = await self._api(
            'exchangeInfo',
            params=params,
        )

        entries = resp['symbols']
        if not entries:
            raise SymbolNotFound(f'{sym} not found')

        syms = {item['symbol']: item for item in entries}

        if sym is not None:
            return syms[sym]
        else:
            return syms

    async def cache_symbols(
        self,
    ) -> dict:
        if not self._pairs:
            self._pairs = await self.symbol_info()

        return self._pairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,
    ) -> dict[str, Any]:
        if self._pairs is not None:
            data = self._pairs
        else:
            data = await self.symbol_info()

        matches = fuzzy.extractBests(
            pattern,
            data,
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0]['symbol']: item[0]
                for item in matches}

    async def bars(
        self,
        symbol: str,
        start_dt: Optional[datetime] = None,
        end_dt: Optional[datetime] = None,
        limit: int = 1000,  # <- max allowed per query
        as_np: bool = True,

    ) -> dict:

        if end_dt is None:
            end_dt = pendulum.now('UTC').add(minutes=1)

        if start_dt is None:
            start_dt = end_dt.start_of(
                'minute').subtract(minutes=limit)

        start_time = binance_timestamp(start_dt)
        end_time = binance_timestamp(end_dt)

        # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data
        bars = await self._api(
            'klines',
            params={
                'symbol': symbol.upper(),
                'interval': '1m',
                'startTime': start_time,
                'endTime': end_time,
                'limit': limit
            }
        )

        # TODO: pack this bars scheme into a ``pydantic`` validator type:
        # https://binance-docs.github.io/apidocs/spot/en/#kline-candlestick-data

        # TODO: we should port this to ``pydantic`` to avoid doing
        # manual validation ourselves..
        new_bars = []
        for i, bar in enumerate(bars):

            bar = OHLC(*bar)
            bar.typecast()

            row = []
            for j, (name, ftype) in enumerate(_ohlc_dtype[1:]):

                # TODO: maybe we should go nanoseconds on all
                # history time stamps?
                if name == 'time':
                    # convert to epoch seconds: float
                    row.append(bar.time / 1000.0)

                else:
                    row.append(getattr(bar, name))

            new_bars.append((i,) + tuple(row))

        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
        return array


@acm
async def get_client() -> Client:
    client = Client()
    await client.cache_symbols()
    yield client


# validation type
class AggTrade(Struct):
    e: str  # Event type
    E: int  # Event time
    s: str  # Symbol
    a: int  # Aggregate trade ID
    p: float  # Price
    q: float  # Quantity
    f: int  # First trade ID
    l: int  # Last trade ID
    T: int  # Trade time
    m: bool  # Is the buyer the market maker?
    M: bool  # Ignore

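# Hedged sketch of the decode-time validation noted below inside
# `stream_messages()` (the raw payload is a fabricated sample):
#
#   import msgspec
#   trade = msgspec.json.decode(
#       b'{"e": "aggTrade", "E": 1, "s": "BTCUSDT", "a": 1,'
#       b' "p": 100.0, "q": 0.5, "f": 1, "l": 1, "T": 1,'
#       b' "m": false, "M": true}',
#       type=AggTrade,
#   )
#   assert trade.s == 'BTCUSDT'

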
@trio_async_generator
 | 
			
		||||
async def stream_messages(
 | 
			
		||||
    ws: NoBsWs,
 | 
			
		||||
) -> AsyncGenerator[NoBsWs, dict]:
 | 
			
		||||
 | 
			
		||||
    timeouts = 0
 | 
			
		||||
    while True:
 | 
			
		||||
 | 
			
		||||
        with trio.move_on_after(3) as cs:
 | 
			
		||||
            msg = await ws.recv_msg()
 | 
			
		||||
 | 
			
		||||
        if cs.cancelled_caught:
 | 
			
		||||
 | 
			
		||||
            timeouts += 1
 | 
			
		||||
            if timeouts > 2:
 | 
			
		||||
                log.error("binance feed seems down and slow af? rebooting...")
 | 
			
		||||
                await ws._connect()
 | 
			
		||||
 | 
			
		||||
            continue
 | 
			
		||||
 | 
			
		||||
        # for l1 streams binance doesn't add an event type field so
 | 
			
		||||
        # identify those messages by matching keys
 | 
			
		||||
        # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams
 | 
			
		||||
 | 
			
		||||
        if msg.get('u'):
            sym = msg['s']
            bid = float(msg['b'])
            bsize = float(msg['B'])
            ask = float(msg['a'])
            asize = float(msg['A'])

            yield 'l1', {
                'symbol': sym,
                'ticks': [
                    {'type': 'bid', 'price': bid, 'size': bsize},
                    {'type': 'bsize', 'price': bid, 'size': bsize},
                    {'type': 'ask', 'price': ask, 'size': asize},
                    {'type': 'asize', 'price': ask, 'size': asize}
                ]
            }

        elif msg.get('e') == 'aggTrade':

            # NOTE: this is purely for a definition, ``msgspec.Struct``
            # does not runtime-validate until you decode/encode.
            # see: https://jcristharif.com/msgspec/structs.html#type-validation
            msg = AggTrade(**msg)

            # TODO: type out and require this quote format
            # from all backends!
            yield 'trade', {
                'symbol': msg.s,
                'last': msg.p,
                'brokerd_ts': time.time(),
                'ticks': [{
                    'type': 'trade',
                    'price': float(msg.p),
                    'size': float(msg.q),
                    'broker_ts': msg.T,
                }],
            }


def make_sub(pairs: list[str], sub_name: str, uid: int) -> dict[str, str]:
    """Create a request subscription packet dict.

    https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams
    """
    return {
        'method': 'SUBSCRIBE',
        'params': [
            f'{pair.lower()}@{sub_name}'
            for pair in pairs
        ],
        'id': uid
    }
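
# NOTE (editor sketch): the packet emitted for a 2-pair book-ticker
# sub, per the ws-api docs linked above:
#
#   make_sub(['BTCUSDT', 'ETHUSDT'], 'bookTicker', uid=0)
#   # -> {
#   #     'method': 'SUBSCRIBE',
#   #     'params': ['btcusdt@bookTicker', 'ethusdt@bookTicker'],
#   #     'id': 0,
#   # }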


@acm
async def open_history_client(
    symbol: str,

) -> tuple[Callable, int]:

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('binance') as client:

        async def get_ohlc(
            timeframe: float,
            end_dt: datetime | None = None,
            start_dt: datetime | None = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            if timeframe != 60:
                raise DataUnavailable('Only 1m bars are supported')

            array = await client.bars(
                symbol,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            times = array['time']
            if (
                end_dt is None
            ):
                inow = round(time.time())
                if (inow - times[-1]) > 60:
                    await tractor.breakpoint()

            start_dt = pendulum.from_timestamp(times[0])
            end_dt = pendulum.from_timestamp(times[-1])

            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}
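
        # NOTE (editor, assumption): the 2nd yielded value reads as a
        # rate-limit config consumed by piker's history backfiller;
        # 'erlangs' as max concurrent frame requests and 'rate' as
        # requests per second.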


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:
    # XXX: required to propagate ``tractor`` loglevel to piker logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    sym_infos = {}
    uid = 0

    async with (
        open_cached_client('binance') as client,
        send_chan as send_chan,
    ):

        # keep client cached for real-time section
        cache = await client.cache_symbols()

        for sym in symbols:
            d = cache[sym.upper()]
            syminfo = Pair(**d)  # validation

            si = sym_infos[sym] = syminfo.to_dict()
            filters = {}
            for entry in syminfo.filters:
                ftype = entry['filterType']
                filters[ftype] = entry

            # XXX: after manually inspecting the response format we
            # just directly pick out the info we need
            si['price_tick_size'] = float(
                filters['PRICE_FILTER']['tickSize']
            )
            si['lot_tick_size'] = float(
                filters['LOT_SIZE']['stepSize']
            )
            si['asset_type'] = 'crypto'

        symbol = symbols[0]

        init_msgs = {
            # pass back token, and bool, signalling if we're the writer
            # and that history has been written
            symbol: {
                'symbol_info': sym_infos[sym],
                'shm_write_opts': {'sum_tick_vml': False},
                'fqsn': sym,
            },
        }

        @acm
        async def subscribe(ws: wsproto.WSConnection):
            # setup subs

            # trade data (aka L1)
            # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker
            l1_sub = make_sub(symbols, 'bookTicker', uid)
            await ws.send_msg(l1_sub)

            # aggregate (each order clear by taker **not** by maker)
            # trades data:
            # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams
            agg_trades_sub = make_sub(symbols, 'aggTrade', uid)
            await ws.send_msg(agg_trades_sub)

            # ack from ws server
            res = await ws.recv_msg()
            assert res['id'] == uid

            yield

            subs = []
            for sym in symbols:
                subs.append(f"{sym}@aggTrade")
                subs.append(f"{sym}@bookTicker")

            # unsub from all pairs on teardown
            if ws.connected():
                await ws.send_msg({
                    "method": "UNSUBSCRIBE",
                    "params": subs,
                    "id": uid,
                })

                # XXX: do we need to ack the unsub?
                # await ws.recv_msg()

        async with (
            open_autorecon_ws(
                # XXX: see api docs which show diff addr?
                # https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information
                # 'wss://ws-api.binance.com:443/ws-api/v3',
                'wss://stream.binance.com/ws',
                fixture=subscribe,
            ) as ws,

            # avoid stream-gen closure from breaking trio..
            stream_messages(ws) as msg_gen,
        ):
            typ, quote = await anext(msg_gen)

            # pull a first quote and deliver
            while typ != 'trade':
                typ, quote = await anext(msg_gen)

            task_status.started((init_msgs, quote))

            # signal to caller feed is ready for consumption
            feed_is_live.set()

            # import time
            # last = time.time()

            # start streaming
            async for typ, msg in msg_gen:

                # period = time.time() - last
                # hz = 1/period if period else float('inf')
                # if hz > 60:
                #     log.info(f'Binance quotez : {hz}')

                topic = msg['symbol'].lower()
                await send_chan.send({topic: msg})
                # last = time.time()
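
# NOTE (editor sketch): `stream_quotes()` is normally spawned by the
# feed layer via trio's "started" protocol; roughly:
#
#   async with trio.open_nursery() as n:
#       init_msgs, first_quote = await n.start(
#           partial(stream_quotes, send_chan, ['btcusdt'], feed_is_live)
#       )
#
# where `send_chan` and `feed_is_live` are provided by the caller.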


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> Client:
    async with open_cached_client('binance') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started()

        async with ctx.open_stream() as stream:

            async for pattern in stream:
                # results = await client.symbol_info(sym=pattern.upper())

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send(
                    {item[0]['symbol']: item[0]
                     for item in matches}
                )
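
# NOTE (editor, assumption): with a dict passed as the choices arg,
# fuzzywuzzy's `extractBests()` returns `(value, score, key)` triples,
# hence the `item[0]['symbol']` repack above.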

@@ -0,0 +1,60 @@
# piker: trading gear for hackers
# Copyright (C)
#   Guillermo Rodriguez (aka ze jefe)
#   Tyler Goodlet
#   (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
binancial secs on the floor, in the office, behind the dumpster.

"""
from .api import (
    get_client,
)
from .feed import (
    get_mkt_info,
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import (
    open_trade_dialog,
    get_cost,
)
from .venues import (
    SpotPair,
    FutesPair,
)

__all__ = [
    'get_client',
    'get_mkt_info',
    'get_cost',
    'SpotPair',
    'FutesPair',
    'open_trade_dialog',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
]


# `brokerd` modules
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]

File diff suppressed because it is too large

@@ -0,0 +1,710 @@
# piker: trading gear for hackers
# Copyright (C)
#   Guillermo Rodriguez (aka ze jefe)
#   Tyler Goodlet
#   (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Live order control B)

'''
from __future__ import annotations
from pprint import pformat
from typing import (
    Any,
    AsyncIterator,
)
import time
from time import time_ns

from bidict import bidict
import tractor
import trio

from piker.accounting import (
    Asset,
)
from piker.brokers._util import (
    get_logger,
)
from piker.data._web_bs import (
    open_autorecon_ws,
    NoBsWs,
)
from piker.brokers import (
    open_cached_client,
    BrokerError,
)
from piker.clearing import (
    OrderDialogs,
)
from piker.clearing._messages import (
    BrokerdOrder,
    BrokerdOrderAck,
    BrokerdStatus,
    BrokerdPosition,
    BrokerdFill,
    BrokerdCancel,
    BrokerdError,
    Status,
    Order,
)
from .venues import (
    Pair,
    _futes_ws,
    _testnet_futes_ws,
)
from .api import Client

log = get_logger('piker.brokers.binance')


# Fee schedule template, mostly for paper engine fees modelling.
# https://www.binance.com/en/support/faq/what-are-market-makers-and-takers-360007720071
def get_cost(
    price: float,
    size: float,
    is_taker: bool = False,

) -> float:

    # https://www.binance.com/en/fee/trading
    cb: float = price * size
    match is_taker:
        case True:
            return cb * 0.001000

        case False if cb < 1e6:
            return cb * 0.001000

        case False if 1e6 <= cb < 5e6:
            return cb * 0.000900

        # NOTE: there's more but are you really going
        # to have a cb bigger than this per trade?
        case False if cb >= 5e6:
            return cb * 0.000800
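
# NOTE (editor sketch): quick sanity check of the fee tiers above;
# numbers follow directly from the multipliers:
#
#   get_cost(price=30_000, size=0.1, is_taker=True)  # cb=3e3 -> 3.0 (0.10%)
#   get_cost(price=30_000, size=100)                 # cb=3e6 -> 2700.0 (0.09% maker tier)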


async def handle_order_requests(
    ems_order_stream: tractor.MsgStream,
    client: Client,
    dids: bidict[str, str],
    dialogs: OrderDialogs,

) -> None:
    '''
    Receive order requests from `emsd`, translate them into API calls
    and transmit.

    '''
    msg: dict | BrokerdOrder | BrokerdCancel
    async for msg in ems_order_stream:
        log.info(f'Rx order request:\n{pformat(msg)}')
        match msg:
            case {
                'action': 'cancel',
            }:
                cancel = BrokerdCancel(**msg)
                existing: BrokerdOrder | None = dialogs.get(cancel.oid)
                if not existing:
                    log.error(
                        f'NO Existing order-dialog for {cancel.oid}!?'
                    )
                    await ems_order_stream.send(BrokerdError(
                        oid=cancel.oid,

                        # TODO: do we need the symbol?
                        # https://github.com/pikers/piker/issues/514
                        symbol='unknown',

                        reason=(
                            'Invalid `binance` order request dialog oid',
                        )
                    ))
                    continue

                else:
                    symbol: str = existing['symbol']
                    try:
                        await client.submit_cancel(
                            symbol,
                            cancel.oid,
                        )
                    except BrokerError as be:
                        await ems_order_stream.send(
                            BrokerdError(
                                oid=msg['oid'],
                                symbol=symbol,
                                reason=(
                                    '`binance` CANCEL failed:\n'
                                    f'{be}'
                                ))
                        )
                        continue

            case {
                'account': ('binance.usdtm' | 'binance.spot') as account,
                'action': action,
            } if action in {'buy', 'sell'}:

                # validate
                order = BrokerdOrder(**msg)
                oid: str = order.oid  # emsd order id
                modify: bool = False

                # NOTE: check and report edits
                if existing := dialogs.get(order.oid):
                    log.info(
                        f'Existing order for {oid} updated:\n'
                        f'{pformat(existing.maps[-1])} -> {pformat(msg)}'
                    )
                    modify = True

                    # only add new msg AFTER the existing check
                    dialogs.add_msg(oid, msg)

                else:
                    # XXX NOTE: update before the ack!
                    # track latest request state such that map
                    # lookups start at the most recent msg and then
                    # scan reverse-chronologically.
                    dialogs.add_msg(oid, msg)

                    # XXX: ACK the request **immediately** before sending
                    # the api side request to ensure the ems maps the oid ->
                    # reqid correctly!
                    resp = BrokerdOrderAck(
                        oid=oid,  # ems order request id
                        reqid=oid,  # our custom int mapping
                        account='binance',  # piker account
                    )
                    await ems_order_stream.send(resp)

                # call our client api to submit the order
                # NOTE: modifies only require diff key for user oid:
                # https://binance-docs.github.io/apidocs/futures/en/#modify-order-trade
                try:
                    reqid = await client.submit_limit(
                        symbol=order.symbol,
                        side=order.action,
                        quantity=order.size,
                        price=order.price,
                        oid=oid,
                        modify=modify,
                    )

                    # SMH they do gen their own order id: ints..
                    # assert reqid == order.oid
                    dids[order.oid] = reqid

                except BrokerError as be:
                    await ems_order_stream.send(
                        BrokerdError(
                            oid=msg['oid'],
                            symbol=msg['symbol'],
                            reason=(
                                '`binance` request failed:\n'
                                f'{be}'
                            ))
                    )
                    continue

            case _:
                account = msg.get('account')
                if account not in {'binance.spot', 'binance.usdtm'}:
                    log.error(
                        'Order request does not have a valid binance account name?\n'
                        'Only one of\n'
                        '- `binance.spot` or,\n'
                        '- `binance.usdtm`\n'
                        'is currently valid!'
                    )
                await ems_order_stream.send(
                    BrokerdError(
                        oid=msg['oid'],
                        symbol=msg['symbol'],
                        reason=(
                            f'Invalid `binance` broker request msg:\n{msg}'
                        ))
                )
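
# NOTE (editor sketch): minimal shapes of the msgs matched above; field
# names come from the `Brokerd*` msgs in `piker.clearing._messages`
# (values here are made up):
#
#   {'action': 'cancel', 'oid': '<ems-uuid>'}
#   {'action': 'buy', 'account': 'binance.usdtm', 'oid': '<ems-uuid>',
#    'symbol': 'btcusdt.binance', 'price': 21136.8, 'size': 0.047}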


@tractor.context
async def open_trade_dialog(
    ctx: tractor.Context,

) -> AsyncIterator[dict[str, Any]]:

    # TODO: how do we set this from the EMS such that
    # positions are loaded from the correct venue on the user
    # stream at startup? (that is in an attempt to support both
    # spot and futes markets?)
    # - I guess we just want to instead start 2 separate user
    #   stream tasks right? unless we want another actor pool?
    #   XXX: see issue: <urlhere>
    venue_name: str = 'futes'
    venue_mode: str = 'usdtm_futes'
    account_name: str = 'usdtm'
    use_testnet: bool = False

    # TODO: if/when we add .accounting support we need to
    # do a open_symcache() call.. though maybe we can hide
    # this in a new async version of open_account()?
    async with open_cached_client('binance') as client:
        subconf: dict|None = client.conf.get(venue_name)

        # XXX: if no futes.api_key or spot.api_key has been set we
        # always fall back to the paper engine!
        if (
            not subconf
            or
            not subconf.get('api_key')
        ):
            await ctx.started('paper')
            return

        use_testnet: bool = subconf.get('use_testnet', False)

    async with (
        open_cached_client('binance') as client,
    ):
        client.mkt_mode: str = venue_mode

        # TODO: map these wss urls depending on spot or futes
        # setting passed when this task is spawned?
        wss_url: str = _futes_ws if not use_testnet else _testnet_futes_ws

        wss: NoBsWs
        async with (
            client.manage_listen_key() as listen_key,
            open_autorecon_ws(f'{wss_url}/?listenKey={listen_key}') as wss,
        ):
            nsid: int = time_ns()
            await wss.send_msg({
                # "method": "SUBSCRIBE",
                "method": "REQUEST",
                "params":
                [
                    f"{listen_key}@account",
                    f"{listen_key}@balance",
                    f"{listen_key}@position",

                    # TODO: does this even work!? seems to cause
                    # a hang on the first msg..? lelelel.
                    # f"{listen_key}@order",
                ],
                "id": nsid
            })

            with trio.fail_after(6):
                msg = await wss.recv_msg()
                assert msg['id'] == nsid
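
            # NOTE (editor, context not from this diff): binance
            # user-stream listen keys expire roughly 60min after
            # creation unless kept alive; `manage_listen_key()` above
            # presumably handles the periodic keepalive ping internally.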

            # TODO: load other market wide data / statistics:
            # - OI: https://binance-docs.github.io/apidocs/futures/en/#open-interest
            # - OI stats: https://binance-docs.github.io/apidocs/futures/en/#open-interest-statistics
            accounts: bidict[str, str] = bidict({'binance.usdtm': None})
            balances: dict[Asset, tuple[float, int]] = {}
            positions: list[BrokerdPosition] = []

            for resp_dict in msg['result']:
                resp: dict = resp_dict['res']
                req: str = resp_dict['req']

                # @account response should be something like:
                # {'accountAlias': 'sRFzFzAuuXsR',
                #  'canDeposit': True,
                #  'canTrade': True,
                #  'canWithdraw': True,
                #  'feeTier': 0}
                if 'account' in req:
                    # NOTE: fill in the hash-like key/alias binance
                    # provides for the account.
                    alias: str = resp['accountAlias']
                    accounts['binance.usdtm'] = alias

                # @balance response:
                # {'accountAlias': 'sRFzFzAuuXsR',
                #      'balances': [{'asset': 'BTC',
                #                    'availableBalance': '0.00000000',
                #                    'balance': '0.00000000',
                #                    'crossUnPnl': '0.00000000',
                #                    'crossWalletBalance': '0.00000000',
                #                    'maxWithdrawAmount': '0.00000000',
                #                    'updateTime': 0}]
                #                     ...
                # }
                elif 'balance' in req:
                    for entry in resp['balances']:
                        name: str = entry['asset']
                        balance: float = float(entry['balance'])
                        last_update_t: int = entry['updateTime']

                        spot_asset: Asset = client._venue2assets['spot'][name]

                        if balance > 0:
                            balances[spot_asset] = (balance, last_update_t)
                            # await tractor.pause()

                # @position response:
                # {'positions': [{'entryPrice': '0.0',
                #                    'isAutoAddMargin': False,
                #                    'isolatedMargin': '0',
                #                    'leverage': 20,
                #                    'liquidationPrice': '0',
                #                    'marginType': 'CROSSED',
                #                    'markPrice': '0.60289650',
                #                    'markPrice': '0.00000000',
                #                    'maxNotionalValue': '25000',
                #                    'notional': '0',
                #                    'positionAmt': '0',
                #                    'positionSide': 'BOTH',
                #                    'symbol': 'ETHUSDT_230630',
                #                    'unRealizedProfit': '0.00000000',
                #                    'updateTime': 1672741444894}
                #                    ...
                # }
                elif 'position' in req:
                    for entry in resp['positions']:
                        bs_mktid: str = entry['symbol']
                        entry_size: float = float(entry['positionAmt'])

                        pair: Pair | None = client._venue2pairs[
                            venue_mode
                        ].get(bs_mktid)
                        if (
                            pair
                            and entry_size > 0
                        ):
                            entry_price: float = float(entry['entryPrice'])

                            ppmsg = BrokerdPosition(
                                broker='binance',
                                account=f'binance.{account_name}',

                                # TODO: maybe we should be passing back
                                # a `MktPair` here?
                                symbol=pair.bs_fqme.lower() + '.binance',

                                size=entry_size,
                                avg_price=entry_price,
                            )
                            positions.append(ppmsg)

                        if pair is None:
                            log.warning(
                                f'`{bs_mktid}` Position entry but no market pair?\n'
                                f'{pformat(entry)}\n'
                            )

            await ctx.started((
                positions,
                list(accounts)
            ))

            # TODO: package more state tracking into the dialogs API?
            # - hmm maybe we could include `OrderDialogs.dids:
            #   bidict` as part of the interface and then ask for
            #   a reqid field to be passed at init?
            #   |-> `OrderDialog(reqid_field='orderId')` kinda thing?
            # - also maybe bundle in some kind of dialog to account
            #   table?
            dialogs = OrderDialogs()
            dids: dict[str, int] = bidict()

            # TODO: further init setup things to get full EMS and
            # .accounting support B)
            # - live order loading via user stream subscription and
            #   update to the order dialog table.
            #   - MAKE SURE we add live orders loaded during init
            #   into the dialogs table to ensure they can be
            #   cancelled, meaning we can do a symbol lookup.
            # - position loading using `piker.accounting` subsys
            #   and comparison with binance's own position calcs.
            # - load pps and accounts using accounting apis, write
            #   the ledger and account files
            #   - table: Account
            #   - ledger: TransactionLedger

            async with (
                trio.open_nursery() as tn,
                ctx.open_stream() as ems_stream,
            ):
                # deliver all pre-existing open orders to EMS thus
                # syncing state with existing live limits reported by them.
                order: Order
                for order in await client.get_open_orders():
                    status_msg = Status(
                        time_ns=time.time_ns(),
                        resp='open',
                        oid=order.oid,
                        reqid=order.oid,

                        # embedded order info
                        req=order,
                        src='binance',
                    )
                    dialogs.add_msg(order.oid, order.to_dict())
                    await ems_stream.send(status_msg)

                tn.start_soon(
                    handle_order_requests,
                    ems_stream,
                    client,
                    dids,
                    dialogs,
                )
                tn.start_soon(
                    handle_order_updates,
                    venue_mode,
                    account_name,
                    client,
                    ems_stream,
                    wss,
                    dialogs,
                )

                await trio.sleep_forever()


async def handle_order_updates(
    venue: str,
    account_name: str,
    client: Client,
    ems_stream: tractor.MsgStream,
    wss: NoBsWs,
    dialogs: OrderDialogs,

) -> None:
    '''
    Main msg handling loop for all things order management.

    This code is broken out to make the context explicit and state
    variables defined in the signature clear to the reader.

    '''
    async for msg in wss:
        log.info(f'Rx USERSTREAM msg:\n{pformat(msg)}')
        match msg:

            # ORDER update
            # spot: https://binance-docs.github.io/apidocs/spot/en/#payload-balance-update
            # futes: https://binance-docs.github.io/apidocs/futures/en/#event-order-update
            # futes: https://binance-docs.github.io/apidocs/futures/en/#event-balance-and-position-update
            # {'o': {
            #    'L': '0',
            #    'N': 'USDT',
            #    'R': False,
            #    'S': 'BUY',
            #    'T': 1687028772484,
            #    'X': 'NEW',
            #    'a': '0',
            #    'ap': '0',
            #    'b': '7012.06520',
            #    'c': '518d4122-8d3e-49b0-9a1e-1fabe6f62e4c',
            #    'cp': False,
            #    'f': 'GTC',
            #    'i': 3376956924,
            #    'l': '0',
            #    'm': False,
            #    'n': '0',
            #    'o': 'LIMIT',
            #    'ot': 'LIMIT',
            #    'p': '21136.80',
            #    'pP': False,
            #    'ps': 'BOTH',
            #    'q': '0.047',
            #    'rp': '0',
            #    's': 'BTCUSDT',
            #    'si': 0,
            #    'sp': '0',
            #    'ss': 0,
            #    't': 0,
            #    'wt': 'CONTRACT_PRICE',
            #    'x': 'NEW',
            #    'z': '0'}
            # }
            case {
                # 'e': 'executionReport',
                'e': 'ORDER_TRADE_UPDATE',
                'T': int(epoch_ms),
                'o': {
                    's': bs_mktid,

                    # XXX NOTE XXX see special ids for market
                    # events or margin calls:
                    # // special client order id:
                    # // starts with "autoclose-": liquidation order
                    # // "adl_autoclose": ADL auto close order
                    # // "settlement_autoclose-": settlement order
                    #     for delisting or delivery
                    'c': oid,
                    # 'i': reqid,  # binance internal int id

                    # prices
                    'a': submit_price,
                    'ap': avg_price,
                    'L': fill_price,

                    # sizing
                    'q': req_size,
                    'l': clear_size_filled,  # this event
                    'z': accum_size_filled,  # accum

                    # commissions
                    'n': cost,
                    'N': cost_asset,

                    # state
                    'S': side,
                    'X': status,
                },
            } as order_msg:
                log.info(
                    f'{status} for {side} ORDER oid: {oid}\n'
                    f'bs_mktid: {bs_mktid}\n\n'

                    f'order size: {req_size}\n'
                    f'cleared size: {clear_size_filled}\n'
                    f'accum filled size: {accum_size_filled}\n\n'

                    f'submit price: {submit_price}\n'
                    f'fill_price: {fill_price}\n'
                    f'avg clearing price: {avg_price}\n\n'

                    f'cost: {cost}@{cost_asset}\n'
                )

                # status remap from binance to piker's
                # status set:
                # - NEW
                # - PARTIALLY_FILLED
                # - FILLED
                # - CANCELED
                # - EXPIRED
                # https://binance-docs.github.io/apidocs/futures/en/#event-order-update

                req_size: float = float(req_size)
                accum_size_filled: float = float(accum_size_filled)
                fill_price: float = float(fill_price)

                match status:
                    case 'PARTIALLY_FILLED' | 'FILLED':
                        status = 'fill'

                        fill_msg = BrokerdFill(
                            time_ns=time_ns(),
                            # reqid=reqid,
                            reqid=oid,

                            # just use size value for now?
                            # action=action,
                            size=clear_size_filled,
                            price=fill_price,

                            # TODO: maybe capture more msg data
                            # i.e fees?
                            broker_details={'name': 'broker'} | order_msg,
                            broker_time=time.time(),
                        )
                        await ems_stream.send(fill_msg)

                        if accum_size_filled == req_size:
                            status = 'closed'
                            dialogs.pop(oid)

                    case 'NEW':
                        status = 'open'

                    case 'EXPIRED':
                        status = 'canceled'
                        dialogs.pop(oid)

                    case _:
                        status = status.lower()

                resp = BrokerdStatus(
                    time_ns=time_ns(),
                    # reqid=reqid,
                    reqid=oid,

                    # TODO: i feel like we don't need to make the
                    # ems and upstream clients aware of this?
                    # account='binance.usdtm',

                    status=status,

                    filled=accum_size_filled,
                    remaining=req_size - accum_size_filled,
                    broker_details={
                        'name': 'binance',
                        'broker_time': epoch_ms / 1000.
                    }
                )
                await ems_stream.send(resp)

            # ACCOUNT and POSITION update B)
            # {
            #  'E': 1687036749218,
            #  'e': 'ACCOUNT_UPDATE'
            #  'T': 1687036749215,
            #  'a': {'B': [{'a': 'USDT',
            #               'bc': '0',
            #               'cw': '1267.48920735',
            #               'wb': '1410.90245576'}],
            #        'P': [{'cr': '-3292.10973007',
            #               'ep': '26349.90000',
            #               'iw': '143.41324841',
            #               'ma': 'USDT',
            #               'mt': 'isolated',
            #               'pa': '0.038',
            #               'ps': 'BOTH',
            #               's': 'BTCUSDT',
            #               'up': '5.17555453'}],
            #        'm': 'ORDER'},
            # }
            case {
                'T': int(epoch_ms),
                'e': 'ACCOUNT_UPDATE',
                'a': {
                    'P': [{
                        's': bs_mktid,
                        'pa': pos_amount,
                        'ep': entry_price,
                    }],
                },
            }:
                # real-time relay position updates back to EMS
                pair: Pair | None = client._venue2pairs[venue].get(bs_mktid)
                ppmsg = BrokerdPosition(
                    broker='binance',
                    account=f'binance.{account_name}',

                    # TODO: maybe we should be passing back
                    # a `MktPair` here?
                    symbol=pair.bs_fqme.lower() + '.binance',

                    size=float(pos_amount),
                    avg_price=float(entry_price),
                )
                await ems_stream.send(ppmsg)

            case _:
                log.warning(
                    'Unhandled event:\n'
                    f'{pformat(msg)}'
                )

@@ -0,0 +1,557 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Real-time and historical data feed endpoints.

'''
from __future__ import annotations
from contextlib import (
    asynccontextmanager as acm,
    aclosing,
)
from datetime import datetime
from functools import (
    partial,
)
import itertools
from pprint import pformat
from typing import (
    Any,
    AsyncGenerator,
    Callable,
    Generator,
)
import time

import trio
from trio_typing import TaskStatus
from pendulum import (
    from_timestamp,
)
import numpy as np
import tractor

from piker.brokers import (
    open_cached_client,
    NoData,
)
from piker._cacheables import (
    async_lifo_cache,
)
from piker.accounting import (
    Asset,
    DerivTypes,
    MktPair,
    unpack_fqme,
)
from piker.types import Struct
from piker.data.validate import FeedInit
from piker.data._web_bs import (
    open_autorecon_ws,
    NoBsWs,
)
from piker.brokers._util import (
    DataUnavailable,
    get_logger,
)

from .api import (
    Client,
)
from .venues import (
    Pair,
    FutesPair,
    get_api_eps,
)

log = get_logger('piker.brokers.binance')


class L1(Struct):
    # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams

    update_id: int
    sym: str

    bid: float
    bsize: float
    ask: float
    asize: float


# validation type
class AggTrade(Struct, frozen=True):
    e: str  # Event type
    E: int  # Event time
    s: str  # Symbol
    a: int  # Aggregate trade ID
    p: float  # Price
    q: float  # Quantity
    f: int  # First trade ID
    l: int  # noqa: E741 Last trade ID
    T: int  # Trade time
    m: bool  # Is the buyer the market maker?
    M: bool | None = None  # Ignore


async def stream_messages(
    ws: NoBsWs,

) -> AsyncGenerator[NoBsWs, dict]:

    # TODO: match syntax here!
    msg: dict[str, Any]
    async for msg in ws:
        match msg:
            # for l1 streams binance doesn't add an event type field so
            # identify those messages by matching keys
            # https://binance-docs.github.io/apidocs/spot/en/#individual-symbol-book-ticker-streams
            case {
                # NOTE: this is never an old value it seems, so
                # they are always sending real L1 spread updates.
                'u': upid,  # update id
                's': sym,
                'b': bid,
                'B': bsize,
                'a': ask,
                'A': asize,
            }:
                # TODO: it would be super nice to have a `L1` piker type
                # which "renders" incremental tick updates from a packed
                # msg-struct:
                # - backend msgs after packed into the type such that we
                #   can reduce IPC usage but without each backend having
                #   to do that incremental update logic manually B)
                # - would it maybe be more efficient to use this instead?
                #   https://binance-docs.github.io/apidocs/spot/en/#diff-depth-stream
                l1 = L1(
                    update_id=upid,
                    sym=sym,
                    bid=bid,
                    bsize=bsize,
                    ask=ask,
                    asize=asize,
                )
                # for speed probably better to only specifically
                # cast fields we need in numerical form?
                # l1.typecast()

                # repack into piker's tick-quote format
                yield 'l1', {
                    'symbol': l1.sym,
                    'ticks': [
                        {
                            'type': 'bid',
                            'price': float(l1.bid),
                            'size': float(l1.bsize),
                        },
                        {
                            'type': 'bsize',
                            'price': float(l1.bid),
                            'size': float(l1.bsize),
                        },
                        {
                            'type': 'ask',
                            'price': float(l1.ask),
                            'size': float(l1.asize),
                        },
                        {
                            'type': 'asize',
                            'price': float(l1.ask),
                            'size': float(l1.asize),
                        }
                    ]
                }

            # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams
 | 
			
		||||
            case {
 | 
			
		||||
                'e': 'aggTrade',
 | 
			
		||||
            }:
 | 
			
		||||
                # NOTE: this is purely for a definition,
 | 
			
		||||
                # ``msgspec.Struct`` does not runtime-validate until you
 | 
			
		||||
                # decode/encode, see:
 | 
			
		||||
                # https://jcristharif.com/msgspec/structs.html#type-validation
 | 
			
		||||
                msg = AggTrade(**msg)  # TODO: should we .copy() ?
 | 
			
		||||
                piker_quote: dict = {
 | 
			
		||||
                    'symbol': msg.s,
 | 
			
		||||
                    'last': float(msg.p),
 | 
			
		||||
                    'brokerd_ts': time.time(),
 | 
			
		||||
                    'ticks': [{
 | 
			
		||||
                        'type': 'trade',
 | 
			
		||||
                        'price': float(msg.p),
 | 
			
		||||
                        'size': float(msg.q),
 | 
			
		||||
                        'broker_ts': msg.T,
 | 
			
		||||
                    }],
 | 
			
		||||
                }
 | 
			
		||||
                yield 'trade', piker_quote
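

# e.g. a hypothetical consumer sketch (NOT part of this backend):
# drain the normalized (typ, quote) pairs from the parser above and
# route on the tick type; assumes `stream_messages()` takes the live
# `NoBsWs` as shown in this module.
async def log_quotes(ws: NoBsWs) -> None:
    async for typ, quote in stream_messages(ws):
        match typ:
            case 'l1':
                # first tick in the repacked list is the 'bid'
                bid: float = quote['ticks'][0]['price']
                log.info(f'{quote["symbol"]} bid -> {bid}')
            case 'trade':
                log.info(f'{quote["symbol"]} last -> {quote["last"]}')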


def make_sub(
    pairs: list[str],
    sub_name: str,
    uid: int,

) -> dict[str, str | list[str] | int]:
    '''
    Create a request subscription packet dict.

    - spot:
      https://binance-docs.github.io/apidocs/spot/en/#live-subscribing-unsubscribing-to-streams

    - futes:
      https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams

    '''
    return {
        'method': 'SUBSCRIBE',
        'params': [
            f'{pair.lower()}@{sub_name}'
            for pair in pairs
        ],
        'id': uid
    }
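
# e.g. the packet produced for a two-pair L1 book sub:
#
# >>> make_sub(['BTCUSDT', 'ETHUSDT'], 'bookTicker', 1)
# {
#     'method': 'SUBSCRIBE',
#     'params': ['btcusdt@bookTicker', 'ethusdt@bookTicker'],
#     'id': 1,
# }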


# TODO, why aren't frame resp `log.info()`s showing in upstream
# code?!
@acm
async def open_history_client(
    mkt: MktPair,

) -> tuple[Callable, int]:

    # TODO: implement history getter for the new storage layer.
    async with open_cached_client('binance') as client:

        async def get_ohlc(
            timeframe: float,
            end_dt: datetime | None = None,
            start_dt: datetime | None = None,

        ) -> tuple[
            np.ndarray,
            datetime,  # start
            datetime,  # end
        ]:
            if timeframe != 60:
                raise DataUnavailable('Only 1m bars are supported')

            # TODO: better wrapping for venue / mode?
            # - eventually logic for usd vs. coin settled futes
            #   based on `MktPair.src` type/value?
            # - maybe something like `async with
            #   Client.use_venue('usdtm_futes')`
            if mkt.type_key in DerivTypes:
                client.mkt_mode = 'usdtm_futes'
            else:
                client.mkt_mode = 'spot'

            array: np.ndarray = await client.bars(
                mkt=mkt,
                start_dt=start_dt,
                end_dt=end_dt,
            )
            if array.size == 0:
                raise NoData(
                    f'No frame for {start_dt} -> {end_dt}\n'
                )

            times = array['time']
            if not times.any():
                raise ValueError(
                    'Bad frame with null-times?\n\n'
                    f'{times}'
                )

            if end_dt is None:
                inow: int = round(time.time())
                if (inow - times[-1]) > 60:
                    await tractor.pause()

            start_dt = from_timestamp(times[0])
            end_dt = from_timestamp(times[-1])
            return array, start_dt, end_dt

        yield get_ohlc, {'erlangs': 3, 'rate': 3}
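
# e.g. hypothetical caller-side usage of the delivered endpoint
# (names per this module; the rate-limit dict shape as yielded above):
#
#   async with open_history_client(mkt) as (get_ohlc, limits):
#       array, first_dt, last_dt = await get_ohlc(60)
#       assert limits == {'erlangs': 3, 'rate': 3}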


@async_lifo_cache()
async def get_mkt_info(
    fqme: str,

) -> tuple[MktPair, Pair] | None:

    # uppercase since kraken bs_mktid is always upper
    if 'binance' not in fqme.lower():
        fqme += '.binance'

    mkt_mode: str = ''
    broker, mkt_ep, venue, expiry = unpack_fqme(fqme)

    # NOTE: we always upper case all tokens to be consistent with
    # binance's symbology style for pairs, like `BTCUSDT`, but in
    # theory we could also just keep things lower case; as long as
    # we're consistent and the symcache matches whatever this func
    # returns, always!
    expiry: str = expiry.upper()
    venue: str = venue.upper()
    venue_lower: str = venue.lower()

    # XXX TODO: we should change the usdtm_futes name to just
    # usdm_futes (dropping the tether part) since it turns out that
    # there are indeed USD-tokens OTHER THAN tether being used as
    # the margin assets.. it's going to require a wholesale
    # (variable/key) rename as well as file name adjustments to any
    # existing tsdb set..
    if 'usd' in venue_lower:
        mkt_mode: str = 'usdtm_futes'

    # NO IDEA what these contracts (some kinda DEX-ish futes?) are
    # but we're masking them for now..
    elif (
        'defi' in venue_lower

        # TODO: handle coinm futes which have a margin asset that
        # is some crypto token!
        # https://binance-docs.github.io/apidocs/delivery/en/#exchange-information
        or 'btc' in venue_lower
    ):
        return None

    else:
        # NOTE: see the `FutesPair.bs_fqme: str` implementation
        # to understand the reverse market info lookup below.
        mkt_mode = venue_lower or 'spot'

    if (
        venue
        and 'spot' not in venue_lower

        # XXX: catch all in case user doesn't know which
        # venue they want (usdtm vs. coinm) and we can choose
        # a default (via config?) once we support coin-m APIs.
        or 'perp' in venue_lower
    ):
        if not mkt_mode:
            mkt_mode: str = f'{venue_lower}_futes'

    async with open_cached_client(
        'binance',
    ) as client:

        assets: dict[str, Asset] = await client.get_assets()
        pair_str: str = mkt_ep.upper()

        # switch venue-mode depending on input pattern parsing
        # since we want to use a particular endpoint (set) for
        # pair info lookup!
        client.mkt_mode = mkt_mode

        pair: Pair = await client.exch_info(
            pair_str,
            venue=mkt_mode,  # explicit
            expiry=expiry,
        )

        if 'futes' in mkt_mode:
            assert isinstance(pair, FutesPair)

        dst: Asset | None = assets.get(pair.bs_dst_asset)
        if (
            not dst
            # TODO: a known asset DNE list?
            # and pair.baseAsset == 'DEFI'
        ):
            log.warning(
                f'UNKNOWN {venue} asset {pair.baseAsset} from,\n'
                f'{pformat(pair.to_dict())}'
            )

            # XXX UNKNOWN missing "asset", though no idea why?
            # maybe it's only avail in the margin venue(s): /dapi/ ?
            return None

        mkt = MktPair(
            dst=dst,
            src=assets[pair.bs_src_asset],
            price_tick=pair.price_tick,
            size_tick=pair.size_tick,
            bs_mktid=pair.symbol,
            expiry=expiry,
            venue=venue,
            broker='binance',

            # NOTE: sectype is always taken from dst, see
            # `MktPair.type_key` and `Client._cache_pairs()`
            # _atype=sectype,
        )
        return mkt, pair
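
# e.g. fqme inputs and the venue-mode each resolves to via the
# parsing above (illustrative, per this func's logic only):
#
#   'btcusdt.spot.binance'       -> mkt_mode='spot'
#   'btcusdt.usdtm.perp.binance' -> mkt_mode='usdtm_futes'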


@acm
async def subscribe(
    ws: NoBsWs,
    symbols: list[str],

    # defined once at import time to keep a global state B)
    iter_subids: Generator[int, None, None] = itertools.count(),

):
    # setup subs

    subid: int = next(iter_subids)

    # trade data (aka L1)
    # https://binance-docs.github.io/apidocs/spot/en/#symbol-order-book-ticker
    l1_sub = make_sub(symbols, 'bookTicker', subid)
    await ws.send_msg(l1_sub)

    # aggregate (each order clear by taker **not** by maker)
    # trades data:
    # https://binance-docs.github.io/apidocs/spot/en/#aggregate-trade-streams
    agg_trades_sub = make_sub(symbols, 'aggTrade', subid)
    await ws.send_msg(agg_trades_sub)

    # might get ack from ws server, or maybe some
    # other msg still in transit..
    res = await ws.recv_msg()
    rx_subid: str | None = res.get('id')
    if rx_subid:
        assert rx_subid == subid

    yield

    subs: list[str] = []
    for sym in symbols:
        # NOTE: stream names must be lowercase to match the
        # subscribe-side packets built by `make_sub()` above.
        subs.append(f'{sym.lower()}@aggTrade')
        subs.append(f'{sym.lower()}@bookTicker')

    # unsub from all pairs on teardown
    if ws.connected():
        await ws.send_msg({
            "method": "UNSUBSCRIBE",
            "params": subs,
            "id": subid,
        })

        # XXX: do we need to ack the unsub?
        # await ws.recv_msg()
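
# e.g. the teardown packet sent for a single-pair feed using the
# first generated subid (illustrative values):
#
# {
#     'method': 'UNSUBSCRIBE',
#     'params': ['btcusdt@aggTrade', 'btcusdt@bookTicker'],
#     'id': 0,
# }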


async def stream_quotes(

    send_chan: trio.abc.SendChannel,
    symbols: list[str],
    feed_is_live: trio.Event,
    loglevel: str | None = None,

    # startup sync
    task_status: TaskStatus[tuple[dict, dict]] = trio.TASK_STATUS_IGNORED,

) -> None:

    async with (
        send_chan as send_chan,
        open_cached_client('binance') as client,
    ):
        init_msgs: list[FeedInit] = []
        for sym in symbols:
            mkt: MktPair
            pair: Pair
            mkt, pair = await get_mkt_info(sym)

            # build out init msgs according to latest spec
            init_msgs.append(
                FeedInit(mkt_info=mkt)
            )

        wss_url: str = get_api_eps(client.mkt_mode)[1]  # 2nd elem is wss url

        # TODO: for sanity, but remove eventually Xp
        if 'future' in mkt.type_key:
            assert 'fstream' in wss_url

        async with (
            open_autorecon_ws(
                url=wss_url,
                fixture=partial(
                    subscribe,
                    symbols=[mkt.bs_mktid],
                ),
            ) as ws,

            # avoid stream-gen closure from breaking trio..
            aclosing(stream_messages(ws)) as msg_gen,
        ):
            # log.info('WAITING ON FIRST LIVE QUOTE..')
            typ, quote = await anext(msg_gen)

            # pull a first quote and deliver
            while typ != 'trade':
                typ, quote = await anext(msg_gen)

            task_status.started((init_msgs, quote))

            # signal to caller feed is ready for consumption
            feed_is_live.set()

            # import time
            # last = time.time()

            # XXX NOTE: can't include the `.binance` suffix
            # or the sampling loop will not broadcast correctly
            # since `bus._subscribers.setdefault(bs_fqme, set())`
            # is used inside `.data.open_feed_bus()` !!!
            topic: str = mkt.bs_fqme

            # start streaming
            async for typ, quote in msg_gen:
                # period = time.time() - last
                # hz = 1/period if period else float('inf')
                # if hz > 60:
                #     log.info(f'Binance quotez : {hz}')
                await send_chan.send({topic: quote})
                # last = time.time()


@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,
) -> Client:

    # NOTE: symbology tables are loaded as part of client
    # startup in ``.api.get_client()`` and in this case
    # are stored as `Client._pairs`.
    async with open_cached_client('binance') as client:

        # TODO: maybe we should deliver the cache
        # so that clients can always do a local-lookup-first
        # style try and then update async as (new) match results
        # are delivered from here?
        await ctx.started()

        async with ctx.open_stream() as stream:

            pattern: str
            async for pattern in stream:
                # NOTE: pattern fuzzy-matching is done within
                # the method impl.
                pairs: dict[str, Pair] = await client.search_symbols(
                    pattern,
                )

                # repack in fqme-keyed table
                byfqme: dict[str, Pair] = {}
                for pair in pairs.values():
                    byfqme[pair.bs_fqme] = pair

                await stream.send(byfqme)

@@ -0,0 +1,303 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Per-market data-type definitions and schema types.

"""
from __future__ import annotations
from typing import (
    Literal,
)
from decimal import Decimal

from msgspec import field

from piker.types import Struct


# API endpoint paths by venue / sub-API
_domain: str = 'binance.com'
_spot_url: str = f'https://api.{_domain}'
_futes_url: str = f'https://fapi.{_domain}'

# WEBsocketz
# NOTE XXX: see api docs which show diff addr?
# https://developers.binance.com/docs/binance-trading-api/websocket_api#general-api-information
_spot_ws: str = 'wss://stream.binance.com/ws'
# or this one? ..
# 'wss://ws-api.binance.com:443/ws-api/v3',

# https://binance-docs.github.io/apidocs/futures/en/#websocket-market-streams
_futes_ws: str = f'wss://fstream.{_domain}/ws'
_auth_futes_ws: str = f'wss://fstream-auth.{_domain}/ws'

# test nets
# NOTE: spot test network only allows certain ep sets:
# https://testnet.binance.vision/
# https://www.binance.com/en/support/faq/how-to-test-my-functions-on-binance-testnet-ab78f9a1b8824cf0a106b4229c76496d
_testnet_spot_url: str = 'https://testnet.binance.vision/api'
_testnet_spot_ws: str = 'wss://testnet.binance.vision/ws'
# or this one? ..
# 'wss://testnet.binance.vision/ws-api/v3'

_testnet_futes_url: str = 'https://testnet.binancefuture.com'
_testnet_futes_ws: str = 'wss://stream.binancefuture.com/ws'


MarketType = Literal[
    'spot',
    # 'margin',
    'usdtm_futes',
    # 'coinm_futes',
]


def get_api_eps(venue: MarketType) -> tuple[str, str]:
    '''
    Return API ep root paths per venue.

    '''
    return {
        'spot': (
            _spot_url,
            _spot_ws,
        ),
        'usdtm_futes': (
            _futes_url,
            _futes_ws,
        ),
    }[venue]
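
# e.g. resolving the endpoints for the usd(t)-margined venue:
#
# >>> get_api_eps('usdtm_futes')
# ('https://fapi.binance.com', 'wss://fstream.binance.com/ws')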


class Pair(Struct, frozen=True, kw_only=True):

    symbol: str
    status: str
    orderTypes: list[str]

    # src
    quoteAsset: str
    quotePrecision: int

    # dst
    baseAsset: str
    baseAssetPrecision: int

    filters: dict[
        str,
        str | int | float,
    ] = field(default_factory=dict)

    @property
    def price_tick(self) -> Decimal:
        # XXX: lul, after manually inspecting the response format we
        # just directly pick out the info we need
        step_size: str = self.filters['PRICE_FILTER']['tickSize'].rstrip('0')
        return Decimal(step_size)

    @property
    def size_tick(self) -> Decimal:
        step_size: str = self.filters['LOT_SIZE']['stepSize'].rstrip('0')
        return Decimal(step_size)

    @property
    def bs_fqme(self) -> str:
        return self.symbol

    @property
    def bs_mktid(self) -> str:
        return f'{self.symbol}.{self.venue}'
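
# e.g. how the exchange's zero-padded filter strs normalize to tick
# sizes via the `.rstrip('0')` trick above (illustrative values in
# binance's exchange-info format):
#
#   tickSize '0.01000000' -> Decimal('0.01')
#   stepSize '0.00100000' -> Decimal('0.001')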


class SpotPair(Pair, frozen=True):

    cancelReplaceAllowed: bool
    allowTrailingStop: bool
    quoteAssetPrecision: int

    baseCommissionPrecision: int
    quoteCommissionPrecision: int

    icebergAllowed: bool
    ocoAllowed: bool
    quoteOrderQtyMarketAllowed: bool
    isSpotTradingAllowed: bool
    isMarginTradingAllowed: bool
    otoAllowed: bool

    defaultSelfTradePreventionMode: str
    allowedSelfTradePreventionModes: list[str]
    permissions: list[str]
    permissionSets: list[list[str]]

    # NOTE: see `.data._symcache.SymbologyCache.load()` for why
    ns_path: str = 'piker.brokers.binance:SpotPair'

    @property
    def venue(self) -> str:
        return 'SPOT'

    @property
    def bs_fqme(self) -> str:
        return f'{self.symbol}.SPOT'

    @property
    def bs_src_asset(self) -> str:
        return f'{self.quoteAsset}'

    @property
    def bs_dst_asset(self) -> str:
        return f'{self.baseAsset}'


class FutesPair(Pair):
    symbol: str  # 'BTCUSDT',
    pair: str  # 'BTCUSDT',
    baseAssetPrecision: int  # 8,
    contractType: str  # 'PERPETUAL',
    deliveryDate: int  # 4133404800000,
    liquidationFee: float  # '0.012500',
    maintMarginPercent: float  # '2.5000',
    marginAsset: str  # 'USDT',
    marketTakeBound: float  # '0.05',
    maxMoveOrderLimit: int  # 10000,
    onboardDate: int  # 1569398400000,
    pricePrecision: int  # 2,
    quantityPrecision: int  # 3,
    quoteAsset: str  # 'USDT',
    quotePrecision: int  # 8,
    requiredMarginPercent: float  # '5.0000',
    timeInForce: list[str]  # ['GTC', 'IOC', 'FOK', 'GTX'],
    triggerProtect: float  # '0.0500',
    underlyingSubType: list[str]  # ['PoW'],
    underlyingType: str  # 'COIN'

    # NOTE: see `.data._symcache.SymbologyCache.load()` for why
    ns_path: str = 'piker.brokers.binance:FutesPair'

    # NOTE: for compat with spot pairs and `MktPair.src: Asset`
    # processing..
    @property
    def quoteAssetPrecision(self) -> int:
        return self.quotePrecision

    @property
    def expiry(self) -> str:
        symbol: str = self.symbol
        contype: str = self.contractType
        match contype:
            case (
                'CURRENT_QUARTER'
                | 'CURRENT_QUARTER DELIVERING'
                | 'NEXT_QUARTER'  # su madre binance..
            ):
                pair, _, expiry = symbol.partition('_')
                assert pair == self.pair  # sanity
                return f'{expiry}'

            case 'PERPETUAL':
                return 'PERP'

            case '':
                subtype: list[str] = self.underlyingSubType
                if not subtype:
                    if self.status == 'PENDING_TRADING':
                        return 'PENDING'

                match subtype:
                    case ['DEFI']:
                        return 'PERP'

        # wow, just wow you binance guys suck..
        if self.status == 'PENDING_TRADING':
            return 'PENDING'

        # XXX: yeah no clue then..
        raise ValueError(
            f'Bad .expiry token match: {contype} for {symbol}'
        )

    @property
    def venue(self) -> str:
        symbol: str = self.symbol
        ctype: str = self.contractType
        margin: str = self.marginAsset

        match ctype:
            case 'PERPETUAL':
                return f'{margin}M'

            case (
                'CURRENT_QUARTER'
                | 'CURRENT_QUARTER DELIVERING'
                | 'NEXT_QUARTER'  # su madre binance..
            ):
                _, _, expiry = symbol.partition('_')
                return f'{margin}M'

            case '':
                subtype: list[str] = self.underlyingSubType
                if not subtype:
                    if self.status == 'PENDING_TRADING':
                        return f'{margin}M'

                match subtype:
                    case (
                        ['DEFI']
                        | ['USDC']
                    ):
                        return f'{subtype[0]}'

        # XXX: yeah no clue then..
        raise ValueError(
            f'Bad .venue token match: {ctype}'
        )

    @property
    def bs_fqme(self) -> str:
        symbol: str = self.symbol
        ctype: str = self.contractType
        venue: str = self.venue
        pair: str = self.pair

        match ctype:
            case (
                'CURRENT_QUARTER'
                | 'NEXT_QUARTER'  # su madre binance..
            ):
                pair, _, expiry = symbol.partition('_')
                assert pair == self.pair

        return f'{pair}.{venue}.{self.expiry}'

    @property
    def bs_src_asset(self) -> str:
        return f'{self.quoteAsset}'

    @property
    def bs_dst_asset(self) -> str:
        return f'{self.baseAsset}.{self.venue}'
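
# e.g. how the quarterly vs. perp symbology maps through the
# properties above (illustrative field values only):
#
#   symbol='BTCUSDT_231229', contractType='CURRENT_QUARTER',
#   marginAsset='USDT'
#     -> .expiry='231229', .venue='USDTM',
#        .bs_fqme='BTCUSDT.USDTM.231229'
#
#   symbol='BTCUSDT', contractType='PERPETUAL', marginAsset='USDT'
#     -> .expiry='PERP', .venue='USDTM',
#        .bs_fqme='BTCUSDT.USDTM.PERP'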


PAIRTYPES: dict[MarketType, type[Pair]] = {
    'spot': SpotPair,
    'usdtm_futes': FutesPair,

    # TODO: support coin-margined venue:
    # https://binance-docs.github.io/apidocs/delivery/en/#change-log
    # 'coinm_futes': CoinFutesPair,
}
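
# e.g. venue-keyed struct-type lookup as (hypothetically) done by a
# decoder layer:
#
# >>> PAIRTYPES['usdtm_futes'] is FutesPair
# True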

@@ -21,6 +21,7 @@ import os
from functools import partial
from operator import attrgetter
from operator import itemgetter
from types import ModuleType

import click
import trio

@@ -28,7 +29,13 @@ import tractor

from ..cli import cli
from .. import watchlists as wl
from ..log import get_console_log, colorize_json, get_logger
from ..log import (
    colorize_json,
)
from ._util import (
    log,
    get_console_log,
)
from ..service import (
    maybe_spawn_brokerd,
    maybe_open_pikerd,

@@ -38,9 +45,7 @@ from ..brokers import (
    get_brokermod,
    data,
)

log = get_logger('cli')
DEFAULT_BROKER = 'questrade'
DEFAULT_BROKER = 'binance'

_config_dir = click.get_app_dir('piker')
_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')

@@ -190,7 +195,7 @@ def brokercheck(config, broker):

@cli.command()
@click.option('--keys', '-k', multiple=True,
              help='Return results only for these keys')
            help='Return results only for these keys')
@click.argument('meth', nargs=1)
@click.argument('kwargs', nargs=-1)
@click.pass_obj

@@ -237,7 +242,7 @@ def quote(config, tickers):

    '''
    # global opts
    brokermod = config['brokermods'][0]
    brokermod = list(config['brokermods'].values())[0]

    quotes = trio.run(partial(core.stocks_quote, brokermod, tickers))
    if not quotes:

@@ -264,7 +269,7 @@ def bars(config, symbol, count):

    '''
    # global opts
    brokermod = config['brokermods'][0]
    brokermod = list(config['brokermods'].values())[0]

    # broker backend should return at the least a
    # list of candle dictionaries

@@ -299,7 +304,7 @@ def record(config, rate, name, dhost, filename):

    '''
    # global opts
    brokermod = config['brokermods'][0]
    brokermod = list(config['brokermods'].values())[0]
    loglevel = config['loglevel']
    log = config['log']

@@ -364,7 +369,7 @@ def optsquote(config, symbol, date):

    '''
    # global opts
    brokermod = config['brokermods'][0]
    brokermod = list(config['brokermods'].values())[0]

    quotes = trio.run(
        partial(

@@ -381,58 +386,151 @@ def optsquote(config, symbol, date):
@cli.command()
@click.argument('tickers', nargs=-1, required=True)
@click.pass_obj
def symbol_info(config, tickers):
def mkt_info(
    config: dict,
    tickers: list[str],
):
    '''
    Print symbol quotes to the console

    '''
    # global opts
    brokermod = config['brokermods'][0]
    from msgspec.json import encode, decode
    from ..accounting import MktPair
    from ..service import (
        open_piker_runtime,
    )

    quotes = trio.run(partial(core.symbol_info, brokermod, tickers))
    if not quotes:
        log.error(f"No quotes could be found for {tickers}?")
    # global opts
    brokermods: dict[str, ModuleType] = config['brokermods']

    mkts: list[MktPair] = []
    async def main():

        async with open_piker_runtime(
            name='mkt_info_query',
            # loglevel=loglevel,
            debug_mode=True,

        ) as (_, _):
            for fqme in tickers:
                bs_fqme, _, broker = fqme.rpartition('.')
                brokermod: ModuleType = brokermods[broker]
                mkt, bs_pair = await core.mkt_info(
                    brokermod,
                    bs_fqme,
                )
                mkts.append((mkt, bs_pair))

    trio.run(main)

    if not mkts:
        log.error(
            f'No market info could be found for {tickers}'
        )
        return

    if len(quotes) < len(tickers):
        syms = tuple(map(itemgetter('symbol'), quotes))
    if len(mkts) < len(tickers):
        syms = tuple(map(itemgetter('fqme'), mkts))
        for ticker in tickers:
            if ticker not in syms:
                brokermod.log.warn(f"Could not find symbol {ticker}?")
                log.warn(f"Could not find symbol {ticker}?")

    click.echo(colorize_json(quotes))

    # TODO: use ``rich.Table`` instead here!
    for mkt, bs_pair in mkts:
        click.echo(
            '\n'
            '----------------------------------------------------\n'
            f'{type(bs_pair)}\n'
            '----------------------------------------------------\n'
            f'{colorize_json(bs_pair.to_dict())}\n'
            '----------------------------------------------------\n'
            f'as piker `MktPair` with fqme: {mkt.fqme}\n'
            '----------------------------------------------------\n'
            # NOTE: roundtrip to json codec for console print
            f'{colorize_json(decode(encode(mkt)))}'
        )


@cli.command()
@click.argument('pattern', required=True)
# TODO: move this to top level click/typer context for all subs
@click.option(
    '--pdb',
    is_flag=True,
    help='Enable tractor debug mode',
)
@click.pass_obj
def search(config, pattern):
def search(
    config: dict,
    pattern: str,
    pdb: bool,
):
    '''
    Search for symbols from broker backend(s).

    '''
    # global opts
    brokermods = config['brokermods']
    brokermods = list(config['brokermods'].values())

    # define tractor entrypoint
    async def main(func):

        async with maybe_open_pikerd(
            loglevel=config['loglevel'],
            debug_mode=pdb,
        ):
            return await func()

    quotes = trio.run(
        main,
        partial(
            core.symbol_search,
            brokermods,
            pattern,
        ),
    )
    from piker.toolz import open_crash_handler
    with open_crash_handler():
        quotes = trio.run(
            main,
            partial(
                core.symbol_search,
                brokermods,
                pattern,
            ),
        )

    if not quotes:
        log.error(f"No matches could be found for {pattern}?")
        return
        if not quotes:
            log.error(f"No matches could be found for {pattern}?")
            return

    click.echo(colorize_json(quotes))
        click.echo(colorize_json(quotes))
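
# e.g. (assumed CLI invocation shape for the cmd above):
#
#   $ piker search --pdb btcusdt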


@cli.command()
@click.argument('section', required=False)
@click.argument('value', required=False)
@click.option('--delete', '-d', flag_value=True, help='Delete section')
@click.pass_obj
def brokercfg(config, section, value, delete):
    '''
    If invoked with no arguments, open an editor to edit broker
    configs file or get / update an individual section.

    '''
    from .. import config

    if section:
        conf, path = config.load()

        if not delete:
            if value:
                config.set_value(conf, section, value)

            click.echo(
                colorize_json(
                    config.get_value(conf, section))
            )
        else:
            config.del_value(conf, section)

        config.write(config=conf)

    else:
        conf, path = config.load(raw=True)
        config.write(
            raw=click.edit(text=conf)
        )

@@ -26,13 +26,11 @@ from typing import List, Dict, Any, Optional

import trio

from ..log import get_logger
from ._util import log
from . import get_brokermod
from ..service import maybe_spawn_brokerd
from .._cacheables import open_cached_client


log = get_logger(__name__)
from . import open_cached_client
from ..accounting import MktPair


async def api(brokername: str, methname: str, **kwargs) -> dict:

@@ -97,15 +95,15 @@ async def option_chain(
            return await client.option_chains(contracts)


async def contracts(
    brokermod: ModuleType,
    symbol: str,
) -> Dict[str, Dict[str, Dict[str, Any]]]:
    """Return option contracts (all expiries) for ``symbol``.
    """
    async with brokermod.get_client() as client:
        # return await client.get_all_contracts([symbol])
        return await client.get_all_contracts([symbol])
# async def contracts(
#     brokermod: ModuleType,
#     symbol: str,
# ) -> Dict[str, Dict[str, Dict[str, Any]]]:
#     """Return option contracts (all expiries) for ``symbol``.
#     """
#     async with brokermod.get_client() as client:
#         # return await client.get_all_contracts([symbol])
#         return await client.get_all_contracts([symbol])


async def bars(

@@ -119,17 +117,6 @@ async def bars(
        return await client.bars(symbol, **kwargs)


async def symbol_info(
    brokermod: ModuleType,
    symbol: str,
    **kwargs,
) -> Dict[str, Dict[str, Dict[str, Any]]]:
    """Return symbol info from broker.
    """
    async with brokermod.get_client() as client:
        return await client.symbol_info(symbol, **kwargs)


async def search_w_brokerd(name: str, pattern: str) -> dict:

    async with open_cached_client(name) as client:

@@ -158,7 +145,11 @@ async def symbol_search(

        async with maybe_spawn_brokerd(
            mod.name,
            infect_asyncio=getattr(mod, '_infect_asyncio', False),
            infect_asyncio=getattr(
                mod,
                '_infect_asyncio',
                False,
            ),
        ) as portal:

            results.append((

@@ -176,3 +167,20 @@ async def symbol_search(
            n.start_soon(search_backend, mod.name)

    return results


async def mkt_info(
    brokermod: ModuleType,
    fqme: str,
    **kwargs,

) -> MktPair:
    '''
    Return MktPair info from broker including src and dst assets.

    '''
    async with open_cached_client(brokermod.name) as client:
        assert client
        return await brokermod.get_mkt_info(
            fqme.replace(brokermod.name, '')
        )
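
# e.g. hypothetical direct usage from an async context (names per
# this module):
#
#   mkt, bs_pair = await mkt_info(
#       get_brokermod('binance'),
#       'btcusdt.spot',
#   )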

@@ -41,13 +41,13 @@ import tractor
from tractor.experimental import msgpub
from async_generator import asynccontextmanager

from ..log import get_logger, get_console_log
from ._util import (
    log,
    get_console_log,
)
from . import get_brokermod


log = get_logger(__name__)


async def wait_for_network(
    net_func: Callable,
    sleep: int = 1
@@ -21,8 +21,6 @@ Deribit backend.

from piker.log import get_logger

log = get_logger(__name__)

from .api import (
    get_client,
)

@@ -30,13 +28,15 @@ from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
    backfill_bars
    # backfill_bars,
)
# from .broker import (
    # trades_dialogue,
    # open_trade_dialog,
    # norm_trade_records,
# )

log = get_logger(__name__)

__all__ = [
    'get_client',
#    'trades_dialogue',
@@ -18,43 +18,33 @@
Deribit backend.

'''
import json
import time
import asyncio

from contextlib import asynccontextmanager as acm, AsyncExitStack
from functools import partial
from contextlib import (
    asynccontextmanager as acm,
)
from datetime import datetime
from typing import Any, Optional, Iterable, Callable

import pendulum
import asks
import trio
from trio_typing import Nursery, TaskStatus
from fuzzywuzzy import process as fuzzy
import numpy as np

from piker.data.types import Struct
from piker.data._web_bs import (
    NoBsWs,
    open_autorecon_ws,
    open_jsonrpc_session
from functools import partial
import time
from typing import (
    Any,
    Optional,
    Callable,
)

from .._util import resproc

from piker import config
from piker.log import get_logger

import pendulum
import trio
from trio_typing import TaskStatus
from rapidfuzz import process as fuzzy
import numpy as np
from tractor.trionics import (
    broadcast_receiver,
    BroadcastReceiver,
    maybe_open_context
)
from tractor import to_asyncio

# XXX WOOPS XD
# yeah you'll need to install it since it was removed in #489 by
# accident; well i thought we had removed all usage..
from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT,
    L1_BOOK, TRADES,
@@ -62,6 +52,20 @@ from cryptofeed.defines import (
)
from cryptofeed.symbols import Symbol

from piker.data import (
    def_iohlcv_fields,
    match_from_pairs,
    Struct,
)
from piker.data._web_bs import (
    open_jsonrpc_session
)


from piker import config
from piker.log import get_logger


log = get_logger(__name__)


@@ -75,26 +79,13 @@ _ws_url = 'wss://www.deribit.com/ws/api/v2'
_testnet_ws_url = 'wss://test.deribit.com/ws/api/v2'


# Broker specific ohlc schema (rest)
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('bar_wap', float),  # will be zeroed by sampler if not filled
]


class JSONRPCResult(Struct):
    jsonrpc: str = '2.0'
    id: int
    result: Optional[dict] = None
    result: Optional[list[dict]] = None
    error: Optional[dict] = None
    usIn: int
    usOut: int
    usDiff: int
    testnet: bool
@@ -301,24 +292,29 @@ class Client:
        currency: str = 'btc',  # BTC, ETH, SOL, USDC
        kind: str = 'option',
        expired: bool = False
    ) -> dict[str, Any]:
        """Get symbol info for the exchange.

        """
    ) -> dict[str, dict]:
        '''
        Get symbol infos.

        '''
        if self._pairs:
            return self._pairs

        # will retrieve all symbols by default
        params = {
        params: dict[str, str] = {
            'currency': currency.upper(),
            'kind': kind,
            'expired': str(expired).lower()
        }

        resp = await self.json_rpc('public/get_instruments', params)
        results = resp.result

        instruments = {
        resp: JSONRPCResult = await self.json_rpc(
            'public/get_instruments',
            params,
        )
        # convert to symbol-keyed table
        results: list[dict] | None = resp.result
        instruments: dict[str, dict] = {
            item['instrument_name'].lower(): item
            for item in results
        }

@@ -331,6 +327,7 @@ class Client:
    async def cache_symbols(
        self,
    ) -> dict:

        if not self._pairs:
            self._pairs = await self.symbol_info()

@@ -341,17 +338,23 @@ class Client:
        pattern: str,
        limit: int = 30,
    ) -> dict[str, Any]:
        data = await self.symbol_info()
        '''
        Fuzzy search symbology set for pairs matching `pattern`.

        matches = fuzzy.extractBests(
            pattern,
            data,
        '''
        pairs: dict[str, Any] = await self.symbol_info()
        matches: dict[str, Pair] = match_from_pairs(
            pairs=pairs,
            query=pattern.upper(),
            score_cutoff=35,
            limit=limit
        )
        # repack in dict form
        return {item[0]['instrument_name'].lower(): item[0]
                for item in matches}

        # repack in name-keyed table
        return {
            pair['instrument_name'].lower(): pair
            for pair in matches.values()
        }

    async def bars(
        self,

@@ -405,7 +408,7 @@ class Client:

            new_bars.append((i,) + tuple(row))

        array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else klines
        array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else klines
        return array

    async def last_trades(
@@ -26,11 +26,11 @@ import time
import trio
from trio_typing import TaskStatus
import pendulum
from fuzzywuzzy import process as fuzzy
from rapidfuzz import process as fuzzy
import numpy as np
import tractor

from piker._cacheables import open_cached_client
from piker.brokers import open_cached_client
from piker.log import get_logger, get_console_log
from piker.data import ShmArray
from piker.brokers._util import (

@@ -39,7 +39,6 @@ from piker.brokers._util import (
)

from cryptofeed import FeedHandler

from cryptofeed.defines import (
    DERIBIT, L1_BOOK, TRADES, OPTION, CALL, PUT
)

@@ -62,9 +61,10 @@ log = get_logger(__name__)

@acm
async def open_history_client(
    instrument: str,
    mkt: MktPair,
) -> tuple[Callable, int]:

    instrument: str = mkt.bs_fqme
    # TODO: implement history getter for the new storage layer.
    async with open_cached_client('deribit') as client:
@@ -127,7 +127,7 @@ your ``pps.toml`` file will have position entries like,
    [ib.algopaper."mnq.globex.20221216"]
    size = -1.0
    ppu = 12423.630576923071
    bsuid = 515416577
    bs_mktid = 515416577
    expiry = "2022-12-16T00:00:00+00:00"
    clears = [
     { dt = "2022-08-31T18:54:46+00:00", ppu = 12423.630576923071, accum_size = -19.0, price = 12372.75, size = 1.0, cost = 0.57, tid = "0000e1a7.630f5e5a.01.01" },
@@ -30,29 +30,52 @@ from .api import (
)
from .feed import (
    open_history_client,
    open_symbol_search,
    stream_quotes,
)
from .broker import (
    trades_dialogue,
    open_trade_dialog,
)
from .ledger import (
    norm_trade,
    norm_trade_records,
    tx_sort,
)
from .symbols import (
    get_mkt_info,
    open_symbol_search,
    _search_conf,
)

__all__ = [
    'get_client',
    'trades_dialogue',
    'get_mkt_info',
    'norm_trade',
    'norm_trade_records',
    'open_trade_dialog',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    '_search_conf',
    'tx_sort',
]

_brokerd_mods: list[str] = [
    'api',
    'broker',
]

_datad_mods: list[str] = [
    'feed',
    'symbols',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
]
__enable_modules__: list[str] = (
    _brokerd_mods
    +
    _datad_mods
)

# passed to ``tractor.ActorNursery.start_actor()``
_spawn_kwargs = {

@@ -63,3 +86,8 @@ _spawn_kwargs = {
# know if ``brokerd`` should be spawned with
# ``tractor``'s aio mode.
_infect_asyncio: bool = True

# XXX NOTE: for now we disable symcache with this backend since
# there is no clearly simple nor practical way to download "all
# symbology info" for all supported venues..
_no_symcache: bool = True
@@ -0,0 +1,195 @@

# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
"FLEX" report processing utils.

"""
from bidict import bidict
import pendulum
from pprint import pformat
from typing import Any

from .api import (
    get_config,
    log,
)
from piker.accounting import (
    open_trade_ledger,
)


def parse_flex_dt(
    record: str,
) -> pendulum.datetime:
    '''
    Parse stupid flex record datetime stamps for the `dateTime` field..

    '''
    date, ts = record.split(';')
    dt = pendulum.parse(date)
    ts = f'{ts[:2]}:{ts[2:4]}:{ts[4:]}'
    tsdt = pendulum.parse(ts)
    return dt.set(hour=tsdt.hour, minute=tsdt.minute, second=tsdt.second)
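
# a quick sanity check of the expected flex stamp format (assuming the
# `dateTime` field renders as 'YYYYMMDD;HHMMSS'):
#
# >>> parse_flex_dt('20220831;185446')
# DateTime(2022, 8, 31, 18, 54, 46, tzinfo=Timezone('UTC'))
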
def flex_records_to_ledger_entries(
    accounts: bidict,
    trade_entries: list[object],

) -> dict:
    '''
    Convert flex report entry objects into ``dict`` form, pretty much
    straight up without modification except add a `pydatetime` field
    from the parsed timestamp.

    '''
    trades_by_account = {}
    for t in trade_entries:
        entry = t.__dict__

        # XXX: LOL apparently ``toml`` has a bug
        # where a section key error will show up in the write
        # if you leave a table key as an `int`? So i guess
        # cast to strs for all keys..

        # oddly for some so-called "BookTrade" entries
        # this field seems to be blank, no cuckin clue.
        # trade['ibExecID']
        tid = str(entry.get('ibExecID') or entry['tradeID'])
        # date = str(entry['tradeDate'])

        # XXX: is it going to cause problems if an account name
        # gets lost? The user should be able to find it based
        # on the actual exec history right?
        acctid = accounts[str(entry['accountId'])]

        # probably a flex record with a wonky non-std timestamp..
        dt = entry['pydatetime'] = parse_flex_dt(entry['dateTime'])
        entry['datetime'] = str(dt)

        if not tid:
            # this is likely some kind of internal adjustment
            # transaction, likely one of the following:
            # - an expiry event that will show a "book trade" indicating
            #   some adjustment to cash balances: zeroing or itm settle.
            # - a manual cash balance position adjustment likely done by
            #   the user from the accounts window in TWS where they can
            #   manually set the avg price and size:
            #   https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST
            log.warning(f'Skipping ID-less ledger entry:\n{pformat(entry)}')
            continue

        trades_by_account.setdefault(
            acctid, {}
        )[tid] = entry

    for acctid in trades_by_account:
        trades_by_account[acctid] = dict(sorted(
            trades_by_account[acctid].items(),
            key=lambda entry: entry[1]['pydatetime'],
        ))

    return trades_by_account


def load_flex_trades(
    path: str | None = None,

) -> dict[str, Any]:

    from ib_insync import flexreport, util

    conf = get_config()

    if not path:
        # load ``brokers.toml`` and try to get the flex
        # token and query id that must be previously defined
        # by the user.
        token = conf.get('flex_token')
        if not token:
            raise ValueError(
                'You must specify a ``flex_token`` field in your '
                '`brokers.toml` in order to load your trade log, see our '
                'instructions for how to set this up here:\n'
                'PUT LINK HERE!'
            )

        qid = conf['flex_trades_query_id']

        # TODO: hack this into our logging
        # system like we do with the API client..
        util.logToConsole()

        # TODO: rewrite the query part of this with async..httpx?
        report = flexreport.FlexReport(
            token=token,
            queryId=qid,
        )

    else:
        # XXX: another project we could potentially look at,
        # https://pypi.org/project/ibflex/
        report = flexreport.FlexReport(path=path)

    trade_entries = report.extract('Trade')
    ln = len(trade_entries)
    log.info(f'Loaded {ln} trades from flex query')

    trades_by_account = flex_records_to_ledger_entries(
        conf['accounts'].inverse,  # reverse map to user account names
        trade_entries,
    )

    ledger_dict: dict | None = None

    for acctid in trades_by_account:
        trades_by_id = trades_by_account[acctid]

        with open_trade_ledger(
            'ib',
            acctid,
            allow_from_sync_code=True,
        ) as ledger_dict:
            tid_delta = set(trades_by_id) - set(ledger_dict)
            log.info(
                'New trades detected\n'
                f'{pformat(tid_delta)}'
            )
            if tid_delta:
                sorted_delta = dict(sorted(
                    {tid: trades_by_id[tid] for tid in tid_delta}.items(),
                    key=lambda entry: entry[1].pop('pydatetime'),
                ))
                ledger_dict.update(sorted_delta)

    return ledger_dict


if __name__ == '__main__':
    import sys
    import os

    args = sys.argv
    if len(args) > 1:
        args = args[1:]
        for arg in args:
            path = os.path.abspath(arg)
            load_flex_trades(path=path)
    else:
        # expect brokers.toml to have an entry and
        # pull from the web service.
        load_flex_trades()
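
The `dict(sorted(...))` re-keying used above is a generic idiom for
ordering a tid-keyed table by a datetime field; a minimal standalone
sketch (field names illustrative):

    entries = {
        't2': {'pydatetime': 2},
        't1': {'pydatetime': 1},
    }
    by_dt = dict(sorted(
        entries.items(),
        key=lambda entry: entry[1]['pydatetime'],
    ))
    assert list(by_dt) == ['t1', 't2']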

@@ -19,15 +19,23 @@

runnable script-programs.

'''
from typing import Literal
from __future__ import annotations
from functools import partial
from typing import (
    Literal,
    TYPE_CHECKING,
)
import subprocess

import tractor

from piker.log import get_logger
from piker.brokers._util import get_logger

log = get_logger(__name__)
if TYPE_CHECKING:
    from .api import Client
    from ib_insync import IB

log = get_logger('piker.brokers.ib')

_reset_tech: Literal[
    'vnc',

@@ -41,7 +49,9 @@ _reset_tech: Literal[

async def data_reset_hack(
    reset_type: str = 'data',
    # vnc_host: str,
    client: Client,
    reset_type: Literal['data', 'connection'],

) -> None:
    '''

@@ -71,18 +81,61 @@ async def data_reset_hack(

          that need to be wrangled.

    '''
    ib_client: IB = client.ib

    # look up any user defined vnc socket address mapped from
    # a particular API socket port.
    api_port: str = str(ib_client.client.port)
    vnc_host: str
    vnc_port: int
    vnc_sockaddr: dict[str, tuple] | None = client.conf.get('vnc_addrs')

    no_setup_msg: str = (
        f'No data reset hack test setup for {vnc_sockaddr}!\n'
        'See config setup tips @\n'
        'https://github.com/pikers/piker/tree/master/piker/brokers/ib'
    )

    if not vnc_sockaddr:
        log.warning(
            no_setup_msg
            +
            'REQUIRES A `vnc_addrs: array` ENTRY'
        )

    vnc_host, vnc_port = vnc_sockaddr.get(
        api_port,
        ('localhost', 3003)
    )
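
    # e.g. a hypothetical `brokers.toml` section mapping an API socket
    # port to a VNC sockaddr (shape assumed from the
    # `.get(api_port, ...)` lookup above):
    #
    #   [ib.vnc_addrs]
    #   "4002" = ["localhost", 3003]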
    global _reset_tech

    match _reset_tech:
        case 'vnc':
            try:
                await tractor.to_asyncio.run_task(vnc_click_hack)
                await tractor.to_asyncio.run_task(
                    partial(
                        vnc_click_hack,
                        host=vnc_host,
                        port=vnc_port,
                    )
                )
            except OSError:
                _reset_tech = 'i3ipc_xdotool'
                if vnc_host != 'localhost':
                    log.warning(no_setup_msg)
                    return False

                try:
                    import i3ipc  # noqa  (since a deps dynamic check)
                except ModuleNotFoundError:
                    log.warning(no_setup_msg)
                    return False

                try:
                    i3ipc_xdotool_manual_click_hack()
                    _reset_tech = 'i3ipc_xdotool'
                    return True
                except OSError:
                    log.exception(no_setup_msg)
                    return False

        case 'i3ipc_xdotool':

@@ -96,21 +149,39 @@ async def data_reset_hack(


async def vnc_click_hack(
    host: str,
    port: int,
    reset_type: str = 'data'
) -> None:
    '''
    Reset the data or netowork connection for the VNC attached
    Reset the data or network connection for the VNC attached
    ib gateway using magic combos.

    '''
    key = {'data': 'f', 'connection': 'r'}[reset_type]
    try:
        import asyncvnc
    except ModuleNotFoundError:
        log.warning(
            "In order to leverage `piker`'s built-in data reset hacks, install "
            "the `asyncvnc` project: https://github.com/barneygale/asyncvnc"
        )
        return

    import asyncvnc
    # two different hot keys which trigger diff types of reset
    # requests B)
    key = {
        'data': 'f',
        'connection': 'r'
    }[reset_type]

    async with asyncvnc.connect(
        'localhost',
        port=3003,
        host,
        port=port,

        # TODO: doesn't work see:
        # https://github.com/barneygale/asyncvnc/issues/7
        # password='ibcansmbz',

    ) as client:

        # move to middle of screen

@@ -124,9 +195,16 @@ async def vnc_click_hack(


def i3ipc_xdotool_manual_click_hack() -> None:
    import i3ipc
    '''
    Do the data reset hack but expecting a local X-window using `xdotool`.

    '''
    import i3ipc
    i3 = i3ipc.Connection()

    # TODO: might be worth offering some kinda api for grabbing
    # the window id from the pid?
    # https://stackoverflow.com/a/2250879
    t = i3.get_tree()

    orig_win_id = t.find_focused().window

@@ -181,7 +259,7 @@ def i3ipc_xdotool_manual_click_hack() -> None:

                        timeout=timeout,
                    )

    # re-activate and focus original window
        # re-activate and focus original window
        subprocess.call([
            'xdotool',
            'windowactivate', '--sync', str(orig_win_id),

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -0,0 +1,529 @@

# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Trade transaction accounting and normalization.

'''
from __future__ import annotations
from bisect import insort
from dataclasses import asdict
from decimal import Decimal
from functools import partial
from pprint import pformat
from typing import (
    Any,
    Callable,
    TYPE_CHECKING,
)

from bidict import bidict
from pendulum import (
    DateTime,
    parse,
    from_timestamp,
)
from ib_insync import (
    Contract,
    Commodity,
    Fill,
    Execution,
    CommissionReport,
)

from piker.types import Struct
from piker.data import (
    SymbologyCache,
)
from piker.accounting import (
    Asset,
    dec_digits,
    digits_to_dec,
    Transaction,
    MktPair,
    iter_by_dt,
)
from ._flex_reports import parse_flex_dt
from ._util import log

if TYPE_CHECKING:
    from .api import (
        Client,
        MethodProxy,
    )


tx_sort: Callable = partial(
    iter_by_dt,
    parsers={
        'dateTime': parse_flex_dt,
        'datetime': parse,

        # XXX: for some fucking 2022 and
        # back options records.. f@#$ me..
        'date': parse,
    }
)
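
# how the `parsers` table above gets applied per record, in spirit
# (illustrative only; see `piker.accounting.iter_by_dt` for the real
# impl):
#
# def _parse_dt_field(record: dict) -> DateTime:
#     for field, parser in {
#         'dateTime': parse_flex_dt,
#         'datetime': parse,
#         'date': parse,
#     }.items():
#         if (val := record.get(field)) is not None:
#             return parser(val)
#     raise ValueError(f'no datetime field in {record!r}')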


def norm_trade(
    tid: str,
    record: dict[str, Any],

    # this is the dict that was returned from
    # `Client.get_mkt_pairs()` and when running offline ledger
    # processing from `.accounting`, this will be the table loaded
    # into `SymbologyCache.pairs`.
    pairs: dict[str, Struct],
    symcache: SymbologyCache | None = None,

) -> Transaction | None:

    conid: str = str(record.get('conId') or record['conid'])
    bs_mktid: str = str(conid)

    # NOTE: sometimes weird records (like BTTX?)
    # have no field for this?
    comms: float = -1 * (
        record.get('commission')
        or record.get('ibCommission')
        or 0
    )
    if not comms:
        log.warning(
            'No commissions found for record?\n'
            f'{pformat(record)}\n'
        )

    price: float = (
        record.get('price')
        or record.get('tradePrice')
    )
    if price is None:
        log.warning(
            'No `price` field found in record?\n'
            'Skipping normalization..\n'
            f'{pformat(record)}\n'
        )
        return None

    # the api doesn't do the -/+ on the quantity for you but flex
    # records do.. are you fucking serious ib...!?
    size: float | int = (
        record.get('quantity')
        or record['shares']
    ) * {
        'BOT': 1,
        'SLD': -1,
    }[record['side']]
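
    # e.g. {'quantity': 2, 'side': 'SLD'} -> size == -2,
    #      {'quantity': 2, 'side': 'BOT'} -> size == 2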

    symbol: str = record['symbol']
    exch: str = (
        record.get('listingExchange')
        or record.get('primaryExchange')
        or record['exchange']
    )

    # NOTE: remove null values since `tomlkit` can't serialize
    # them to file.
    if dnc := record.pop('deltaNeutralContract', None):
        record['deltaNeutralContract'] = dnc

    # likely an opts contract record from a flex report..
    # TODO: no idea how to parse ^ the strike part from flex..
    # (00010000 any, or 00007500 tsla, ..)
    # we probably must do the contract lookup for this?
    if (
        '   ' in symbol
        or '--' in exch
    ):
        underlying, _, tail = symbol.partition('   ')
        exch: str = 'opt'
        expiry: str = tail[:6]
        # otype = tail[6]
        # strike = tail[7:]

        log.warning(
            f'Skipping option contract -> NO SUPPORT YET!\n'
            f'{symbol}\n'
        )
        return None

    # timestamping is way different in API records
    dtstr: str = record.get('datetime')
    date: str = record.get('date')
    flex_dtstr: str = record.get('dateTime')

    if dtstr or date:
        dt: DateTime = parse(dtstr or date)

    elif flex_dtstr:
        # probably a flex record with a wonky non-std timestamp..
        dt: DateTime = parse_flex_dt(record['dateTime'])

    # special handling of symbol extraction from
    # flex records using some ad-hoc schema parsing.
    asset_type: str = (
        record.get('assetCategory')
        or record.get('secType')
        or 'STK'
    )

    if (expiry := (
            record.get('lastTradeDateOrContractMonth')
            or record.get('expiry')
        )
    ):
        expiry: str = str(expiry).strip(' ')
        # NOTE: we directly use the (simple and usually short)
        # date-string expiry token when packing the `MktPair`
        # since we want the fqme to contain *that* token.
        # It might make sense later to instead parse and then
        # render different output str format(s) for this same
        # purpose depending on asset-type-market down the road.
        # Eg. for derivs we use the short token only for fqme
        # but use the isoformat('T') for transactions and
        # account file position entries?
        # dt_str: str = pendulum.parse(expiry).isoformat('T')

    # XXX: pretty much all legacy market assets have a fiat
    # currency (denomination) determined by their venue.
    currency: str = record['currency']
    src = Asset(
        name=currency.lower(),
        atype='fiat',
        tx_tick=Decimal('0.01'),
    )

    match asset_type:
        case 'FUT':
            # XXX (flex) ledger entries don't necessarily have any
            # simple 3-char key.. sometimes the .symbol is some
            # weird internal key that we probably don't want in the
            # .fqme => we should probably just wrap `Contract` to
            # this like we do other crypto$ backends XD

            # NOTE: at least older FLEX records should have
            # this field.. no idea about API entries..
            local_symbol: str | None = record.get('localSymbol')
            underlying_key: str = record.get('underlyingSymbol')
            descr: str | None = record.get('description')

            if (
                not (
                    local_symbol
                    and symbol in local_symbol
                )
                and (
                    descr
                    and symbol not in descr
                )
            ):
                con_key, exp_str = descr.split(' ')
                symbol: str = underlying_key or con_key

            dst = Asset(
                name=symbol.lower(),
                atype='future',
                tx_tick=Decimal('1'),
            )

        case 'STK':
            dst = Asset(
                name=symbol.lower(),
                atype='stock',
                tx_tick=Decimal('1'),
            )

        case 'CASH':
            if currency not in symbol:
                # likely a dict-casted `Forex` contract which
                # has .symbol as the dst and .currency as the
                # src.
                name: str = symbol.lower()
            else:
                # likely a flex-report record which puts
                # EUR.USD as the symbol field and just USD in
                # the currency field.
                name: str = symbol.lower().replace(f'.{src.name}', '')

            dst = Asset(
                name=name,
                atype='fiat',
                tx_tick=Decimal('0.01'),
            )

        case 'OPT':
            dst = Asset(
                name=symbol.lower(),
                atype='option',
                tx_tick=Decimal('1'),

                # TODO: we should probably always cast to the
                # `Contract` instance then dict-serialize that for
                # the `.info` field!
                # info=asdict(Option()),
            )

        case 'CMDTY':
            from .symbols import _adhoc_symbol_map
            con_kwargs, _ = _adhoc_symbol_map[symbol.upper()]
            dst = Asset(
                name=symbol.lower(),
                atype='commodity',
                tx_tick=Decimal('1'),
                info=asdict(Commodity(**con_kwargs)),
            )

    # try to build out piker fqme from record.
    # src: str = record['currency']
    price_tick: Decimal = digits_to_dec(dec_digits(price))

    # NOTE: can't serialize `tomlkit.String` so cast to native
    atype: str = str(dst.atype)

    # if not (mkt := symcache.mktmaps.get(bs_mktid)):
    mkt = MktPair(
        bs_mktid=bs_mktid,
        dst=dst,

        price_tick=price_tick,
        # NOTE: for "legacy" assets, volume is normally discrete, not
        # a float, but we keep a digit in case the suitz decide
        # to get crazy and change it; we'll be kinda ready
        # schema-wise..
        size_tick=Decimal('1'),

        src=src,  # XXX: normally always a fiat
        _atype=atype,

        venue=exch,
        expiry=expiry,
        broker='ib',

        _fqme_without_src=(atype != 'fiat'),
    )

    fqme: str = mkt.fqme

    # XXX: if passed in, we fill out the symcache ad-hoc in order
    # to make downstream accounting work..
    if symcache is not None:
        orig_mkt: MktPair | None = symcache.mktmaps.get(bs_mktid)
        if (
            orig_mkt
            and orig_mkt.fqme != mkt.fqme
        ):
            log.warning(
            # print(
                f'Contracts with common `conId`: {bs_mktid} mismatch..\n'
                f'{orig_mkt.fqme} -> {mkt.fqme}\n'
                # 'with DIFF:\n'
                # f'{mkt - orig_mkt}'
            )

        symcache.mktmaps[bs_mktid] = mkt
        symcache.mktmaps[fqme] = mkt
        symcache.assets[src.name] = src
        symcache.assets[dst.name] = dst

    # NOTE: for flex records the normal fields for defining an fqme
    # sometimes won't be available so we rely on two approaches for
    # the "reverse lookup" of piker style fqme keys:
    # - when dealing with API trade records received from
    #   `IB.trades()` we do a contract lookup at the time of processing
    # - when dealing with flex records, it is assumed the record
    #   is at least a day old and thus the TWS position reporting system
    #   should already have entries if the pps are still open, in
    #   which case, we can pull the fqme from that table (see
    #   `trades_dialogue()` above).
    return Transaction(
        fqme=fqme,
        tid=tid,
        size=size,
        price=price,
        cost=comms,
        dt=dt,
        expiry=expiry,
        bs_mktid=str(conid),
    )


def norm_trade_records(
    ledger: dict[str, Any],
    symcache: SymbologyCache | None = None,

) -> dict[str, Transaction]:
    '''
    Normalize (xml) flex-report or (recent) API trade records into
    our ledger format with parsing for `MktPair` and `Asset`
    extraction to fill in the `Transaction.sys: MktPair` field.

    '''
    records: list[Transaction] = []
    for tid, record in ledger.items():

        txn = norm_trade(
            tid,
            record,

            # NOTE: currently no symcache support
            pairs={},
            symcache=symcache,
        )

        if txn is None:
            continue

        # inject txns sorted by datetime
        insort(
            records,
            txn,
            key=lambda t: t.dt
        )

    return {r.tid: r for r in records}
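
# the `insort(..., key=...)` call above keeps `records` datetime-sorted
# on every insert; a minimal standalone sketch (the `key` kwarg on
# `bisect.insort` needs python 3.10+):
#
# >>> from bisect import insort
# >>> txns = []
# >>> for dt, tid in [(3, 'c'), (1, 'a'), (2, 'b')]:
# ...     insort(txns, (dt, tid), key=lambda t: t[0])
# >>> [tid for _, tid in txns]
# ['a', 'b', 'c']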


def api_trades_to_ledger_entries(
    accounts: bidict[str, str],
    fills: list[Fill],

) -> dict[str, dict]:
    '''
    Convert API execution objects into flattened-``dict`` form,
    pretty much straight up without modification except add
    a `pydatetime` field from the parsed timestamp so that
    entries can be datetime-sorted on write.

    '''
    trades_by_account: dict[str, dict] = {}
    for fill in fills:

        # NOTE: for the schema, see the defn for `Fill` which is
        # a `NamedTuple` subtype
        fdict: dict = fill._asdict()

        # flatten all (sub-)objects and convert to dicts
        # with values packed into one top level entry.
        val: CommissionReport | Execution | Contract
        txn_dict: dict[str, Any] = {}
        for attr_name, val in fdict.items():
            match attr_name:
                # value is a `@dataclass` subtype
                case 'contract' | 'execution' | 'commissionReport':
                    txn_dict.update(asdict(val))

                case 'time':
                    # ib has wack ns timestamps, or is that us?
                    continue

                # TODO: we can remove this case right since there's
                # only 4 fields on a `Fill`?
                case _:
                    txn_dict[attr_name] = val

        tid = str(txn_dict['execId'])
        dt = from_timestamp(txn_dict['time'])
        txn_dict['datetime'] = str(dt)
        acctid = accounts[txn_dict['acctNumber']]

        # NOTE: only inserted (then later popped) for sorting below!
        txn_dict['pydatetime'] = dt

        if not tid:
            # this is likely some kind of internal adjustment
            # transaction, likely one of the following:
            # - an expiry event that will show a "book trade" indicating
            #   some adjustment to cash balances: zeroing or itm settle.
            # - a manual cash balance position adjustment likely done by
            #   the user from the accounts window in TWS where they can
            #   manually set the avg price and size:
            #   https://api.ibkr.com/lib/cstools/faq/web1/index.html#/tag/DTWS_ADJ_AVG_COST
            log.warning(
                'Skipping ID-less ledger txn_dict:\n'
                f'{pformat(txn_dict)}'
            )
            continue

        trades_by_account.setdefault(
            acctid, {}
        )[tid] = txn_dict

    # TODO: maybe we should just bisect.insort() into a list of
    # tuples and then return a dict of that?
    # sort entries in output by python based datetime
    for acctid in trades_by_account:
        trades_by_account[acctid] = dict(sorted(
            trades_by_account[acctid].items(),
            key=lambda entry: entry[1].pop('pydatetime'),
        ))

    return trades_by_account


async def update_ledger_from_api_trades(
    fills: list[Fill],
    client: Client | MethodProxy,
    accounts_def_inv: bidict[str, str],

    # NOTE: provided for ad-hoc insertions "as transactions are
    # processed" -> see `norm_trade()` signature requirements.
    symcache: SymbologyCache | None = None,

) -> tuple[
    dict[str, Transaction],
    dict[str, dict],
]:
    # XXX: ERRGGG..
    # pack in the "primary/listing exchange" value from a
    # contract lookup since it seems this isn't available by
    # default from the `.fills()` method endpoint...
    fill: Fill
    for fill in fills:
        con: Contract = fill.contract
        conid: str = con.conId
        pexch: str | None = con.primaryExchange

        if not pexch:
            cons = await client.get_con(conid=conid)
            if cons:
                con = cons[0]
                pexch = con.primaryExchange or con.exchange
            else:
                # for futes it seems like the primary is always empty?
                pexch: str = con.exchange

        # pack in the ``Contract.secType``
        # entry['asset_type'] = condict['secType']

    entries: dict[str, dict] = api_trades_to_ledger_entries(
        accounts_def_inv,
        fills,
    )
    # normalize recent session's trades to the `Transaction` type
    trans_by_acct: dict[str, dict[str, Transaction]] = {}

    for acctid, trades_by_id in entries.items():
        # normalize to transaction form
        trans_by_acct[acctid] = norm_trade_records(
            trades_by_id,
            symcache=symcache,
        )

    return trans_by_acct, entries
@@ -0,0 +1,615 @@

# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Symbology search and normalization.

'''
from __future__ import annotations
from contextlib import (
    nullcontext,
)
from decimal import Decimal
import time
from typing import (
    Awaitable,
    TYPE_CHECKING,
)

from rapidfuzz import process as fuzzy
import ib_insync as ibis
import tractor
import trio

from piker.accounting import (
    Asset,
    MktPair,
    unpack_fqme,
)
from piker._cacheables import (
    async_lifo_cache,
)

from ._util import (
    log,
)

if TYPE_CHECKING:
    from .api import (
        MethodProxy,
        Client,
    )

_futes_venues = (
    'GLOBEX',
    'NYMEX',
    'CME',
    'CMECRYPTO',
    'COMEX',
    # 'CMDTY',  # special name case..
    'CBOT',  # (treasury) yield futures
)

_adhoc_cmdty_set = {
    # metals
    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924
    'xauusd.cmdty',  # london gold spot ^
    'xagusd.cmdty',  # silver spot
}

# NOTE: if you aren't seeing one of these symbols' futures contracts
# show up, it's likely the `.<venue>` part is wrong!
_adhoc_futes_set = {

    # equities
    'nq.cme',
    'mnq.cme',  # micro

    'es.cme',
    'mes.cme',  # micro

    # crypto$
    'brr.cme',
    'mbt.cme',  # micro
    'ethusdrr.cme',

    # agriculture
    'he.comex',  # lean hogs
    'le.comex',  # live cattle (geezers)
    'gf.comex',  # feeder cattle (younguns)

    # raw
    'lb.comex',  # random len lumber

    'gc.comex',
    'mgc.comex',  # micro

    # oil & gas
    'cl.nymex',

    'ni.comex',  # silver futes
    'qi.comex',  # mini-silver futes

    # treasury yields
    # etfs by duration:
    # SHY -> IEI -> IEF -> TLT
    'zt.cbot',  # 2y
    'z3n.cbot',  # 3y
    'zf.cbot',  # 5y
    'zn.cbot',  # 10y
    'zb.cbot',  # 30y

    # (micros of above)
    '2yy.cbot',
    '5yy.cbot',
    '10y.cbot',
    '30y.cbot',
}


# taken from list here:
# https://www.interactivebrokers.com/en/trading/products-spot-currencies.php
_adhoc_fiat_set = set((
    'USD, AED, AUD, CAD,'
    'CHF, CNH, CZK, DKK,'
    'EUR, GBP, HKD, HUF,'
    'ILS, JPY, MXN, NOK,'
    'NZD, PLN, RUB, SAR,'
    'SEK, SGD, TRY, ZAR'
    ).replace(' ', '').split(',')
)

# manually discovered tick discrepancies,
# only god knows how or why they'd cuck these up..
_adhoc_mkt_infos: dict[int | str, dict] = {
    'vtgn.nasdaq': {'price_tick': Decimal('0.01')},
}


# map of symbols to contract ids
_adhoc_symbol_map = {
    # https://misc.interactivebrokers.com/cstools/contract_info/v3.10/index.php?action=Conid%20Info&wlId=IB&conid=69067924

    # NOTE: some cmdtys/metals don't have trade data like gold/usd:
    # https://groups.io/g/twsapi/message/44174
    'XAUUSD': ({'conId': 69067924}, {'whatToShow': 'MIDPOINT'}),
}
for qsn in _adhoc_futes_set:
    sym, venue = qsn.split('.')
    assert venue.upper() in _futes_venues, f'{venue}'
    _adhoc_symbol_map[sym.upper()] = (
        {'exchange': venue},
        {},
    )


# exchanges we don't support at the moment due to not knowing
# how to do symbol-contract lookup correctly likely due
# to not having the data feeds subscribed.
_exch_skip_list = {

    'ASX',  # aussie stocks
    'MEXI',  # mexican stocks

    # no idea
    'NSE',
    'VALUE',
    'FUNDSERV',
    'SWB2',
    'PSE',
    'PHLX',
}

# optional search config the backend can register for
# its symbol search handling (in this case we avoid
# accepting patterns before the kb has settled more than
# a quarter second).
_search_conf = {
    'pause_period': 6 / 16,
}

@tractor.context
 | 
			
		||||
async def open_symbol_search(ctx: tractor.Context) -> None:
 | 
			
		||||
    '''
 | 
			
		||||
    Symbology search brokerd-endpoint.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    from .api import open_client_proxies
 | 
			
		||||
    from .feed import open_data_client
 | 
			
		||||
 | 
			
		||||
    # TODO: load user defined symbol set locally for fast search?
 | 
			
		||||
    await ctx.started({})
 | 
			
		||||
 | 
			
		||||
    async with (
 | 
			
		||||
        open_client_proxies() as (proxies, _),
 | 
			
		||||
        open_data_client() as data_proxy,
 | 
			
		||||
    ):
 | 
			
		||||
        async with ctx.open_stream() as stream:
 | 
			
		||||
 | 
			
		||||
            # select a non-history client for symbol search to lighten
 | 
			
		||||
            # the load in the main data node.
 | 
			
		||||
            proxy = data_proxy
 | 
			
		||||
            for name, proxy in proxies.items():
 | 
			
		||||
                if proxy is data_proxy:
 | 
			
		||||
                    continue
 | 
			
		||||
                break
 | 
			
		||||
 | 
			
		||||
            ib_client = proxy._aio_ns.ib
 | 
			
		||||
            log.info(
 | 
			
		||||
                f'Using API client for symbol-search\n'
 | 
			
		||||
                f'{ib_client}\n'
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
            last = time.time()
 | 
			
		||||
            async for pattern in stream:
 | 
			
		||||
                log.info(f'received {pattern}')
 | 
			
		||||
                now: float = time.time()
 | 
			
		||||
 | 
			
		||||
                # this causes tractor hang...
 | 
			
		||||
                # assert 0
 | 
			
		||||
 | 
			
		||||
                assert pattern, 'IB can not accept blank search pattern'
 | 
			
		||||
 | 
			
		||||
                # throttle search requests to no faster then 1Hz
 | 
			
		||||
                diff = now - last
 | 
			
		||||
                if diff < 1.0:
 | 
			
		||||
                    log.debug('throttle sleeping')
 | 
			
		||||
                    await trio.sleep(diff)
 | 
			
		||||
                    try:
 | 
			
		||||
                        pattern = stream.receive_nowait()
 | 
			
		||||
                    except trio.WouldBlock:
 | 
			
		||||
                        pass
 | 
			
		||||
 | 
			
		||||
                if (
 | 
			
		||||
                    not pattern
 | 
			
		||||
                    or pattern.isspace()
 | 
			
		||||
 | 
			
		||||
                    # XXX: not sure if this is a bad assumption but it
 | 
			
		||||
                    # seems to make search snappier?
 | 
			
		||||
                    or len(pattern) < 1
 | 
			
		||||
                ):
 | 
			
		||||
                    log.warning('empty pattern received, skipping..')
 | 
			
		||||
 | 
			
		||||
                    # TODO: *BUG* if nothing is returned here the client
 | 
			
		||||
                    # side will cache a null set result and not showing
 | 
			
		||||
                    # anything to the use on re-searches when this query
 | 
			
		||||
                    # timed out. We probably need a special "timeout" msg
 | 
			
		||||
                    # or something...
 | 
			
		||||
 | 
			
		||||
                    # XXX: this unblocks the far end search task which may
 | 
			
		||||
                    # hold up a multi-search nursery block
 | 
			
		||||
                    await stream.send({})
 | 
			
		||||
 | 
			
		||||
                    continue
 | 
			
		||||
 | 
			
		||||
                log.info(f'searching for {pattern}')
 | 
			
		||||
 | 
			
		||||
                last = time.time()
                # async batch search using api stocks endpoint and module
                # defined adhoc symbol set.
                stock_results = []

                async def extend_results(
                    target: Awaitable[list]
                ) -> None:
                    try:
                        results = await target
                    except tractor.trionics.Lagged:
                        print("IB SYM-SEARCH OVERRUN?!?")
                        return

                    stock_results.extend(results)

                for _ in range(10):
                    with trio.move_on_after(3) as cs:
                        async with trio.open_nursery() as sn:
                            sn.start_soon(
                                extend_results,
                                proxy.search_symbols(
                                    pattern=pattern,
                                    upto=5,
                                ),
                            )

                            # trigger async request
                            await trio.sleep(0)

                    if cs.cancelled_caught:
                        log.warning(
                            f'Search timeout? {proxy._aio_ns.ib.client}'
                        )
                        continue
                    elif stock_results:
                        break
                    # else:
                    # await tractor.pause()

                    # # match against our ad-hoc set immediately
                    # adhoc_matches = fuzzy.extract(
                    #     pattern,
                    #     list(_adhoc_futes_set),
                    #     score_cutoff=90,
                    # )
                    # log.info(f'fuzzy matched adhocs: {adhoc_matches}')
                    # adhoc_match_results = {}
                    # if adhoc_matches:
                    #     # TODO: do we need to pull contract details?
                    #     adhoc_match_results = {i[0]: {} for i in
                    #     adhoc_matches}

                log.debug(f'fuzzy matching stocks {stock_results}')
                stock_matches = fuzzy.extract(
                    pattern,
                    stock_results,
                    score_cutoff=50,
                )

                # matches = adhoc_match_results | {
                matches = {
                    item[0]: {} for item in stock_matches
                }
                # TODO: we used to deliver contract details
                # {item[2]: item[0] for item in stock_matches}

                log.debug(f"sending matches: {matches.keys()}")
                await stream.send(matches)
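
A minimal standalone sketch of the timeout-and-retry pattern used in the
search loop above: `trio.move_on_after()` bounds each attempt and a one-shot
nursery runs the request task. The `fetch()` coro and the 3s/10-try budget
here are illustrative stand-ins, not from the source:

import trio

async def fetch() -> list[str]:
    # stand-in for a real (possibly slow) network request
    await trio.sleep(1)
    return ['result']

async def bounded_retries() -> list[str]:
    results: list[str] = []

    async def collect(target) -> None:
        results.extend(await target)

    for _ in range(10):
        with trio.move_on_after(3) as cs:
            async with trio.open_nursery() as nursery:
                nursery.start_soon(collect, fetch())
                # yield to the scheduler so the request task starts
                await trio.sleep(0)

        if cs.cancelled_caught:
            continue  # this attempt timed out, try again
        elif results:
            break

    return results

trio.run(bounded_retries)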

# re-mapping to piker asset type names
# https://github.com/erdewit/ib_insync/blob/master/ib_insync/contract.py#L113
_asset_type_map = {
    'STK': 'stock',
    'OPT': 'option',
    'FUT': 'future',
    'CONTFUT': 'continuous_future',
    'CASH': 'fiat',
    'IND': 'index',
    'CFD': 'cfd',
    'BOND': 'bond',
    'CMDTY': 'commodity',
    'FOP': 'futures_option',
    'FUND': 'mutual_fund',
    'WAR': 'warrant',
    'IOPT': 'warrant',
    'BAG': 'bag',
    'CRYPTO': 'crypto',  # bc it's different than fiat?
    # 'NEWS': 'news',
}
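
A quick illustration of how the table above translates `ib_insync`
contract `secType` codes into piker asset-type names (the input value is
just an example):

# eg. taken from some `ibis.Contract.secType` field
sec_type: str = 'CONTFUT'
atype: str = _asset_type_map[sec_type]
assert atype == 'continuous_future'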

def parse_patt2fqme(
    # client: Client,
    pattern: str,

) -> tuple[str, str, str, str]:

    # TODO: we can't use this currently because
    # ``wrapper.startTicker()`` currently caches ticker instances
    # which means getting a single quote will potentially look up
    # a quote for a ticker that is already streaming and thus run
    # into state clobbering (eg. list: Ticker.ticks). It probably
    # makes sense to try this once we get the pub-sub working on
    # individual symbols...

    # XXX UPDATE: we can probably do the tick/trades scraping
    # inside our eventkit handler instead to bypass this entirely?

    currency = ''

    # fqme parsing stage
    # ------------------
    if '.ib' in pattern:
        _, symbol, venue, expiry = unpack_fqme(pattern)

    else:
        symbol = pattern
        expiry = ''

        # # another hack for forex pairs lul.
        # if (
        #     '.idealpro' in symbol
        #     # or '/' in symbol
        # ):
        #     exch: str = 'IDEALPRO'
        #     symbol = symbol.removesuffix('.idealpro')
        #     if '/' in symbol:
        #         symbol, currency = symbol.split('/')

        # else:
        # TODO: yes, a cache..
        # try:
        #     # give the cache a go
        #     return client._contracts[symbol]
        # except KeyError:
        #     log.debug(f'Looking up contract for {symbol}')
        expiry: str = ''
        if symbol.count('.') > 1:
            symbol, _, expiry = symbol.rpartition('.')

        # use heuristics to figure out contract "type"
        symbol, venue = symbol.upper().rsplit('.', maxsplit=1)

    return symbol, currency, venue, expiry
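
Tracing the heuristic branch above by hand, a dotted non-`.ib` pattern
splits out like so (values worked through from the code, not from a test
suite):

symbol, currency, venue, expiry = parse_patt2fqme('mnq.cme.20230616')
# the expiry is peeled off the right, then the venue is split from
# the symbol and both are upper-cased; currency stays empty on this path.
assert (symbol, currency, venue, expiry) == ('MNQ', '', 'CME', '20230616')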

def con2fqme(
    con: ibis.Contract,
    _cache: dict[int, tuple[str, bool]] = {}

) -> tuple[str, bool]:
    '''
    Convert contracts to fqme-style strings to be used both in
    symbol-search matching and as feed tokens passed to the front
    end data feed layer.

    Previously seen contracts are cached by id.

    '''
    # should be real volume for this contract by default
    calc_price: bool = False
    if con.conId:
        try:
            # TODO: LOL so apparently IB just changes the contract
            # ID (int) on a whim.. so we probably need to use an
            # FQME style key after all...
            return _cache[con.conId]
        except KeyError:
            pass

    suffix: str = con.primaryExchange or con.exchange
    symbol: str = con.symbol
    expiry: str = con.lastTradeDateOrContractMonth or ''

    match con:
        case ibis.Option():
            # TODO: option symbol parsing and sane display:
            symbol = con.localSymbol.replace(' ', '')

        case (
            ibis.Commodity()
            # search API endpoint returns std con box..
            | ibis.Contract(secType='CMDTY')
        ):
            # commodities and forex don't have an exchange name and
            # no real volume so we have to calculate the price
            suffix = con.secType

            # no real volume on this contract
            calc_price = True

        case ibis.Forex() | ibis.Contract(secType='CASH'):
            dst, src = con.localSymbol.split('.')
            symbol = ''.join([dst, src])
            suffix = con.exchange or 'idealpro'

            # no real volume on forex feeds..
            calc_price = True

    if not suffix:
        entry = _adhoc_symbol_map.get(
            con.symbol or con.localSymbol
        )
        if entry:
            meta, kwargs = entry
            cid = meta.get('conId')
            if cid:
                assert con.conId == meta['conId']
            suffix = meta['exchange']

    # append a `.<suffix>` to the returned symbol
    # key for derivatives that normally is the expiry
    # date key.
    if expiry:
        suffix += f'.{expiry}'

    fqme_key = symbol.lower()
    if suffix:
        fqme_key = '.'.join((fqme_key, suffix)).lower()

    _cache[con.conId] = fqme_key, calc_price
    return fqme_key, calc_price
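
`con2fqme()` leans on the (deliberate) mutable-default-argument idiom:
`_cache` is created once at function definition time and thus persists
across calls, acting as a per-process memo table. The same idiom in
miniature:

def slow_lookup(
    key: int,
    # NOTE: deliberately mutable! bound once at def-time and shared
    # across every call, giving a free memoization table.
    _cache: dict[int, str] = {},
) -> str:
    try:
        return _cache[key]
    except KeyError:
        value: str = f'computed-{key}'  # stand-in for expensive work
        _cache[key] = value
        return value

assert slow_lookup(1) is slow_lookup(1)  # second call hits the cache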

@async_lifo_cache()
async def get_mkt_info(
    fqme: str,

    proxy: MethodProxy | None = None,

) -> tuple[MktPair, ibis.ContractDetails]:

    if '.ib' not in fqme:
        fqme += '.ib'
    broker, pair, venue, expiry = unpack_fqme(fqme)

    proxy: MethodProxy
    if proxy is not None:
        client_ctx = nullcontext(proxy)
    else:
        from .feed import (
            open_data_client,
        )
        client_ctx = open_data_client

    async with client_ctx as proxy:
        try:
            (
                con,  # Contract
                details,  # ContractDetails
            ) = await proxy.get_sym_details(fqme=fqme)
        except ConnectionError:
            log.exception(f'Proxy is ded {proxy._aio_ns}')
            raise

    # TODO: more consistent field translation
    atype = _asset_type_map[con.secType]

    if atype == 'commodity':
        venue: str = 'cmdty'
    else:
        venue = con.primaryExchange or con.exchange

    price_tick: Decimal = Decimal(str(details.minTick))
    ib_min_tick_gt_2: Decimal = Decimal('0.01')
    if (
        price_tick < ib_min_tick_gt_2
    ):
        # TODO: we need to add some kinda dynamic rounding sys
        # to our MktPair i guess?
        # not sure where the logic should sit, but likely inside
        # the `.clearing._ems` i suppose...
        log.warning(
            'IB seems to disallow a min price tick < 0.01 '
            'when the price is > 2.0..?\n'
            f'Decreasing min tick precision for {fqme} to 0.01'
        )
        # price_tick = ib_min_tick
        # await tractor.pause()

    if atype == 'stock':
        # XXX: GRRRR they don't support fractional share sizes for
        # stocks from the API?!
        # if con.secType == 'STK':
        size_tick = Decimal('1')
    else:
        size_tick: Decimal = Decimal(
            str(details.minSize).rstrip('0')
        )
        # |-> TODO: there is also the Contract.sizeIncrement, but wtf is it?

    # NOTE: this is duplicate from the .broker.norm_trade_records()
    # routine, we should factor all this parsing somewhere..
    expiry_str = str(con.lastTradeDateOrContractMonth)
    # if expiry:
    #     expiry_str: str = str(pendulum.parse(
    #         str(expiry).strip(' ')
    #     ))

    # TODO: currently we can't pass the fiat src asset because
    # then we'll get a `MNQUSD` request for history data..
    # we need to figure out how we're going to handle this (later?)
    # but likely we want all backends to eventually handle
    # ``dst/src.venue.`` style !?
    src = Asset(
        name=str(con.currency).lower(),
        atype='fiat',
        tx_tick=Decimal('0.01'),  # right?
    )
    dst = Asset(
        name=con.symbol.lower(),
        atype=atype,
        tx_tick=size_tick,
    )

    mkt = MktPair(
        src=src,
        dst=dst,

        price_tick=price_tick,
        size_tick=size_tick,

        bs_mktid=str(con.conId),
        venue=str(venue),
        expiry=expiry_str,
        broker='ib',

        # TODO: options contract info as str?
        # contract_info=<optionsdetails>
        _fqme_without_src=(atype != 'fiat'),
    )

    # just.. wow.
    if entry := _adhoc_mkt_infos.get(mkt.bs_fqme):
        log.warning(f'Frickin {mkt.fqme} has an adhoc {entry}..')
        new = mkt.to_dict()
        new['price_tick'] = entry['price_tick']
        new['src'] = src
        new['dst'] = dst
        mkt = MktPair(**new)

    # if possible register the bs_mktid to the just-built
    # mkt so that it can be retrieved by order mode tasks later.
    # TODO NOTE: this is going to be problematic if/when we split
    # out the datad vs. brokerd actors since the mktmap lookup
    # table will now be inaccessible..
    if proxy is not None:
        client: Client = proxy._aio_ns
        client._contracts[mkt.bs_fqme] = con
        client._cons2mkts[con] = mkt

    return mkt, details
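
Note the `Decimal(str(details.minTick))` dance above: building a `Decimal`
from the *string* form avoids inheriting binary-float representation
error. A quick demonstration:

from decimal import Decimal

# float -> Decimal drags in the full binary expansion..
assert Decimal(0.01) != Decimal('0.01')
# ..while round-tripping through str() keeps the intended tick value.
assert Decimal(str(0.01)) == Decimal('0.01')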

@ -58,7 +58,7 @@ your ``pps.toml`` file will have position entries like,

   [kraken.spot."xmreur.kraken"]
   size = 4.80907954
   ppu = 103.97000000
   bsuid = "XXMRZEUR"
   bs_mktid = "XXMRZEUR"
   clears = [
    { tid = "TFJBKK-SMBZS-VJ4UWS", cost = 0.8, price = 103.97, size = 4.80907954, dt = "2022-05-20T02:26:33.413397+00:00" },
   ]
@ -19,43 +19,57 @@ Kraken backend.

Sub-modules within break into the core functionalities:

- ``broker.py`` part for orders / trading endpoints
- ``feed.py`` for real-time data feed endpoints
- ``api.py`` for the core API machinery which is ``trio``-ized
  wrapping around ``ib_insync``.
- .api: for the core API machinery which is generally
        an ``asks``/``trio-websocket`` implemented ``Client``.
- .broker: part for orders / trading endpoints.
- .feed: for real-time and historical data query endpoints.
- .ledger: for transaction processing as it pertains to accounting.
- .symbols: for market (name) search and symbology meta-defs.

'''

from piker.log import get_logger

log = get_logger(__name__)

from .symbols import (
    Pair,  # for symcache
    open_symbol_search,
    # required by `.accounting`, `.data`
    get_mkt_info,
)
# required by `.brokers`
from .api import (
    get_client,
)
from .feed import (
    open_history_client,
    open_symbol_search,
    # required by `.data`
    stream_quotes,
    open_history_client,
)
from .broker import (
    trades_dialogue,
    # required by `.clearing`
    open_trade_dialog,
)
from .ledger import (
    # required by `.accounting`
    norm_trade,
    norm_trade_records,
)


__all__ = [
    'get_client',
    'trades_dialogue',
    'get_mkt_info',
    'Pair',
    'open_trade_dialog',
    'open_history_client',
    'open_symbol_search',
    'stream_quotes',
    'norm_trade_records',
    'norm_trade',
]


# tractor RPC enable arg
__enable_modules__: list[str] = [
    'api',
    'feed',
    'broker',
    'feed',
    'symbols',
]
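
For context, a backend package like this one is normally resolved
dynamically by broker name; a sketch assuming piker's `get_brokermod()`
helper from `piker.brokers` is still the lookup entry point (check your
piker version):

from piker.brokers import get_brokermod

# the module-level exports above (`get_client`, `stream_quotes`, ..)
# make up the backend interface consumed through this handle:
brokermod = get_brokermod('kraken')
assert hasattr(brokermod, 'get_client')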

@ -15,7 +15,7 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Kraken web API wrapping.
Core (web) API client

'''
from contextlib import asynccontextmanager as acm
@ -23,15 +23,12 @@ from datetime import datetime
import itertools
from typing import (
    Any,
    Optional,
    Union,
)
import time

from bidict import bidict
import httpx
import pendulum
import asks
from fuzzywuzzy import process as fuzzy
import numpy as np
import urllib.parse
import hashlib
@ -40,38 +37,37 @@ import base64
import trio

from piker import config
from piker.data.types import Struct
from piker.data._source import Symbol
from piker.data import (
    def_iohlcv_fields,
    match_from_pairs,
)
from piker.accounting._mktinfo import (
    Asset,
    digits_to_dec,
    dec_digits,
)
from piker.brokers._util import (
    resproc,
    SymbolNotFound,
    BrokerError,
    DataThrottle,
)
from piker.pp import Transaction
from . import log
from piker.accounting import Transaction
from piker.log import get_logger
from .symbols import Pair

log = get_logger('piker.brokers.kraken')

# <uri>/<version>/
_url = 'https://api.kraken.com/0'

_headers: dict[str, str] = {
    'User-Agent': 'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
}

# Broker specific ohlc schema which includes a vwap field
_ohlc_dtype = [
    ('index', int),
    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
    ('count', int),
    ('bar_wap', float),
]

# UI components allow this to be declared such that additional
# (historical) fields can be exposed.
ohlc_dtype = np.dtype(_ohlc_dtype)

# TODO: this is the only backend providing this right?
# in which case we should drop it from the defaults and
# instead make a custom fields descr in this module!
_show_wap_in_history = True
_symbol_info_translation: dict[str, str] = {
    'tick_decimals': 'pair_decimals',
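
For reference, a structured dtype like `_ohlc_dtype` lets bar fields be
addressed by name on the resulting array; a minimal sketch with a couple
of hand-made bars (values are made up):

import numpy as np

dtype = np.dtype([
    ('index', int),
    ('time', int),
    ('open', float),
    ('close', float),
])
bars = np.array(
    [
        (0, 1658347714, 103.9, 104.1),
        (1, 1658347774, 104.1, 104.0),
    ],
    dtype=dtype,
)
# column access by field name:
assert bars['close'][-1] == 104.0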

@ -79,12 +75,18 @@ _symbol_info_translation: dict[str, str] = {


def get_config() -> dict[str, Any]:
    '''
    Load our section from `piker/brokers.toml`.

    conf, path = config.load()
    section = conf.get('kraken')

    if section is None:
        log.warning(f'No config section found for kraken in {path}')
    '''
    conf, path = config.load(
        conf_name='brokers',
        touch_if_dne=True,
    )
    if (section := conf.get('kraken')) is None:
        log.warning(
            f'No config section found for kraken in {path}'
        )
        return {}

    return section

@ -115,79 +117,51 @@ class InvalidKey(ValueError):
    '''


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    cost_decimals: int
    costmin: float
    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
    tick_size: float  # min price step size
    status: str

    short_position_limit: float = 0
    long_position_limit: float = float('inf')


class Client:

    # global symbol normalization table
    _ntable: dict[str, str] = {}
    _atable: bidict[str, str] = bidict()
    # assets and mkt pairs are key-ed by kraken's ReST response
    # symbol-bs_mktids (we call them "X-keys" like fricking
    # "XXMRZEUR"). these keys are used directly since ledger endpoints
    # return transaction sets keyed with the same set!
    _Assets: dict[str, Asset] = {}
    _AssetPairs: dict[str, Pair] = {}

    # offer lookup tables for all .altname and .wsname
    # to the equivalent .xname so that various symbol-schemas
    # can be mapped to `Pair`s in the tables above.
    _altnames: dict[str, str] = {}
    _wsnames: dict[str, str] = {}

    # key-ed by `Pair.bs_fqme: str`, and thus used for search
    # allowing for lookup using piker's own FQME symbology sys.
    _pairs: dict[str, Pair] = {}
    _assets: dict[str, Asset] = {}

    def __init__(
        self,
        config: dict[str, str],
        httpx_client: httpx.AsyncClient,

        name: str = '',
        api_key: str = '',
        secret: str = ''
    ) -> None:
        self._sesh = asks.Session(connections=4)
        self._sesh.base_location = _url
        self._sesh.headers.update({
            'User-Agent':
                'krakenex/2.1.0 (+https://github.com/veox/python3-krakenex)'
        })
        self.conf: dict[str, str] = config

        self._sesh: httpx.AsyncClient = httpx_client

        self._name = name
        self._api_key = api_key
        self._secret = secret

        self.conf: dict[str, str] = config

    @property
    def pairs(self) -> dict[str, Pair]:

        if self._pairs is None:
            raise RuntimeError(
                "Make sure to run `cache_symbols()` on startup!"
                "Client didn't run `.get_mkt_pairs()` on startup?!"
            )
            # retrieve and cache all symbols

        return self._pairs
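
Worth flagging: the lookup tables above are *class* attributes, so every
`Client` instance in the same process shares one set of symbol caches.
The same behaviour in miniature:

class Cache:
    # class-level dict: shared by all instances
    _pairs: dict[str, str] = {}

a, b = Cache(), Cache()
a._pairs['XBTUSD'] = 'xbtusd.spot.kraken'
assert b._pairs['XBTUSD'] == 'xbtusd.spot.kraken'  # same table!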

@ -196,10 +170,9 @@ class Client:
        method: str,
        data: dict,
    ) -> dict[str, Any]:
        resp = await self._sesh.post(
            path=f'/public/{method}',
        resp: httpx.Response = await self._sesh.post(
            url=f'/public/{method}',
            json=data,
            timeout=float('inf')
        )
        return resproc(resp, log)
@ -210,18 +183,18 @@ class Client:
        uri_path: str
    ) -> dict[str, Any]:
        headers = {
            'Content-Type':
                'application/x-www-form-urlencoded',
            'API-Key':
                self._api_key,
            'API-Sign':
                get_kraken_signature(uri_path, data, self._secret)
            'Content-Type': 'application/x-www-form-urlencoded',
            'API-Key': self._api_key,
            'API-Sign': get_kraken_signature(
                uri_path,
                data,
                self._secret,
            ),
        }
        resp = await self._sesh.post(
            path=f'/private/{method}',
        resp: httpx.Response = await self._sesh.post(
            url=f'/private/{method}',
            data=data,
            headers=headers,
            timeout=float('inf')
        )
        return resproc(resp, log)
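
The `get_kraken_signature()` helper referenced here presumably implements
Kraken's documented REST auth scheme: HMAC-SHA512 over the uri path plus a
SHA256 digest of the nonce'd post data, keyed with the base64-decoded API
secret. A sketch straight from that spec:

import base64
import hashlib
import hmac
import urllib.parse

def kraken_signature(
    urlpath: str,  # eg. '/0/private/Balance'
    data: dict,    # must include a 'nonce' entry
    secret: str,   # base64-encoded API secret
) -> str:
    postdata: str = urllib.parse.urlencode(data)
    encoded: bytes = (str(data['nonce']) + postdata).encode()
    message: bytes = urlpath.encode() + hashlib.sha256(encoded).digest()
    mac = hmac.new(
        base64.b64decode(secret),
        message,
        hashlib.sha512,
    )
    return base64.b64encode(mac.digest()).decode()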

@ -247,20 +220,77 @@ class Client:
            'Balance',
            {},
        )
        by_bsuid = resp['result']
        return {
            self._atable[sym].lower(): float(bal)
            for sym, bal in by_bsuid.items()
        }
        by_bsmktid: dict[str, dict] = resp['result']

    async def get_assets(self) -> dict[str, dict]:
        resp = await self._public('Assets', {})
        return resp['result']
        balances: dict = {}
        for xname, bal in by_bsmktid.items():
            asset: Asset = self._Assets[xname]

    async def cache_assets(self) -> None:
        assets = self.assets = await self.get_assets()
        for bsuid, info in assets.items():
            self._atable[bsuid] = info['altname']
            # TODO: which KEY should we use? it's used to index
            # the `Account.pps: dict` ..
            key: str = asset.name.lower()
            # TODO: should we just return a `Decimal` here
            # or is the rounded version ok?
            balances[key] = round(
                float(bal),
                ndigits=dec_digits(asset.tx_tick)
            )

        return balances

    async def get_assets(
        self,
        reload: bool = False,

    ) -> dict[str, Asset]:
        '''
        Load and cache all asset infos and pack into
        our native ``Asset`` struct.

        https://docs.kraken.com/rest/#tag/Market-Data/operation/getAssetInfo

        return msg:
            "asset1": {
                "aclass": "string",
                "altname": "string",
                "decimals": 0,
                "display_decimals": 0,
                "collateral_value": 0,
                "status": "string"
            }

        '''
        if (
            not self._assets
            or reload
        ):
            resp = await self._public('Assets', {})
            assets: dict[str, dict] = resp['result']

            for bs_mktid, info in assets.items():

                altname: str = info['altname']
                aclass: str = info['aclass']
                asset = Asset(
                    name=altname,
                    atype=f'crypto_{aclass}',
                    tx_tick=digits_to_dec(info['decimals']),
                    info=info,
                )
                # NOTE: yes we keep 2 sets since kraken insists on
                # keeping 3 frickin sets bc apparently they have
                # no sane data engineers who all like different
                # keys for their fricking symbology sets..
                self._Assets[bs_mktid] = asset
                self._assets[altname.lower()] = asset
                self._assets[altname] = asset

        # we return the "most native" set merged with our preferred
        # naming (which i guess is the "altname" one) since that's
        # what the symcache loader will be storing, and we need the
        # keys that are easiest to match against in any trade
        # records.
        return self._Assets | self._assets

    async def get_trades(
        self,
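
Assuming the `piker.accounting._mktinfo` helpers behave as their names
suggest (`digits_to_dec()` maps a decimal-place count to the smallest
`Decimal` increment, `dec_digits()` is the inverse), the balance rounding
above works out like:

from decimal import Decimal

tx_tick = Decimal('0.00000001')  # what digits_to_dec(8) should yield
ndigits: int = 8                 # what dec_digits(tx_tick) should yield

bal: float = round(0.123456789, ndigits=ndigits)
assert bal == 0.12345679  # rounded to the asset's tx precision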

@ -323,10 +353,15 @@ class Client:
        Currently only withdrawals are supported.

        '''
        xfers: list[dict] = (await self.endpoint(
        resp = await self.endpoint(
            'WithdrawStatus',
            {'asset': asset},
        ))['result']
        )
        try:
            xfers: list[dict] = resp['result']
        except KeyError:
            log.exception(f'Kraken suxxx: {resp}')
            return []

        # eg. resp schema:
        # 'result': [{'method': 'Bitcoin', 'aclass': 'currency', 'asset':
@ -336,33 +371,28 @@ class Client:
        #     'amount': '0.00300726', 'fee': '0.00001000', 'time':
        #     1658347714, 'status': 'Success'}]}

        if xfers:
            import tractor
            await tractor.pp()

        trans: dict[str, Transaction] = {}
        for entry in xfers:

            # look up the normalized name and asset info
            asset_key = entry['asset']
            asset_info = self.assets[asset_key]
            asset = self._atable[asset_key].lower()
            asset_key: str = entry['asset']
            asset: Asset = self._Assets[asset_key]
            asset_key: str = asset.name.lower()

            # XXX: this is in the asset units (likely) so it isn't
            # quite the same as a commissions cost necessarily..)
            # TODO: also round this based on `Pair` cost precision info?
            cost = float(entry['fee'])
            # fqme: str = asset_key + '.kraken'

            fqsn = asset + '.kraken'
            pairinfo = Symbol.from_fqsn(
                fqsn,
                info={
                    'asset_type': 'crypto',
                    'lot_tick_size': asset_info['decimals'],
                },
            )

            tran = Transaction(
                fqsn=fqsn,
                sym=pairinfo,
            tx = Transaction(
                fqme=asset_key,  # this must map to an entry in .assets!
                tid=entry['txid'],
                dt=pendulum.from_timestamp(entry['time']),
                bsuid=f'{asset}{src_asset}',
                bs_mktid=f'{asset_key}{src_asset}',
                size=-1*(
                    float(entry['amount'])
                    +
@ -374,8 +404,13 @@ class Client:

                # XXX: see note above
                cost=cost,

                # not a trade but a withdrawal or deposit on the
                # asset (chain) system.
                etype='transfer',

            )
            trans[tran.tid] = tran
            trans[tx.tid] = tx

        return trans
@ -424,66 +459,92 @@ class Client:
        # txid is a transaction id given by kraken
        return await self.endpoint('CancelOrder', {"txid": reqid})

    async def symbol_info(
    async def asset_pairs(
        self,
        pair: Optional[str] = None,
        pair_patt: str | None = None,

    ) -> dict[str, Pair] | Pair:

        if pair is not None:
            pairs = {'pair': pair}
        else:
            pairs = None  # get all pairs

        resp = await self._public('AssetPairs', pairs)
        err = resp['error']
        if err:
            symbolname = pairs['pair'] if pair else None
            raise SymbolNotFound(f'{symbolname}.kraken')

        pairs = resp['result']

        if pair is not None:
            _, data = next(iter(pairs.items()))
            return Pair(**data)
        else:
            return {key: Pair(**data) for key, data in pairs.items()}

    async def cache_symbols(self) -> dict:
        '''
        Load all market pair info, build and cache it for downstream use.
        Query for a tradeable asset pair (info), or all if no input
        pattern is provided.

        A ``._ntable: dict[str, str]`` is available for mapping the
        websocket pair name-keys and their http endpoint API (smh)
        equivalents to the "alternative name" which is generally the one
        we actually want to use XD
        https://docs.kraken.com/rest/#tag/Market-Data/operation/getTradableAssetPairs

        '''
        if not self._pairs:
            self._pairs.update(await self.symbol_info())
        if not self._AssetPairs:
            # get all pairs by default, or filter
            # to whatever pattern is provided as input.
            req_pairs: dict[str, str] | None = None
            if pair_patt is not None:
                req_pairs = {'pair': pair_patt}

            # table of all ws and rest keys to their alt-name values.
            ntable: dict[str, str] = {}
            resp = await self._public(
                'AssetPairs',
                req_pairs,
            )
            err = resp['error']
            if err:
                raise SymbolNotFound(pair_patt)

            for rest_key in list(self._pairs.keys()):
            # NOTE: we try to key pairs by our custom defined
            # `.bs_fqme` field since we want to offer search over
            # this pattern set, callers should fill out lookup
            # tables for kraken's bs_mktid keys to map to these
            # keys!
            # XXX: FURTHER kraken's data eng team decided to offer
            # 3 frickin market-pair-symbol key sets depending on
            # which frickin API is being used.
            # Example for the trading pair 'LTC/EUR':
            # - the "X-key" from rest eps 'XLTCZEUR'
            # - the "websocket key" from ws msgs is 'LTC/EUR'
            # - the "altname key" also delivered in pair info is 'LTCEUR'
            for xkey, data in resp['result'].items():

                pair: Pair = self._pairs[rest_key]
                altname = pair.altname
                wsname = pair.wsname
                ntable[rest_key] = ntable[wsname] = altname
                # NOTE: always cache in pairs tables for faster lookup
                pair = Pair(xname=xkey, **data)

                # register the pair under all monikers, a giant flat
                # surjection of all possible names to each info obj.
                self._pairs[altname] = self._pairs[wsname] = pair
                # register the above `Pair` structs for all
                # key-sets/monikers: a set of 4 (frickin) tables
                # acting as a combined surjection of all possible
                # (and stupid) kraken names to their `Pair` obj.
                self._AssetPairs[xkey] = pair
                self._pairs[pair.bs_fqme] = pair
                self._altnames[pair.altname] = pair
                self._wsnames[pair.wsname] = pair

            self._ntable.update(ntable)
        if pair_patt is not None:
            return next(iter(self._pairs.items()))[1]

        return self._pairs
        return self._AssetPairs

    async def get_mkt_pairs(
        self,
        reload: bool = False,
    ) -> dict:
        '''
        Load all market pair info, build and cache it for downstream
        use.

        Multiple pair info lookup tables (like ``._altnames:
        dict[str, str]``) are created for looking up the
        piker-native `Pair`-struct from any input of the three
        (yes, it's that idiotic..) available symbol/pair-key-sets
        that kraken frickin offers depending on the API including
        the .altname, .wsname and the weird ass default set they
        return in ReST responses .xname..

        '''
        if (
            not self._pairs
            or reload
        ):
            await self.asset_pairs()

        return self._AssetPairs

    async def search_symbols(
        self,
        pattern: str,
        limit: int = None,

    ) -> dict[str, Any]:
        '''
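
Putting those registration tables to work, one `Pair` struct is now
reachable from any of kraken's key-sets; using the LTC/EUR spellings from
the comment above (treat the literal keys as illustrative):

async def demo(client: Client) -> None:
    await client.get_mkt_pairs()

    # same struct, four spellings:
    p1 = client._AssetPairs['XLTCZEUR']  # ReST "X-key"
    p2 = client._wsnames['LTC/EUR']      # websocket key
    p3 = client._altnames['LTCEUR']      # altname key
    p4 = client._pairs[p1.bs_fqme]       # piker-native fqme key
    assert p1 is p2 is p3 is p4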

@ -495,16 +556,20 @@ class Client:

        '''
        if not len(self._pairs):
            await self.cache_symbols()
            assert self._pairs, '`Client.cache_symbols()` was never called!?'
            await self.get_mkt_pairs()
            assert self._pairs, '`Client.get_mkt_pairs()` was never called!?'

        matches = fuzzy.extractBests(
            pattern,
            self._pairs,
        matches: dict[str, Pair] = match_from_pairs(
            pairs=self._pairs,
            query=pattern.upper(),
            score_cutoff=50,
        )
        # repack in dict form
        return {item[0].altname: item[0] for item in matches}

        # repack in .altname-keyed output table
        return {
            pair.altname: pair
            for pair in matches.values()
        }

    async def bars(
        self,
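
A caller then gets an `.altname`-keyed table of `Pair`s back, per the
repacking above (the pattern is just an example):

async def demo(client: Client) -> None:
    matches = await client.search_symbols('xbt')
    # eg. {'XBTUSD': Pair(...), 'XBTEUR': Pair(...), ...}
    for altname, pair in matches.items():
        print(altname, pair.wsname)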

@ -565,11 +630,11 @@ class Client:
                new_bars.append(
                    (i,) + tuple(
                        ftype(bar[j]) for j, (name, ftype) in enumerate(
                            _ohlc_dtype[1:]
                            def_iohlcv_fields[1:]
                        )
                    )
                )
            array = np.array(new_bars, dtype=_ohlc_dtype) if as_np else bars
            array = np.array(new_bars, dtype=def_iohlcv_fields) if as_np else bars
            return array
        except KeyError:
            errmsg = json['error'][0]

@ -584,38 +649,55 @@ class Client:
                raise BrokerError(errmsg)

    @classmethod
    def normalize_symbol(
    def to_bs_fqme(
        cls,
        ticker: str
    ) -> tuple[str, Pair]:
        pair_str: str
    ) -> str:
        '''
        Normalize symbol names to a 3x3 pair from the global
        definition map which we build out from the data retrieved from
        the 'AssetPairs' endpoint, see methods above.

        '''
        ticker = cls._ntable[ticker]
        return ticker.lower(), cls._pairs[ticker]
        try:
            return cls._altnames[pair_str.upper()].bs_fqme
        except KeyError as ke:
            raise SymbolNotFound(f'kraken has no {ke.args[0]}')


@acm
async def get_client() -> Client:

    conf = get_config()
    if conf:
        client = Client(
            conf,
            name=conf['key_descr'],
            api_key=conf['api_key'],
            secret=conf['secret']
        )
    else:
        client = Client({})
    conf: dict[str, Any] = get_config()
    async with httpx.AsyncClient(
        base_url=_url,
        headers=_headers,

    # at startup, load all symbols, and asset info in
    # batch requests.
    async with trio.open_nursery() as nurse:
        nurse.start_soon(client.cache_assets)
        await client.cache_symbols()
        # TODO: is there a way to numerate this?
        # https://www.python-httpx.org/advanced/clients/#why-use-a-client
        # connections=4
    ) as trio_client:
        if conf:
            client = Client(
                conf,
                httpx_client=trio_client,

    yield client
                # TODO: don't break these up and just do internal
                # conf lookups instead..
                name=conf['key_descr'],
                api_key=conf['api_key'],
                secret=conf['secret']
            )
        else:
            client = Client(
                conf={},
                httpx_client=trio_client,
            )

        # at startup, load all symbols, and asset info in
        # batch requests.
        async with trio.open_nursery() as nurse:
            nurse.start_soon(client.get_assets)
            await client.get_mkt_pairs()

        yield client
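
Downstream usage of the factory then looks like the following; by the time
the client is yielded, the asset and pair caches have already been primed
by the startup nursery so these calls should hit cache without further
network IO:

async def demo() -> None:
    async with get_client() as client:
        assets = await client.get_assets()
        pairs = await client.get_mkt_pairs()
        print(len(assets), len(pairs))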

@ -18,14 +18,12 @@
Order api and machinery

'''
from collections import ChainMap, defaultdict
from contextlib import (
    asynccontextmanager as acm,
    contextmanager as cm,
    aclosing,
)
from functools import partial
from itertools import count
import math
from pprint import pformat
import time
from typing import (
@ -35,20 +33,21 @@ from typing import (
    Union,
)

from async_generator import aclosing
from bidict import bidict
import pendulum
import trio
import tractor

from piker.pp import (
from piker.accounting import (
    Position,
    PpTable,
    Account,
    Transaction,
    TransactionLedger,
    open_trade_ledger,
    open_pps,
    open_account,
)
from piker.clearing import (
    OrderDialogs,
)
from piker.data._source import Symbol
from piker.clearing._messages import (
    Order,
    Status,
@ -60,18 +59,24 @@ from piker.clearing._messages import (
    BrokerdPosition,
    BrokerdStatus,
)
from . import log
from piker.brokers import (
    open_cached_client,
)
from piker.data import open_symcache
from .api import (
    log,
    Client,
    BrokerError,
    get_client,
)
from .feed import (
    get_console_log,
    open_autorecon_ws,
    NoBsWs,
    stream_messages,
)
from .ledger import (
    norm_trade_records,
    verify_balances,
)

MsgUnion = Union[
    BrokerdCancel,
@ -121,7 +126,7 @@ async def handle_order_requests(
    client: Client,
    ems_order_stream: tractor.MsgStream,
    token: str,
    apiflows: dict[int, ChainMap[dict[str, dict]]],
    apiflows: OrderDialogs,
    ids: bidict[str, int],
    reqids2txids: dict[int, str],
@ -131,10 +136,8 @@ async def handle_order_requests(
    and deliver acks or errors.

    '''
    # XXX: UGH, let's unify this.. with ``msgspec``.
    msg: dict[str, Any]
    order: BrokerdOrder

    # XXX: UGH, let's unify this.. with ``msgspec``!!!
    msg: dict | Order
    async for msg in ems_order_stream:
        log.info(f'Rx order msg:\n{pformat(msg)}')
        match msg:
@ -180,11 +183,12 @@ async def handle_order_requests(

                # logic from old `Client.submit_limit()`
                if order.oid in ids:
                    ep = 'editOrder'
                    reqid = ids[order.oid]  # integer not txid
                    ep: str = 'editOrder'
                    reqid: int = ids[order.oid]  # integer not txid
                    try:
                        txid = reqids2txids[reqid]
                        txid: str = reqids2txids[reqid]
                    except KeyError:

                        # XXX: not sure if this block ever gets hit now?
                        log.error('TOO FAST EDIT')
                        reqids2txids[reqid] = TooFastEdit(reqid)
@ -205,7 +209,7 @@ async def handle_order_requests(
                        }

                else:
                    ep = 'addOrder'
                    ep: str = 'addOrder'

                    reqid = BrokerClient.new_reqid()
                    ids[order.oid] = reqid
@ -218,8 +222,12 @@ async def handle_order_requests(
                        'type': order.action,
                    }

                    psym = order.symbol.upper()
                    pair = f'{psym[:3]}/{psym[3:]}'
                    # XXX strip any .<venue> token which should
                    # ONLY ever be '.spot' rn, until we support
                    # futes.
                    bs_fqme: str = order.symbol.replace('.spot', '')
                    psym: str = bs_fqme.upper()
                    pair: str = f'{psym[:3]}/{psym[3:]}'

                # XXX: ACK the request **immediately** before sending
                # the api side request to ensure the ems maps the oid ->
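
Worked through, the venue-strip plus 3/3 slicing maps a piker order symbol
to kraken's ws pair format; note the hard assumption that the base symbol
is exactly 3 chars, which is part of why only '.spot' names are handled rn:

order_symbol: str = 'xbteur.spot'
bs_fqme: str = order_symbol.replace('.spot', '')  # 'xbteur'
psym: str = bs_fqme.upper()                       # 'XBTEUR'
pair: str = f'{psym[:3]}/{psym[3:]}'              # 'XBT/EUR'
assert pair == 'XBT/EUR'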

@ -257,7 +265,7 @@ async def handle_order_requests(
                await ws.send_msg(req)

                # placehold for sanity checking in relay loop
                apiflows[reqid].maps.append(msg)
                apiflows.add_msg(reqid, msg)

            case _:
                account = msg.get('account')
@ -363,22 +371,23 @@ async def subscribe(


def trades2pps(
    table: PpTable,
    acnt: Account,
    ledger: TransactionLedger,
    acctid: str,
    new_trans: dict[str, Transaction] = {},

) -> tuple[
    list[BrokerdPosition],
    list[Transaction],
]:
    write_storage: bool = True,

) -> list[BrokerdPosition]:
    if new_trans:
        updated = table.update_from_trans(
        updated = acnt.update_from_ledger(
            new_trans,
            symcache=ledger.symcache,
        )
        log.info(f'Updated pps:\n{pformat(updated)}')

    pp_entries, closed_pp_objs = table.dump_active()
    pp_objs: dict[Union[str, int], Position] = table.pps
    pp_entries, closed_pp_objs = acnt.dump_active()
    pp_objs: dict[Union[str, int], Position] = acnt.pps

    pps: dict[int, Position]
    position_msgs: list[dict] = []
@ -392,42 +401,45 @@ def trades2pps(
                # backend suffix prefixed but when
                # reading accounts from ledgers we
                # don't need it and/or it's prefixed
                # in the section table.. we should
                # in the section acnt.. we should
                # just strip this from the message
                # right since `.broker` is already
                # included?
                account='kraken.' + acctid,
                symbol=p.symbol.front_fqsn(),
                size=p.size,
                symbol=p.mkt.fqme,
                size=p.cumsize,
                avg_price=p.ppu,
                currency='',
            )
            position_msgs.append(msg)

    if write_storage:
        # TODO: ideally this blocks this task
        # as little as possible. we need to either do
        # these writes in another actor, or try out `trio`'s
        # async file IO api?
        acnt.write_config()

    return position_msgs


@tractor.context
async def trades_dialogue(
async def open_trade_dialog(
    ctx: tractor.Context,
    loglevel: str = None,

) -> AsyncIterator[dict[str, Any]]:

    # XXX: required to propagate ``tractor`` loglevel to ``piker`` logging
    get_console_log(loglevel or tractor.current_actor().loglevel)

    async with get_client() as client:

    async with (
        # TODO: maybe bind these together and deliver
        # a tuple from `.open_cached_client()`?
        open_cached_client('kraken') as client,
        open_symcache('kraken') as symcache,
    ):
        # make ems flip to paper mode when no creds setup in
        # `brokers.toml` B0
        if not client._api_key:
            raise RuntimeError(
                'Missing Kraken API key in `brokers.toml`!?!?')

        # TODO: make ems flip to paper mode via
        # some returned signal if the user only wants to use
        # the data feed or we return this?
        # else:
        #     await ctx.started(({}, ['paper']))
            await ctx.started('paper')
            return

        # NOTE: currently we expect the user to define a "source fiat"
        # (much like the web UI lets you set an "account currency")
@@ -440,10 +452,7 @@ async def trades_dialogue(
        acc_name = 'kraken.' + acctid

        # task local msg dialog tracking
-        apiflows: defaultdict[
-            int,
-            ChainMap[dict[str, dict]],
-        ] = defaultdict(ChainMap)
+        apiflows = OrderDialogs()

        # 2way map for ems ids to kraken int reqids..
        ids: bidict[str, int] = bidict()
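`OrderDialogs` supersedes the raw `defaultdict[int, ChainMap]`; judging only from its usage in this diff (`.add_msg()` and `.get()`), a minimal stand-in might look like the sketch below (not the actual `piker` implementation):

    from collections import ChainMap, defaultdict

    class OrderDialogsSketch:
        '''
        Minimal stand-in for the `OrderDialogs` api used in this diff:
        append per-request msgs and read back the most recent value
        for a field across the msg history.

        '''
        def __init__(self) -> None:
            self._dialogs: defaultdict[int, ChainMap] = defaultdict(ChainMap)

        def add_msg(self, reqid: int, msg: dict) -> None:
            # newest msg shadows older ones on field lookups
            self._dialogs[reqid] = self._dialogs[reqid].new_child(msg)

        def get(self, reqid: int) -> ChainMap | None:
            return self._dialogs.get(reqid)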
@@ -455,8 +464,8 @@ async def trades_dialogue(
        # - delete the *ABSOLUTE LAST* entry from account's corresponding
        #   trade ledgers file (NOTE this MUST be the last record
        #   delivered from the api ledger),
-        # - open you ``pps.toml`` and find that same tid and delete it
-        #   from the pp's clears table,
+        # - open you ``account.kraken.spot.toml`` and find that
+        #   same tid and delete it from the pos's clears table,
        # - set this flag to `True`
        #
        # You should see an update come in after the order mode
@@ -467,178 +476,85 @@ async def trades_dialogue(
        # update things correctly.
        simulate_pp_update: bool = False

+        acnt: Account
+        ledger: TransactionLedger
        with (
-            open_pps(
+            open_account(
                'kraken',
-                acctid
-            ) as table,
+                acctid,
+                write_on_exit=True,
+            ) as acnt,

            open_trade_ledger(
                'kraken',
-                acctid
-            ) as ledger_dict,
+                acctid,
+                symcache=symcache,
+            ) as ledger,
        ):
-            # transaction-ify the ledger entries
-            ledger_trans = norm_trade_records(ledger_dict)
+            # TODO: loading ledger entries should all be done
+            # within a newly implemented `async with open_account()
+            # as acnt` where `Account.ledger: TransactionLedger`
+            # can be used to explicitily update and write the
+            # offline TOML files!
+            # ------ - ------
+            # MOL the init sequence is:
+            # - get `Account` (with presumed pre-loaded ledger done
+            #   beind the scenes as part of ctx enter).
+            # - pull new trades from API, update the ledger with
+            #   normalized to `Transaction` entries of those
+            #   records, presumably (and implicitly) update the
+            #   acnt state including expiries, positions,
+            #   transfers..), and finally of course existing
+            #   per-asset balances.
+            # - validate all pos and balances ensuring there's
+            #   no seemingly noticeable discrepancies?
+
+            # LOAD and transaction-ify the EXISTING LEDGER
+            ledger_trans: dict[str, Transaction] = await norm_trade_records(
+                ledger,
+                client,
+                api_name_set='xname',
+            )
+
+            if not acnt.pps:
+                acnt.update_from_ledger(
+                    ledger_trans,
+                    symcache=ledger.symcache,
+                )
+                acnt.write_config()

            # TODO: eventually probably only load
            # as far back as it seems is not deliverd in the
            # most recent 50 trades and assume that by ordering we
-            # already have those records in the ledger.
-            tids2trades = await client.get_trades()
-            ledger_dict.update(tids2trades)
-            api_trans = norm_trade_records(tids2trades)
+            # already have those records in the ledger?
+            tids2trades: dict[str, dict] = await client.get_trades()
+            ledger.update(tids2trades)
+            if tids2trades:
+                ledger.write_config()
+
+            api_trans: dict[str, Transaction] = await norm_trade_records(
+                tids2trades,
+                client,
+                api_name_set='xname',
+            )

            # retrieve kraken reported balances
            # and do diff with ledger to determine
            # what amount of trades-transactions need
            # to be reloaded.
-            balances = await client.get_balances()
-            for dst, size in balances.items():
-                # we don't care about tracking positions
-                # in the user's source fiat currency.
-                if (
-                    dst == src_fiat
-                    or not any(
-                        dst in bsuid for bsuid in table.pps
-                    )
-                ):
-                    log.warning(
-                        f'Skipping balance `{dst}`:{size} for position calcs!'
-                    )
-                    continue
+            balances: dict[str, float] = await client.get_balances()

-                def get_likely_pair(
-                    dst: str,
-                    bsuid: str,
-                    src_fiat: str = src_fiat
+            verify_balances(
+                acnt,
+                src_fiat,
+                balances,
+                client,
+                ledger,
+                ledger_trans,
+                api_trans,
+            )

-                ) -> str:
-                    '''
-                    Attempt to get the likely trading pair masting
-                    a given destination asset `dst: str`.
-
-                    '''
-                    try:
-                        src_name_start = bsuid.rindex(src_fiat)
-                    except (
-                        ValueError,   # substr not found
-                    ):
-                        # TODO: handle nested positions..(i.e.
-                        # positions where the src fiat was used to
-                        # buy some other dst which was furhter used
-                        # to buy another dst..)
-                        log.warning(
-                            f'No src fiat {src_fiat} found in {bsuid}?'
-                        )
-                        return
-
-                    likely_dst = bsuid[:src_name_start]
-                    if likely_dst == dst:
-                        return bsuid
-
-                def has_pp(
-                    dst: str,
-                    size: float,
-
-                ) -> Position | bool:
-
-                    src2dst: dict[str, str] = {}
-
-                    for bsuid in table.pps:
-                        likely_pair = get_likely_pair(dst, bsuid)
-                        if likely_pair:
-                            src2dst[src_fiat] = dst
-
-                    for src, dst in src2dst.items():
-                        pair = f'{dst}{src_fiat}'
-                        pp = table.pps.get(pair)
-                        if (
-                            pp
-                            and math.isclose(pp.size, size)
-                        ):
-                            return pp
-
-                        elif (
-                            size == 0
-                            and pp.size
-                        ):
-                            log.warning(
-                                f'`kraken` account says you have  a ZERO '
-                                f'balance for {bsuid}:{pair}\n'
-                                f'but piker seems to think `{pp.size}`\n'
-                                'This is likely a discrepancy in piker '
-                                'accounting if the above number is'
-                                "large,' though it's likely to due lack"
-                                "f tracking xfers fees.."
-                            )
-                            return pp
-
-                    return False
-
-                pos = has_pp(dst, size)
-                if not pos:
-
-                    # we have a balance for which there is no pp
-                    # entry? so we have to likely update from the
-                    # ledger.
-                    updated = table.update_from_trans(ledger_trans)
-                    log.info(f'Updated pps from ledger:\n{pformat(updated)}')
-                    pos = has_pp(dst, size)
-
-                    if (
-                        not pos
-                        and not simulate_pp_update
-                    ):
-                        # try reloading from API
-                        table.update_from_trans(api_trans)
-                        pos = has_pp(dst, size)
-                        if not pos:
-
-                            # get transfers to make sense of abs balances.
-                            # NOTE: we do this after ledger and API
-                            # loading since we might not have an entry
-                            # in the ``pps.toml`` for the necessary pair
-                            # yet and thus this likely pair grabber will
-                            # likely fail.
-                            for bsuid in table.pps:
-                                likely_pair = get_likely_pair(dst, bsuid)
-                                if likely_pair:
-                                    break
-                            else:
-                                raise ValueError(
-                                    'Could not find a position pair in '
-                                    'ledger for likely widthdrawal '
-                                    f'candidate: {dst}'
-                                )
-
-                            if likely_pair:
-                                # this was likely pp that had a withdrawal
-                                # from the dst asset out of the account.
-
-                                xfer_trans = await client.get_xfers(
-                                    dst,
-                                    # TODO: not all src assets are
-                                    # 3 chars long...
-                                    src_asset=likely_pair[3:],
-                                )
-                                if xfer_trans:
-                                    updated = table.update_from_trans(
-                                        xfer_trans,
-                                        cost_scalar=1,
-                                    )
-                                    log.info(
-                                        f'Updated {dst} from transfers:\n'
-                                        f'{pformat(updated)}'
-                                    )
-
-                        if has_pp(dst, size):
-                            raise ValueError(
-                                'Could not reproduce balance:\n'
-                                f'dst: {dst}, {size}\n'
-                            )

-            # only for simulate-testing a "new fill" since
+            # XXX NOTE: only for simulate-testing a "new fill" since
            # otherwise we have to actually conduct a live clear.
            if simulate_pp_update:
                tid = list(tids2trades)[0]
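The removed `get_likely_pair()` logic (now provided by `piker.accounting`, per the new ledger module at the bottom of this diff) keys off a reverse substring search for the source fiat; a worked sketch:

    # sketch of the likely-pair matching removed above: find the src
    # fiat as a suffix of a pp's market id and check that whatever
    # precedes it is the balance's dst asset.
    src_fiat = 'USD'
    bsuid = 'XBTUSD'      # hypothetical pp key
    dst = 'XBT'           # asset with a non-zero balance

    src_name_start = bsuid.rindex(src_fiat)   # -> 3
    likely_dst = bsuid[:src_name_start]       # -> 'XBT'
    assert likely_dst == dst                  # balance matches this pp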
@@ -646,20 +562,28 @@ async def trades_dialogue(
                # stage a first reqid of `0`
                reqids2txids[0] = last_trade_dict['ordertxid']

-            ppmsgs = trades2pps(
-                table,
+            ppmsgs: list[BrokerdPosition] = trades2pps(
+                acnt,
+                ledger,
                acctid,
            )
            # sync with EMS delivering pps and accounts
            await ctx.started((ppmsgs, [acc_name]))

+            # TODO: ideally this blocks the this task
+            # as little as possible. we need to either do
+            # these writes in another actor, or try out `trio`'s
+            # async file IO api?
+            acnt.write_config()
+
            # Get websocket token for authenticated data stream
            # Assert that a token was actually received.
            resp = await client.endpoint('GetWebSocketsToken', {})
-            err = resp.get('error')
-            if err:
+            if err := resp.get('error'):
                raise BrokerError(err)

-            token = resp['result']['token']
+            # resp token for ws init
+            token: str = resp['result']['token']

+            ws: NoBsWs
            async with (
			
			@ -674,8 +598,6 @@ async def trades_dialogue(
 | 
			
		|||
                aclosing(stream_messages(ws)) as stream,
 | 
			
		||||
                trio.open_nursery() as nurse,
 | 
			
		||||
            ):
 | 
			
		||||
                stream = stream_messages(ws)
 | 
			
		||||
 | 
			
		||||
                # task for processing inbound requests from ems
 | 
			
		||||
                nurse.start_soon(
 | 
			
		||||
                    handle_order_requests,
 | 
			
		||||
| 
						 | 
				
			
@@ -690,32 +612,35 @@ async def trades_dialogue(

                # enter relay loop
                await handle_order_updates(
-                    ws,
-                    stream,
-                    ems_stream,
-                    apiflows,
-                    ids,
-                    reqids2txids,
-                    table,
-                    api_trans,
-                    acctid,
-                    acc_name,
-                    token,
+                    client=client,
+                    ws=ws,
+                    ws_stream=stream,
+                    ems_stream=ems_stream,
+                    apiflows=apiflows,
+                    ids=ids,
+                    reqids2txids=reqids2txids,
+                    acnt=acnt,
+                    ledger=ledger,
+                    acctid=acctid,
+                    acc_name=acc_name,
+                    token=token,
                )


async def handle_order_updates(
+    client: Client,  # only for pairs table needed in ledger proc
    ws: NoBsWs,
    ws_stream: AsyncIterator,
    ems_stream: tractor.MsgStream,
-    apiflows: dict[int, ChainMap[dict[str, dict]]],
+    apiflows: OrderDialogs,
    ids: bidict[str, int],
    reqids2txids: bidict[int, str],
-    table: PpTable,
+    acnt: Account,

    # transaction records which will be updated
    # on new trade clearing events (aka order "fills")
-    ledger_trans: dict[str, Transaction],
+    ledger: TransactionLedger,
+    # ledger_trans: dict[str, Transaction],
    acctid: str,
    acc_name: str,
    token: str,
			
			@ -724,8 +649,8 @@ async def handle_order_updates(
 | 
			
		|||
    '''
 | 
			
		||||
    Main msg handling loop for all things order management.
 | 
			
		||||
 | 
			
		||||
    This code is broken out to make the context explicit and state variables
 | 
			
		||||
    defined in the signature clear to the reader.
 | 
			
		||||
    This code is broken out to make the context explicit and state
 | 
			
		||||
    variables defined in the signature clear to the reader.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    async for msg in ws_stream:
 | 
			
		||||
| 
						 | 
				
			
			@ -733,7 +658,7 @@ async def handle_order_updates(
 | 
			
		|||
 | 
			
		||||
            # TODO: turns out you get the fill events from the
 | 
			
		||||
            # `openOrders` before you get this, so it might be better
 | 
			
		||||
            # to do all fill/status/pp updates in that sub and just use
 | 
			
		||||
            # to do all fill/status/pos updates in that sub and just use
 | 
			
		||||
            # this one for ledger syncs?
 | 
			
		||||
 | 
			
		||||
            # For eg. we could take the "last 50 trades" and do a diff
 | 
			
		||||
| 
						 | 
				
			
			@ -775,7 +700,8 @@ async def handle_order_updates(
 | 
			
		|||
                    # if tid not in ledger_trans
 | 
			
		||||
                }
 | 
			
		||||
                for tid, trade in trades.items():
 | 
			
		||||
                    assert tid not in ledger_trans
 | 
			
		||||
                    # assert tid not in ledger_trans
 | 
			
		||||
                    assert tid not in ledger
 | 
			
		||||
                    txid = trade['ordertxid']
 | 
			
		||||
                    reqid = trade.get('userref')
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -818,17 +744,25 @@ async def handle_order_updates(
 | 
			
		|||
                    )
 | 
			
		||||
                    await ems_stream.send(status_msg)
 | 
			
		||||
 | 
			
		||||
                new_trans = norm_trade_records(trades)
 | 
			
		||||
                ppmsgs = trades2pps(
 | 
			
		||||
                    table,
 | 
			
		||||
                    acctid,
 | 
			
		||||
                    new_trans,
 | 
			
		||||
                new_trans = await norm_trade_records(
 | 
			
		||||
                    trades,
 | 
			
		||||
                    client,
 | 
			
		||||
                    api_name_set='wsname',
 | 
			
		||||
                )
 | 
			
		||||
                ppmsgs: list[BrokerdPosition] = trades2pps(
 | 
			
		||||
                    acnt=acnt,
 | 
			
		||||
                    ledger=ledger,
 | 
			
		||||
                    acctid=acctid,
 | 
			
		||||
                    new_trans=new_trans,
 | 
			
		||||
                )
 | 
			
		||||
                # ppmsgs = trades2pps(
 | 
			
		||||
                #     acnt,
 | 
			
		||||
                #     acctid,
 | 
			
		||||
                #     new_trans,
 | 
			
		||||
                # )
 | 
			
		||||
                for pp_msg in ppmsgs:
 | 
			
		||||
                    await ems_stream.send(pp_msg)
 | 
			
		||||
 | 
			
		||||
                ledger_trans.update(new_trans)
 | 
			
		||||
 | 
			
		||||
            # process and relay order state change events
 | 
			
		||||
            # https://docs.kraken.com/websockets/#message-openOrders
 | 
			
		||||
            case [
 | 
			
		||||
| 
						 | 
				
			
			@ -870,8 +804,9 @@ async def handle_order_updates(
 | 
			
		|||
                    # 'vol_exec': exec_vlm}  # 0.0000
 | 
			
		||||
                    match update_msg:
 | 
			
		||||
 | 
			
		||||
                        # EMS-unknown LIVE order that needs to be
 | 
			
		||||
                        # delivered and loaded on the client-side.
 | 
			
		||||
                        # EMS-unknown pre-exising-submitted LIVE
 | 
			
		||||
                        # order that needs to be delivered and
 | 
			
		||||
                        # loaded on the client-side.
 | 
			
		||||
                        case {
 | 
			
		||||
                            'userref': reqid,
 | 
			
		||||
                            'descr': {
 | 
			
		||||
| 
						 | 
				
			
			@ -890,7 +825,7 @@ async def handle_order_updates(
 | 
			
		|||
                            ids.inverse.get(reqid) is None
 | 
			
		||||
                        ):
 | 
			
		||||
                            # parse out existing live order
 | 
			
		||||
                            fqsn = pair.replace('/', '').lower()
 | 
			
		||||
                            fqme = pair.replace('/', '').lower() + '.spot'
 | 
			
		||||
                            price = float(price)
 | 
			
		||||
                            size = float(vol)
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -917,14 +852,14 @@ async def handle_order_updates(
 | 
			
		|||
                                    action=action,
 | 
			
		||||
                                    exec_mode='live',
 | 
			
		||||
                                    oid=oid,
 | 
			
		||||
                                    symbol=fqsn,
 | 
			
		||||
                                    symbol=fqme,
 | 
			
		||||
                                    account=acc_name,
 | 
			
		||||
                                    price=price,
 | 
			
		||||
                                    size=size,
 | 
			
		||||
                                ),
 | 
			
		||||
                                src='kraken',
 | 
			
		||||
                            )
 | 
			
		||||
                            apiflows[reqid].maps.append(status_msg.to_dict())
 | 
			
		||||
                            apiflows.add_msg(reqid, status_msg.to_dict())
 | 
			
		||||
                            await ems_stream.send(status_msg)
 | 
			
		||||
                            continue
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -1060,7 +995,7 @@ async def handle_order_updates(
 | 
			
		|||
                                ),
 | 
			
		||||
                            )
 | 
			
		||||
 | 
			
		||||
                            apiflows[reqid].maps.append(update_msg)
 | 
			
		||||
                            apiflows.add_msg(reqid, update_msg)
 | 
			
		||||
                            await ems_stream.send(resp)
 | 
			
		||||
 | 
			
		||||
                        # fill msg.
 | 
			
		||||
| 
						 | 
				
			
			@ -1139,9 +1074,8 @@ async def handle_order_updates(
 | 
			
		|||
                    )
 | 
			
		||||
                    continue
 | 
			
		||||
 | 
			
		||||
                # update the msg chain
 | 
			
		||||
                chain = apiflows[reqid]
 | 
			
		||||
                chain.maps.append(event)
 | 
			
		||||
                # update the msg history
 | 
			
		||||
                apiflows.add_msg(reqid, event)
 | 
			
		||||
 | 
			
		||||
                if status == 'error':
 | 
			
		||||
                    # any of ``{'add', 'edit', 'cancel'}``
 | 
			
		||||
| 
						 | 
				
			
			@ -1151,11 +1085,16 @@ async def handle_order_updates(
 | 
			
		|||
                        f'Failed to {action} order {reqid}:\n'
 | 
			
		||||
                        f'{errmsg}'
 | 
			
		||||
                    )
 | 
			
		||||
 | 
			
		||||
                    symbol: str = 'N/A'
 | 
			
		||||
                    if chain := apiflows.get(reqid):
 | 
			
		||||
                        symbol: str = chain.get('symbol', 'N/A')
 | 
			
		||||
 | 
			
		||||
                    await ems_stream.send(BrokerdError(
 | 
			
		||||
                        oid=oid,
 | 
			
		||||
                        # XXX: use old reqid in case it changed?
 | 
			
		||||
                        reqid=reqid,
 | 
			
		||||
                        symbol=chain.get('symbol', 'N/A'),
 | 
			
		||||
                        symbol=symbol,
 | 
			
		||||
 | 
			
		||||
                        reason=f'Failed {action}:\n{errmsg}',
 | 
			
		||||
                        broker_details=event
 | 
			
		||||
| 
						 | 
				
			
			@ -1180,47 +1119,3 @@ async def handle_order_updates(
 | 
			
		|||
                        })
 | 
			
		||||
            case _:
 | 
			
		||||
                log.warning(f'Unhandled trades update msg: {msg}')
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def norm_trade_records(
 | 
			
		||||
    ledger: dict[str, Any],
 | 
			
		||||
 | 
			
		||||
) -> dict[str, Transaction]:
 | 
			
		||||
 | 
			
		||||
    records: dict[str, Transaction] = {}
 | 
			
		||||
 | 
			
		||||
    for tid, record in ledger.items():
 | 
			
		||||
 | 
			
		||||
        size = float(record.get('vol')) * {
 | 
			
		||||
            'buy': 1,
 | 
			
		||||
            'sell': -1,
 | 
			
		||||
        }[record['type']]
 | 
			
		||||
 | 
			
		||||
        # we normalize to kraken's `altname` always..
 | 
			
		||||
        bsuid, pair_info = Client.normalize_symbol(record['pair'])
 | 
			
		||||
        fqsn = f'{bsuid}.kraken'
 | 
			
		||||
 | 
			
		||||
        mktpair = Symbol.from_fqsn(
 | 
			
		||||
            fqsn,
 | 
			
		||||
            info={
 | 
			
		||||
                'lot_size_digits': pair_info.lot_decimals,
 | 
			
		||||
                'tick_size_digits': pair_info.pair_decimals,
 | 
			
		||||
                'asset_type': 'crypto',
 | 
			
		||||
            },
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        records[tid] = Transaction(
 | 
			
		||||
            fqsn=fqsn,
 | 
			
		||||
            sym=mktpair,
 | 
			
		||||
            tid=tid,
 | 
			
		||||
            size=size,
 | 
			
		||||
            price=float(record['price']),
 | 
			
		||||
            cost=float(record['fee']),
 | 
			
		||||
            dt=pendulum.from_timestamp(float(record['time'])),
 | 
			
		||||
            bsuid=bsuid,
 | 
			
		||||
 | 
			
		||||
            # XXX: there are no derivs on kraken right?
 | 
			
		||||
            # expiry=expiry,
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    return records
 | 
			
		||||
| 
						 | 
				
			
			
 | 
			
		|||
| 
						 | 
				
			
@@ -18,40 +18,44 @@
Real-time and historical data feed endpoints.

'''
-from contextlib import asynccontextmanager as acm
+from contextlib import (
+    asynccontextmanager as acm,
+    aclosing,
+)
from datetime import datetime
from typing import (
    Any,
-    Optional,
+    AsyncGenerator,
    Callable,
+    Optional,
)
import time

from fuzzywuzzy import process as fuzzy
import numpy as np
import pendulum
from trio_typing import TaskStatus
-from trio_util import trio_async_generator
import tractor
import trio

-from piker._cacheables import open_cached_client
+from piker.accounting._mktinfo import (
+    MktPair,
+)
+from piker.brokers import (
+    open_cached_client,
+)
from piker.brokers._util import (
    BrokerError,
    DataThrottle,
    DataUnavailable,
)
-from piker.log import get_console_log
-from piker.data.types import Struct
+from piker.types import Struct
from piker.data.validate import FeedInit
from piker.data._web_bs import open_autorecon_ws, NoBsWs
-from . import log
from .api import (
    Client,
    Pair,
+    log,
)
+from .symbols import get_mkt_info


-class OHLC(Struct):
+class OHLC(Struct, frozen=True):
    '''
    Description of the flattened OHLC quote format.

@@ -62,6 +66,8 @@ class OHLC(Struct):
    chan_id: int  # internal kraken id
    chan_name: str  # eg. ohlc-1  (name-interval)
    pair: str  # fx pair
+
+    # unpacked from array
    time: float  # Begin time of interval, in seconds since epoch
    etime: float  # End time of interval, in seconds since epoch
    open: float  # Open price of interval
			
			@ -71,8 +77,6 @@ class OHLC(Struct):
 | 
			
		|||
    vwap: float  # Volume weighted average price within interval
 | 
			
		||||
    volume: float  # Accumulated volume **within interval**
 | 
			
		||||
    count: int  # Number of trades within interval
 | 
			
		||||
    # (sampled) generated tick data
 | 
			
		||||
    ticks: list[Any] = []
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def stream_messages(
 | 
			
		||||
| 
						 | 
				
			
			@ -85,26 +89,9 @@ async def stream_messages(
 | 
			
		|||
    though a single async generator.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    too_slow_count = last_hb = 0
 | 
			
		||||
 | 
			
		||||
    while True:
 | 
			
		||||
 | 
			
		||||
        with trio.move_on_after(5) as cs:
 | 
			
		||||
            msg = await ws.recv_msg()
 | 
			
		||||
 | 
			
		||||
        # trigger reconnection if heartbeat is laggy
 | 
			
		||||
        if cs.cancelled_caught:
 | 
			
		||||
 | 
			
		||||
            too_slow_count += 1
 | 
			
		||||
 | 
			
		||||
            if too_slow_count > 20:
 | 
			
		||||
                log.warning(
 | 
			
		||||
                    "Heartbeat is too slow, resetting ws connection")
 | 
			
		||||
 | 
			
		||||
                await ws._connect()
 | 
			
		||||
                too_slow_count = 0
 | 
			
		||||
                continue
 | 
			
		||||
    last_hb: float = 0
 | 
			
		||||
 | 
			
		||||
    async for msg in ws:
 | 
			
		||||
        match msg:
 | 
			
		||||
            case {'event': 'heartbeat'}:
 | 
			
		||||
                now = time.time()
 | 
			
		||||
| 
						 | 
				
			
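For reference, the watchdog being deleted here combined `trio.move_on_after()` with a miss counter to force a reconnect on a stalled heartbeat; a condensed sketch of that pattern (assuming the `ws.recv_msg()`/`ws._connect()` api from the removed lines):

    import trio

    async def recv_with_watchdog(ws, timeout: float = 5, max_misses: int = 20):
        # sketch of the removed heartbeat watchdog: bail out of a
        # blocked recv and reconnect once too many timeouts stack up.
        misses = 0
        while True:
            with trio.move_on_after(timeout) as cs:
                return await ws.recv_msg()

            if cs.cancelled_caught:
                misses += 1
                if misses > max_misses:
                    await ws._connect()
                    misses = 0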
@@ -122,7 +109,6 @@ async def stream_messages(
                yield msg


-@trio_async_generator
async def process_data_feed_msgs(
    ws: NoBsWs,
):
			
			@ -130,90 +116,99 @@ async def process_data_feed_msgs(
 | 
			
		|||
    Parse and pack data feed messages.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    async for msg in stream_messages(ws):
 | 
			
		||||
        match msg:
 | 
			
		||||
            case {
 | 
			
		||||
                'errorMessage': errmsg
 | 
			
		||||
            }:
 | 
			
		||||
                raise BrokerError(errmsg)
 | 
			
		||||
    async with aclosing(stream_messages(ws)) as ws_stream:
 | 
			
		||||
        async for msg in ws_stream:
 | 
			
		||||
            match msg:
 | 
			
		||||
                case {
 | 
			
		||||
                    'errorMessage': errmsg
 | 
			
		||||
                }:
 | 
			
		||||
                    raise BrokerError(errmsg)
 | 
			
		||||
 | 
			
		||||
            case {
 | 
			
		||||
                'event': 'subscriptionStatus',
 | 
			
		||||
            } as sub:
 | 
			
		||||
                log.info(
 | 
			
		||||
                    'WS subscription is active:\n'
 | 
			
		||||
                    f'{sub}'
 | 
			
		||||
                )
 | 
			
		||||
                continue
 | 
			
		||||
 | 
			
		||||
            case [
 | 
			
		||||
                chan_id,
 | 
			
		||||
                *payload_array,
 | 
			
		||||
                chan_name,
 | 
			
		||||
                pair
 | 
			
		||||
            ]:
 | 
			
		||||
                if 'ohlc' in chan_name:
 | 
			
		||||
                    ohlc = OHLC(
 | 
			
		||||
                        chan_id,
 | 
			
		||||
                        chan_name,
 | 
			
		||||
                        pair,
 | 
			
		||||
                        *payload_array[0]
 | 
			
		||||
                case {
 | 
			
		||||
                    'event': 'subscriptionStatus',
 | 
			
		||||
                } as sub:
 | 
			
		||||
                    log.info(
 | 
			
		||||
                        'WS subscription is active:\n'
 | 
			
		||||
                        f'{sub}'
 | 
			
		||||
                    )
 | 
			
		||||
                    ohlc.typecast()
 | 
			
		||||
                    yield 'ohlc', ohlc
 | 
			
		||||
                    continue
 | 
			
		||||
 | 
			
		||||
                elif 'spread' in chan_name:
 | 
			
		||||
                case [
 | 
			
		||||
                    chan_id,
 | 
			
		||||
                    *payload_array,
 | 
			
		||||
                    chan_name,
 | 
			
		||||
                    pair
 | 
			
		||||
                ]:
 | 
			
		||||
                    if 'ohlc' in chan_name:
 | 
			
		||||
                        array: list = payload_array[0]
 | 
			
		||||
                        ohlc = OHLC(
 | 
			
		||||
                            chan_id,
 | 
			
		||||
                            chan_name,
 | 
			
		||||
                            pair,
 | 
			
		||||
                            *map(float, array[:-1]),
 | 
			
		||||
                            count=array[-1],
 | 
			
		||||
                        )
 | 
			
		||||
                        yield 'ohlc', ohlc.copy()
 | 
			
		||||
 | 
			
		||||
                    bid, ask, ts, bsize, asize = map(
 | 
			
		||||
                        float, payload_array[0])
 | 
			
		||||
                    elif 'spread' in chan_name:
 | 
			
		||||
 | 
			
		||||
                    # TODO: really makes you think IB has a horrible API...
 | 
			
		||||
                    quote = {
 | 
			
		||||
                        'symbol': pair.replace('/', ''),
 | 
			
		||||
                        'ticks': [
 | 
			
		||||
                            {'type': 'bid', 'price': bid, 'size': bsize},
 | 
			
		||||
                            {'type': 'bsize', 'price': bid, 'size': bsize},
 | 
			
		||||
                        bid, ask, ts, bsize, asize = map(
 | 
			
		||||
                            float, payload_array[0])
 | 
			
		||||
 | 
			
		||||
                            {'type': 'ask', 'price': ask, 'size': asize},
 | 
			
		||||
                            {'type': 'asize', 'price': ask, 'size': asize},
 | 
			
		||||
                        ],
 | 
			
		||||
                    }
 | 
			
		||||
                    yield 'l1', quote
 | 
			
		||||
                        # TODO: really makes you think IB has a horrible API...
 | 
			
		||||
                        quote = {
 | 
			
		||||
                            'symbol': pair.replace('/', ''),
 | 
			
		||||
                            'ticks': [
 | 
			
		||||
                                {'type': 'bid', 'price': bid, 'size': bsize},
 | 
			
		||||
                                {'type': 'bsize', 'price': bid, 'size': bsize},
 | 
			
		||||
 | 
			
		||||
                # elif 'book' in msg[-2]:
 | 
			
		||||
                #     chan_id, *payload_array, chan_name, pair = msg
 | 
			
		||||
                #     print(msg)
 | 
			
		||||
                                {'type': 'ask', 'price': ask, 'size': asize},
 | 
			
		||||
                                {'type': 'asize', 'price': ask, 'size': asize},
 | 
			
		||||
                            ],
 | 
			
		||||
                        }
 | 
			
		||||
                        yield 'l1', quote
 | 
			
		||||
 | 
			
		||||
            case _:
 | 
			
		||||
                print(f'UNHANDLED MSG: {msg}')
 | 
			
		||||
                # yield msg
 | 
			
		||||
                    # elif 'book' in msg[-2]:
 | 
			
		||||
                    #     chan_id, *payload_array, chan_name, pair = msg
 | 
			
		||||
                    #     print(msg)
 | 
			
		||||
 | 
			
		||||
                case {
 | 
			
		||||
                    'connectionID': conid,
 | 
			
		||||
                    'event': 'systemStatus',
 | 
			
		||||
                    'status': 'online',
 | 
			
		||||
                    'version': ver,
 | 
			
		||||
                }:
 | 
			
		||||
                    log.info(
 | 
			
		||||
                        f'Established {ver} ws connection with id: {conid}'
 | 
			
		||||
                    )
 | 
			
		||||
                    continue
 | 
			
		||||
 | 
			
		||||
                case _:
 | 
			
		||||
                    print(f'UNHANDLED MSG: {msg}')
 | 
			
		||||
                    # yield msg
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
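The new ctor call float-casts every payload field except the trailing trade count, since kraken ships the ohlc values as strings; a sketch with made-up sample values:

    # sketch: what the new `OHLC(...)` call unpacks; the payload
    # numbers arrive as strings except the trailing trade count.
    array = [
        '1688671200.0', '1688671260.0',   # time, etime
        '30000.1', '30010.0',             # open, high
        '29995.0', '30005.5',             # low, close
        '30002.2', '1.25',                # vwap, volume
        42,                               # count (already an int)
    ]
    fields = [*map(float, array[:-1]), array[-1]]
    # -> 8 floats plus the int count, matching the `OHLC` field order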
-def normalize(
-    ohlc: OHLC,
-
-) -> dict:
+def normalize(ohlc: OHLC) -> dict:
+    '''
+    Norm an `OHLC` msg to piker's minimal (live-)quote schema.
+
+    '''
    quote = ohlc.to_dict()
    quote['broker_ts'] = quote['time']
    quote['brokerd_ts'] = time.time()
    quote['symbol'] = quote['pair'] = quote['pair'].replace('/', '')
    quote['last'] = quote['close']
    quote['bar_wap'] = ohlc.vwap

-    # seriously eh? what's with this non-symmetry everywhere
-    # in subscription systems...
-    # XXX: piker style is always lowercases symbols.
-    topic = quote['pair'].replace('/', '').lower()
-
-    # print(quote)
-    return topic, quote
+    return quote


@acm
async def open_history_client(
-    symbol: str,
+    mkt: MktPair,

-) -> tuple[Callable, int]:
+) -> AsyncGenerator[Callable, None]:
+
+    symbol: str = mkt.bs_mktid

    # TODO implement history getter for the new storage layer.
    async with open_cached_client('kraken') as client:
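With the simplified signature the topic derivation moves out of `normalize()` to the caller; the piker-style topic is just the lowercased, de-slashed pair:

    # sketch: the topic munging that used to live in `normalize()`
    pair = 'XBT/USD'
    topic = pair.replace('/', '').lower()   # -> 'xbtusd'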
@@ -283,45 +278,20 @@ async def stream_quotes(
    ``pairs`` must be formatted <crypto_symbol>/<fiat_symbol>.

    '''
-    # XXX: required to propagate ``tractor`` loglevel to piker logging
-    get_console_log(loglevel or tractor.current_actor().loglevel)
-
-    ws_pairs = {}
-    sym_infos = {}
+    ws_pairs: list[str] = []
+    init_msgs: list[FeedInit] = []

-    async with open_cached_client('kraken') as client, send_chan as send_chan:
+    async with (
+        send_chan as send_chan,
+    ):
+        for sym_str in symbols:
+            mkt, pair = await get_mkt_info(sym_str)
+            init_msgs.append(
+                FeedInit(mkt_info=mkt)
+            )

-        # keep client cached for real-time section
-        for sym in symbols:
-
-            # transform to upper since piker style is always lower
-            sym = sym.upper()
-            si: Pair = await client.symbol_info(sym)
-            # try:
-            #     si = Pair(**sym_info)  # validation
-            # except TypeError:
-            #     fields_diff = set(sym_info) - set(Pair.__struct_fields__)
-            #     raise TypeError(
-            #         f'Missing msg fields {fields_diff}'
-            #     )
-            syminfo = si.to_dict()
-            syminfo['price_tick_size'] = 1. / 10**si.pair_decimals
-            syminfo['lot_tick_size'] = 1. / 10**si.lot_decimals
-            syminfo['asset_type'] = 'crypto'
-            sym_infos[sym] = syminfo
-            ws_pairs[sym] = si.wsname
-
-        symbol = symbols[0].lower()
-
-        init_msgs = {
-            # pass back token, and bool, signalling if we're the writer
-            # and that history has been written
-            symbol: {
-                'symbol_info': sym_infos[sym],
-                'shm_write_opts': {'sum_tick_vml': False},
-                'fqsn': sym,
-            },
-        }
+            ws_pairs.append(pair.wsname)

        @acm
        async def subscribe(ws: NoBsWs):
			
			@ -332,7 +302,7 @@ async def stream_quotes(
 | 
			
		|||
            # https://github.com/krakenfx/kraken-wsclient-py/blob/master/kraken_wsclient_py/kraken_wsclient_py.py#L188
 | 
			
		||||
            ohlc_sub = {
 | 
			
		||||
                'event': 'subscribe',
 | 
			
		||||
                'pair': list(ws_pairs.values()),
 | 
			
		||||
                'pair': ws_pairs,
 | 
			
		||||
                'subscription': {
 | 
			
		||||
                    'name': 'ohlc',
 | 
			
		||||
                    'interval': 1,
 | 
			
		||||
| 
						 | 
				
			
			@ -348,7 +318,7 @@ async def stream_quotes(
 | 
			
		|||
            # trade data (aka L1)
 | 
			
		||||
            l1_sub = {
 | 
			
		||||
                'event': 'subscribe',
 | 
			
		||||
                'pair': list(ws_pairs.values()),
 | 
			
		||||
                'pair': ws_pairs,
 | 
			
		||||
                'subscription': {
 | 
			
		||||
                    'name': 'spread',
 | 
			
		||||
                    # 'depth': 10}
 | 
			
		||||
| 
						 | 
				
			
			@ -363,7 +333,7 @@ async def stream_quotes(
 | 
			
		|||
            # unsub from all pairs on teardown
 | 
			
		||||
            if ws.connected():
 | 
			
		||||
                await ws.send_msg({
 | 
			
		||||
                    'pair': list(ws_pairs.values()),
 | 
			
		||||
                    'pair': ws_pairs,
 | 
			
		||||
                    'event': 'unsubscribe',
 | 
			
		||||
                    'subscription': ['ohlc', 'spread'],
 | 
			
		||||
                })
 | 
			
		||||
| 
						 | 
				
			
			@ -378,88 +348,68 @@ async def stream_quotes(
 | 
			
		|||
            open_autorecon_ws(
 | 
			
		||||
                'wss://ws.kraken.com/',
 | 
			
		||||
                fixture=subscribe,
 | 
			
		||||
                reset_after=20,
 | 
			
		||||
            ) as ws,
 | 
			
		||||
 | 
			
		||||
            # avoid stream-gen closure from breaking trio..
 | 
			
		||||
            # NOTE: not sure this actually works XD particularly
 | 
			
		||||
            # if we call `ws._connect()` manally in the streaming
 | 
			
		||||
            # async gen..
 | 
			
		||||
            process_data_feed_msgs(ws) as msg_gen,
 | 
			
		||||
            aclosing(process_data_feed_msgs(ws)) as msg_gen,
 | 
			
		||||
        ):
 | 
			
		||||
            # pull a first quote and deliver
 | 
			
		||||
            typ, ohlc_last = await anext(msg_gen)
 | 
			
		||||
            topic, quote = normalize(ohlc_last)
 | 
			
		||||
            quote = normalize(ohlc_last)
 | 
			
		||||
 | 
			
		||||
            task_status.started((init_msgs,  quote))
 | 
			
		||||
 | 
			
		||||
            # lol, only "closes" when they're margin squeezing clients ;P
 | 
			
		||||
            feed_is_live.set()
 | 
			
		||||
 | 
			
		||||
            # keep start of last interval for volume tracking
 | 
			
		||||
            last_interval_start = ohlc_last.etime
 | 
			
		||||
            last_interval_start: float = ohlc_last.etime
 | 
			
		||||
 | 
			
		||||
            # start streaming
 | 
			
		||||
            async for typ, ohlc in msg_gen:
 | 
			
		||||
 | 
			
		||||
                if typ == 'ohlc':
 | 
			
		||||
            topic: str = mkt.bs_fqme
 | 
			
		||||
            async for typ, quote in msg_gen:
 | 
			
		||||
                match typ:
 | 
			
		||||
 | 
			
		||||
                    # TODO: can get rid of all this by using
 | 
			
		||||
                    # ``trades`` subscription...
 | 
			
		||||
                    # ``trades`` subscription..? Not sure why this
 | 
			
		||||
                    # wasn't used originally? (music queues) zoltannn..
 | 
			
		||||
                    # https://docs.kraken.com/websockets/#message-trade
 | 
			
		||||
                    case 'ohlc':
 | 
			
		||||
                        # generate tick values to match time & sales pane:
 | 
			
		||||
                        # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
 | 
			
		||||
                        volume = quote.volume
 | 
			
		||||
 | 
			
		||||
                    # generate tick values to match time & sales pane:
 | 
			
		||||
                    # https://trade.kraken.com/charts/KRAKEN:BTC-USD?period=1m
 | 
			
		||||
                    volume = ohlc.volume
 | 
			
		||||
                        # new OHLC sample interval
 | 
			
		||||
                        if quote.etime > last_interval_start:
 | 
			
		||||
                            last_interval_start: float = quote.etime
 | 
			
		||||
                            tick_volume: float = volume
 | 
			
		||||
 | 
			
		||||
                    # new OHLC sample interval
 | 
			
		||||
                    if ohlc.etime > last_interval_start:
 | 
			
		||||
                        last_interval_start = ohlc.etime
 | 
			
		||||
                        tick_volume = volume
 | 
			
		||||
                        else:
 | 
			
		||||
                            # this is the tick volume *within the interval*
 | 
			
		||||
                            tick_volume: float = volume - ohlc_last.volume
 | 
			
		||||
 | 
			
		||||
                    else:
 | 
			
		||||
                        # this is the tick volume *within the interval*
 | 
			
		||||
                        tick_volume = volume - ohlc_last.volume
 | 
			
		||||
                        ohlc_last = quote
 | 
			
		||||
                        last = quote.close
 | 
			
		||||
 | 
			
		||||
                    ohlc_last = ohlc
 | 
			
		||||
                    last = ohlc.close
 | 
			
		||||
                        quote = normalize(quote)
 | 
			
		||||
                        ticks = quote.setdefault(
 | 
			
		||||
                            'ticks',
 | 
			
		||||
                            [],
 | 
			
		||||
                        )
 | 
			
		||||
                        if tick_volume:
 | 
			
		||||
                            ticks.append({
 | 
			
		||||
                                'type': 'trade',
 | 
			
		||||
                                'price': last,
 | 
			
		||||
                                'size': tick_volume,
 | 
			
		||||
                            })
 | 
			
		||||
 | 
			
		||||
                    if tick_volume:
 | 
			
		||||
                        ohlc.ticks.append({
 | 
			
		||||
                            'type': 'trade',
 | 
			
		||||
                            'price': last,
 | 
			
		||||
                            'size': tick_volume,
 | 
			
		||||
                        })
 | 
			
		||||
                    case 'l1':
 | 
			
		||||
                        # passthrough quote msg
 | 
			
		||||
                        pass
 | 
			
		||||
 | 
			
		||||
                    topic, quote = normalize(ohlc)
 | 
			
		||||
 | 
			
		||||
                elif typ == 'l1':
 | 
			
		||||
                    quote = ohlc
 | 
			
		||||
                    topic = quote['symbol'].lower()
 | 
			
		||||
                    case _:
 | 
			
		||||
                        log.warning(f'Unknown WSS message: {typ}, {quote}')
 | 
			
		||||
 | 
			
		||||
                await send_chan.send({topic: quote})
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
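Kraken's ohlc `volume` field is cumulative within the current sample interval, so the loop above derives per-update trade size as a delta unless a new interval has started; a worked sketch:

    # sketch: per-tick trade size from kraken's cumulative in-interval
    # volume, mirroring the branch logic above.
    last_interval_start = 1688671260.0
    prev_volume = 1.25            # ohlc_last.volume

    etime, volume = 1688671260.0, 1.40   # update within the same interval
    if etime > last_interval_start:
        tick_volume = volume             # fresh interval: take it whole
    else:
        tick_volume = volume - prev_volume  # ~0.15 traded since last update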
@tractor.context
async def open_symbol_search(
    ctx: tractor.Context,

) -> Client:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.cache_symbols()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:

            async for pattern in stream:

                matches = fuzzy.extractBests(
                    pattern,
                    cache,
                    score_cutoff=50,
                )
                # repack in dict form
                await stream.send({
                    pair[0].altname: pair[0]
                    for pair in matches
                })
			@ -0,0 +1,269 @@
 | 
			
		|||
# piker: trading gear for hackers
 | 
			
		||||
# Copyright (C) Tyler Goodlet (in stewardship for pikers)
 | 
			
		||||
 | 
			
		||||
# This program is free software: you can redistribute it and/or modify
 | 
			
		||||
# it under the terms of the GNU Affero General Public License as published by
 | 
			
		||||
# the Free Software Foundation, either version 3 of the License, or
 | 
			
		||||
# (at your option) any later version.
 | 
			
		||||
 | 
			
		||||
# This program is distributed in the hope that it will be useful,
 | 
			
		||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 | 
			
		||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 | 
			
		||||
# GNU Affero General Public License for more details.
 | 
			
		||||
 | 
			
		||||
# You should have received a copy of the GNU Affero General Public License
 | 
			
		||||
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 | 
			
		||||
 | 
			
		||||
'''
 | 
			
		||||
Trade transaction accounting and normalization.
 | 
			
		||||
 | 
			
		||||
'''
 | 
			
		||||
import math
 | 
			
		||||
from pprint import pformat
 | 
			
		||||
from typing import (
 | 
			
		||||
    Any,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
import pendulum
 | 
			
		||||
 | 
			
		||||
from piker.accounting import (
 | 
			
		||||
    Transaction,
 | 
			
		||||
    Position,
 | 
			
		||||
    Account,
 | 
			
		||||
    get_likely_pair,
 | 
			
		||||
    TransactionLedger,
 | 
			
		||||
    # MktPair,
 | 
			
		||||
)
 | 
			
		||||
from piker.types import Struct
 | 
			
		||||
from piker.data import (
 | 
			
		||||
    SymbologyCache,
 | 
			
		||||
)
 | 
			
		||||
from .api import (
 | 
			
		||||
    log,
 | 
			
		||||
    Client,
 | 
			
		||||
    Pair,
 | 
			
		||||
)
 | 
			
		||||
# from .feed import get_mkt_info
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def norm_trade(
    tid: str,
    record: dict[str, Any],

    # this is the dict that was returned from
    # `Client.get_mkt_pairs()` and when running offline ledger
    # processing from `.accounting`, this will be the table loaded
    # into `SymbologyCache.pairs`.
    pairs: dict[str, Struct],
    symcache: SymbologyCache | None = None,

) -> Transaction:

    size: float = float(record.get('vol')) * {
        'buy': 1,
        'sell': -1,
    }[record['type']]

    # NOTE: this value may be either the websocket OR the rest schema
    # so we need to detect the key format and then choose the
    # correct symbol lookup table to eventually get a ``Pair``..
    # See internals of `Client.asset_pairs()` for deats!
    src_pair_key: str = record['pair']

    # XXX: kraken's data engineering is so bad they require THREE
    # different pair schemas (more or less seemingly tied to
    # transport-APIs)..LITERALLY they return different market id
    # pairs in the ledger endpoints vs. the websocket event subs..
    # lookup pair using the appropriately provided table depending
    # on API-key-schema..
    pair: Pair = pairs[src_pair_key]
    fqme: str = pair.bs_fqme.lower() + '.kraken'

    return Transaction(
        fqme=fqme,
        tid=tid,
        size=size,
        price=float(record['price']),
        cost=float(record['fee']),
        dt=pendulum.from_timestamp(float(record['time'])),
        bs_mktid=pair.bs_mktid,
    )
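
To make the expected input shape concrete, a hedged sketch of normalizing one record; the field values are invented but use exactly the keys (`vol`, `type`, `pair`, `price`, `fee`, `time`) read above, and `pairs` is assumed to be the table from `Client.get_mkt_pairs()`:

record = {
    'vol': '0.25',
    'type': 'sell',
    'pair': 'XXBTZUSD',
    'price': '27000.1',
    'fee': '1.08',
    'time': '1684250000.123',
}
tx = norm_trade('TID-1', record, pairs=pairs)
assert tx.size == -0.25  # sell sizes are sign-flipped
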


async def norm_trade_records(
    ledger: dict[str, Any],
    client: Client,
    api_name_set: str = 'xname',

) -> dict[str, Transaction]:
    '''
    Loop through an input ``dict`` of trade records
    and convert them to ``Transactions``.

    '''
    records: dict[str, Transaction] = {}
    for tid, record in ledger.items():

        # manual_fqme: str = f'{bs_mktid.lower()}.kraken'
        # mkt: MktPair = (await get_mkt_info(manual_fqme))[0]
        # fqme: str = mkt.fqme
        # assert fqme == manual_fqme
        pairs: dict[str, Pair] = {
            'xname': client._AssetPairs,
            'wsname': client._wsnames,
            'altname': client._altnames,
        }[api_name_set]

        records[tid] = norm_trade(
            tid,
            record,
            pairs=pairs,
        )

    return records


def has_pp(
    acnt: Account,
    src_fiat: str,
    dst: str,
    size: float,

) -> Position | None:

    src2dst: dict[str, str] = {}
    for bs_mktid in acnt.pps:
        likely_pair = get_likely_pair(
            src_fiat,
            dst,
            bs_mktid,
        )
        if likely_pair:
            src2dst[src_fiat] = dst

    for src, dst in src2dst.items():
        pair: str = f'{dst}{src_fiat}'
        pos: Position = acnt.pps.get(pair)
        if (
            pos
            and math.isclose(pos.size, size)
        ):
            return pos

        elif (
            size == 0
            and pos.size
        ):
            log.warning(
                f'`kraken` account says you have a ZERO '
                f'balance for {bs_mktid}:{pair}\n'
                f'but piker seems to think `{pos.size}`\n'
                'This is likely a discrepancy in piker '
                'accounting if the above number is '
                "large, though it's likely due to a lack "
                'of tracking xfer fees..'
            )
            return pos

    return None  # indicate no entry found


# TODO: factor most of this "account updating from txns" into the
# `Account` impl so as to provide for hiding the mostly
# cross-provider updates from txn sets
async def verify_balances(
    acnt: Account,
    src_fiat: str,
    balances: dict[str, float],
    client: Client,
    ledger: TransactionLedger,
    ledger_trans: dict[str, Transaction],  # from toml
    api_trans: dict[str, Transaction],  # from API

    simulate_pp_update: bool = False,

) -> None:
    for dst, size in balances.items():

        # we don't care about tracking positions
        # in the user's source fiat currency.
        if (
            dst == src_fiat
            or not any(
                dst in bs_mktid for bs_mktid in acnt.pps
            )
        ):
            log.warning(
                f'Skipping balance `{dst}`:{size} for position calcs!'
            )
            continue

        # we have a balance for which there is no pos entry
        # - we have to likely update from the ledger?
        if not has_pp(acnt, src_fiat, dst, size):
            updated = acnt.update_from_ledger(
                ledger_trans,
                symcache=ledger.symcache,
            )
            log.info(f'Updated pps from ledger:\n{pformat(updated)}')

            # FIRST try reloading from API records
            if (
                not has_pp(acnt, src_fiat, dst, size)
                and not simulate_pp_update
            ):
                acnt.update_from_ledger(
                    api_trans,
                    symcache=ledger.symcache,
                )

                # get transfers to make sense of abs
                # balances.
                # NOTE: we do this after ledger and API
                # loading since we might not have an
                # entry in the
                # ``account.kraken.spot.toml`` for the
                # necessary pair yet and thus this
                # likely pair grabber will likely fail.
                if not has_pp(acnt, src_fiat, dst, size):
                    for bs_mktid in acnt.pps:
                        likely_pair: str | None = get_likely_pair(
                            src_fiat,
                            dst,
                            bs_mktid,
                        )
                        if likely_pair:
                            break
                    else:
                        raise ValueError(
                            'Could not find a position pair in '
                            'ledger for likely withdrawal '
                            f'candidate: {dst}'
                        )

                    # this was likely a pos that had a withdrawal
                    # from the dst asset out of the account.
                    if likely_pair:
                        xfer_trans = await client.get_xfers(
                            dst,

                            # TODO: not all src assets are
                            # 3 chars long...
                            src_asset=likely_pair[3:],
                        )
                        if xfer_trans:
                            updated = acnt.update_from_ledger(
                                xfer_trans,
                                cost_scalar=1,
                                symcache=ledger.symcache,
                            )
                            log.info(
                                f'Updated {dst} from transfers:\n'
                                f'{pformat(updated)}'
                            )

                if not has_pp(acnt, src_fiat, dst, size):
                    raise ValueError(
                        'Could not reproduce balance:\n'
                        f'dst: {dst}, {size}\n'
                    )
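
For reference, a hedged sketch of how this verifier is presumably wired up during ledger processing; the `acnt`/`ledger` handles, the transaction dicts, and the `client.get_balances()` call are assumptions, not shown in this diff:

balances: dict[str, float] = await client.get_balances()
await verify_balances(
    acnt,
    src_fiat='EUR',             # hypothetical source fiat
    balances=balances,
    client=client,
    ledger=ledger,
    ledger_trans=ledger_trans,  # from the toml ledger
    api_trans=api_trans,        # from the REST API
)
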
@ -0,0 +1,206 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Symbology defs and search.

'''
from decimal import Decimal

import tractor
from rapidfuzz import process as fuzzy

from piker._cacheables import (
    async_lifo_cache,
)
from piker.accounting._mktinfo import (
    digits_to_dec,
)
from piker.brokers import (
    open_cached_client,
    SymbolNotFound,
)
from piker.types import Struct
from piker.accounting._mktinfo import (
    Asset,
    MktPair,
    unpack_fqme,
)


# https://www.kraken.com/features/api#get-tradable-pairs
class Pair(Struct):
    xname: str  # idiotic bs_mktid equiv i guess?
    altname: str  # alternate pair name
    wsname: str  # WebSocket pair name (if available)
    aclass_base: str  # asset class of base component
    base: str  # asset id of base component
    aclass_quote: str  # asset class of quote component
    quote: str  # asset id of quote component
    lot: str  # volume lot size

    cost_decimals: int
    costmin: float
    pair_decimals: int  # scaling decimal places for pair
    lot_decimals: int  # scaling decimal places for volume

    # amount to multiply lot volume by to get currency volume
    lot_multiplier: float

    # array of leverage amounts available when buying
    leverage_buy: list[int]
    # array of leverage amounts available when selling
    leverage_sell: list[int]

    # fee schedule array in [volume, percent fee] tuples
    fees: list[tuple[int, float]]

    # maker fee schedule array in [volume, percent fee] tuples (if on
    # maker/taker)
    fees_maker: list[tuple[int, float]]

    fee_volume_currency: str  # volume discount currency
    margin_call: str  # margin call level
    margin_stop: str  # stop-out/liquidation margin level
    ordermin: float  # minimum order volume for pair
    tick_size: float  # min price step size
    status: str

    short_position_limit: float = 0
    long_position_limit: float = float('inf')

    # TODO: should we make this a literal NamespacePath ref?
    ns_path: str = 'piker.brokers.kraken:Pair'

    @property
    def bs_mktid(self) -> str:
        '''
        Kraken seems to index its market symbol sets in
        transaction ledgers using the key returned from rest
        queries.. so use that since apparently they can't
        make up their minds on a better key set XD

        '''
        return self.xname

    @property
    def price_tick(self) -> Decimal:
        return digits_to_dec(self.pair_decimals)

    @property
    def size_tick(self) -> Decimal:
        return digits_to_dec(self.lot_decimals)

    @property
    def bs_dst_asset(self) -> str:
        dst, _ = self.wsname.split('/')
        return dst

    @property
    def bs_src_asset(self) -> str:
        _, src = self.wsname.split('/')
        return src

    @property
    def bs_fqme(self) -> str:
        '''
        Basically the `.altname` but with special '.' handling and
        `.SPOT` suffix appending (for future multi-venue support).

        '''
        dst, src = self.wsname.split('/')
        # XXX: omg for stupid shite like ETH2.S/ETH..
        dst = dst.replace('.', '-')
        return f'{dst}{src}.SPOT'
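
A quick sketch of the `.bs_fqme` derivation for the pathological case called out above; this just mirrors the property body with a sample `wsname`:

wsname = 'ETH2.S/ETH'
dst, src = wsname.split('/')
dst = dst.replace('.', '-')  # ETH2.S -> ETH2-S
assert f'{dst}{src}.SPOT' == 'ETH2-SETH.SPOT'
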


@tractor.context
async def open_symbol_search(ctx: tractor.Context) -> None:
    async with open_cached_client('kraken') as client:

        # load all symbols locally for fast search
        cache = await client.get_mkt_pairs()
        await ctx.started(cache)

        async with ctx.open_stream() as stream:
            async for pattern in stream:
                await stream.send(
                    await client.search_symbols(pattern)
                )


@async_lifo_cache()
async def get_mkt_info(
    fqme: str,

) -> tuple[MktPair, Pair]:
    '''
    Query for and return a `MktPair` and backend-native `Pair` (or
    wtv else) info.

    If more than one fqme is provided return a ``dict`` of native
    key-strs to `MktPair`s.

    '''
    venue: str = 'spot'
    expiry: str = ''
    if '.kraken' not in fqme:
        fqme += '.kraken'

    broker, pair, venue, expiry = unpack_fqme(fqme)
    venue: str = venue or 'spot'

    if venue.lower() != 'spot':
        raise SymbolNotFound(
            'kraken only supports spot markets right now!\n'
            f'{fqme}\n'
        )

    async with open_cached_client('kraken') as client:

        # uppercase since kraken bs_mktid is always upper
        # bs_fqme, _, broker = fqme.partition('.')
        # pair_str: str = bs_fqme.upper()
        pair_str: str = f'{pair}.{venue}'

        pair: Pair | None = client._pairs.get(pair_str.upper())
        if not pair:
            bs_fqme: str = client.to_bs_fqme(pair_str)
            pair: Pair = client._pairs[bs_fqme]

        if not (assets := client._assets):
            assets: dict[str, Asset] = await client.get_assets()

        dst_asset: Asset = assets[pair.bs_dst_asset]
        src_asset: Asset = assets[pair.bs_src_asset]

        mkt = MktPair(
            dst=dst_asset,
            src=src_asset,

            price_tick=pair.price_tick,
            size_tick=pair.size_tick,
            bs_mktid=pair.bs_mktid,

            expiry=expiry,
            venue=venue or 'spot',

            # TODO: futes
            # _atype=_atype,

            broker='kraken',
        )
        return mkt, pair
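
A minimal caller sketch, assuming a running piker stack (so the cached client can connect) and that the example fqme resolves in kraken's symbology; note the venue defaulting and `.kraken` suffixing handled above:

mkt, pair = await get_mkt_info('xbtusdt.kraken')
assert mkt.venue == 'spot'
assert mkt.bs_mktid == pair.xname
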
File diff suppressed because it is too large
@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -40,13 +40,17 @@ import wrapt
import asks

from ..calc import humanize, percent_change
from .._cacheables import open_cached_client, async_lifo_cache
from . import open_cached_client
from piker._cacheables import async_lifo_cache
from .. import config
from ._util import resproc, BrokerError, SymbolNotFound
from ..log import get_logger, colorize_json, get_console_log


log = get_logger(__name__)
from ..log import (
    colorize_json,
)
from ._util import (
    log,
    get_console_log,
)

_use_practice_account = False
_refresh_token_ep = 'https://{}login.questrade.com/oauth2/'
@ -27,12 +27,13 @@ from typing import List
from async_generator import asynccontextmanager
import asks

from ..log import get_logger
from ._util import resproc, BrokerError
from ._util import (
    resproc,
    BrokerError,
    log,
)
from ..calc import percent_change

log = get_logger(__name__)

_service_ep = 'https://api.robinhood.com'

@ -65,8 +66,10 @@ class Client:
        self.api = _API(self._sess)

    def _zip_in_order(self, symbols: [str], quotes: List[dict]):
        return {quote.get('symbol', sym) if quote else sym: quote
                for sym, quote in zip(symbols, results_dict)}
        return {
            quote.get('symbol', sym) if quote else sym: quote
            for sym, quote in zip(symbols, quotes)
        }

    async def quote(self, symbols: [str]):
        """Retrieve quotes for a list of ``symbols``.

@ -0,0 +1,49 @@
piker.clearing
______________
trade execution-n-control subsys for both live and paper trading as
well as algo-trading manual override/interaction across any backend
broker and data provider.

avail UIs
*********

order ctl
---------
the `piker.clearing` subsys is exposed mainly through
the `piker chart` GUI as a "chart trader" style UX and
is automatically enabled whenever a chart is opened.

.. ^TODO, more prose here!

the "manual" order control features are exposed via the
`piker.ui.order_mode` API and can pretty much always be
used (at least) in simulated-trading mode, aka "paper"-mode, and
the micro-manual is as follows:

``order_mode`` (
    edge triggered activation by any of the following keys,
    ``mouse-click`` on y-level to submit at that price
    ):

    - ``f``/ ``ctl-f`` to stage buy
    - ``d``/ ``ctl-d`` to stage sell
    - ``a`` to stage alert


``search_mode`` (
    ``ctl-l`` or ``ctl-space`` to open,
    ``ctl-c`` or ``ctl-space`` to close
    ) :

    - begin typing to have symbol search automatically lookup
      symbols from all loaded backend (broker) providers
    - arrow keys and mouse click to navigate selection
    - vi-like ``ctl-[hjkl]`` for navigation


position (pp) mgmt
------------------
you can also configure your position allocation limits from the
sidepane.

.. ^TODO, explain and provide tut once more refined!
@ -18,9 +18,38 @@
Market machinery for order executions, book, management.

"""
from ._client import open_ems
from ..log import get_logger
from ._client import (
    open_ems,
    OrderClient,
)
from ._ems import (
    open_brokerd_dialog,
)
from ._util import OrderDialogs
from ._messages import (
    Order,
    Status,
    Cancel,

    # TODO: deprecate these and replace end-2-end with
    # client-side-dialog set above B)
    # https://github.com/pikers/piker/issues/514
    BrokerdPosition
)


__all__ = [
    'FeeModel',
    'open_ems',
    'OrderClient',
    'open_brokerd_dialog',
    'OrderDialogs',
    'Order',
    'Status',
    'Cancel',
    'BrokerdPosition'

]

log = get_logger(__name__)
@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -27,68 +27,103 @@ import trio
import tractor
from tractor.trionics import broadcast_receiver

from ..log import get_logger
from ..data.types import Struct
from ._util import (
    log,  # sub-sys logger
)
from piker.types import Struct
from ..service import maybe_open_emsd
from ._messages import (
    Order,
    Cancel,
    BrokerdPosition,
)
from ..brokers import get_brokermod

if TYPE_CHECKING:
    from ._messages import (
        BrokerdPosition,
        Status,
    )


log = get_logger(__name__)
class OrderClient(Struct):
    '''
    EMS-client-side order book ctl and tracking.


class OrderBook(Struct):
    '''EMS-client-side order book ctl and tracking.

    A style similar to "model-view" is used here where this api is
    provided as a supervised control for an EMS actor which does all the
    hard/fast work of talking to brokers/exchanges to conduct
    executions.

    Currently, this is mostly for keeping local state to match the EMS
    and use received events to trigger graphics updates.
    (A)sync API for submitting orders and alerts to the `emsd` service;
    this is the main control for execution management from client code.

    '''
    # IPC stream to `emsd` actor
    _ems_stream: tractor.MsgStream

    # mem channels used to relay order requests to the EMS daemon
    _to_ems: trio.abc.SendChannel
    _from_order_book: trio.abc.ReceiveChannel
    _to_relay_task: trio.abc.SendChannel
    _from_sync_order_client: trio.abc.ReceiveChannel

    # history table
    _sent_orders: dict[str, Order] = {}

    def send(
    def send_nowait(
        self,
        msg: Order | dict,

    ) -> dict:
    ) -> dict | Order:
        '''
        Sync version of ``.send()``.

        '''
        self._sent_orders[msg.oid] = msg
        self._to_ems.send_nowait(msg)
        self._to_relay_task.send_nowait(msg)
        return msg

    def send_update(
    async def send(
        self,
        msg: Order | dict,

    ) -> dict | Order:
        '''
        Send a new order msg async to the `emsd` service.

        '''
        self._sent_orders[msg.oid] = msg
        await self._ems_stream.send(msg)
        return msg

    def update_nowait(
        self,
        uuid: str,
        **data: dict,

    ) -> dict:
        '''
        Sync version of ``.update()``.

        '''
        cmd = self._sent_orders[uuid]
        msg = cmd.copy(update=data)
        self._sent_orders[uuid] = msg
        self._to_ems.send_nowait(msg)
        return cmd
        self._to_relay_task.send_nowait(msg)
        return msg

    def cancel(self, uuid: str) -> bool:
        """Cancel an order (or alert) in the EMS.
    async def update(
        self,
        uuid: str,
        **data: dict,
    ) -> dict:
        '''
        Update an existing order dialog with a msg updated from
        ``update`` kwargs.

        """
        '''
        cmd = self._sent_orders[uuid]
        msg = cmd.copy(update=data)
        self._sent_orders[uuid] = msg
        await self._ems_stream.send(msg)
        return msg

    def _mk_cancel_msg(
        self,
        uuid: str,
    ) -> Cancel:
        cmd = self._sent_orders.get(uuid)
        if not cmd:
            log.error(
@ -96,77 +131,77 @@ class OrderBook(Struct):
                f'Maybe there is a stale entry or line?\n'
                f'You should report this as a bug!'
            )
        msg = Cancel(
            return

        fqme = str(cmd.symbol)
        return Cancel(
            oid=uuid,
            symbol=cmd.symbol,
            symbol=fqme,
        )
        self._to_ems.send_nowait(msg)


_orders: OrderBook = None


def get_orders(
    emsd_uid: tuple[str, str] = None
) -> OrderBook:
    """"
    OrderBook singleton factory per actor.

    """
    if emsd_uid is not None:
        # TODO: read in target emsd's active book on startup
        pass

    global _orders

    if _orders is None:
        size = 100
        tx, rx = trio.open_memory_channel(size)
        brx = broadcast_receiver(rx, size)

        # setup local ui event streaming channels for request/resp
        # streaming with EMS daemon
        _orders = OrderBook(
            _to_ems=tx,
            _from_order_book=brx,
        )

    return _orders
    def cancel_nowait(
        self,
        uuid: str,

    ) -> None:
        '''
        Sync version of ``.cancel()``.

        '''
        self._to_relay_task.send_nowait(
            self._mk_cancel_msg(uuid)
        )

    async def cancel(
        self,
        uuid: str,

    ) -> bool:
        '''
        Cancel an already existing order (or alert) dialog.

        '''
        await self._ems_stream.send(
            self._mk_cancel_msg(uuid)
        )
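
A hedged usage sketch of the async side of this client; the `Order` field values are invented and `client` is assumed to be the instance constructed by `open_ems()` below:

from uuid import uuid4

order = Order(
    oid=str(uuid4()),
    symbol='xbtusdt.kraken',
    account='paper',
    price=27000.0,
    size=0.01,
    action='buy',
)
await client.send(order)
await client.update(order.oid, price=26900.0)  # amend the level
await client.cancel(order.oid)
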


# TODO: we can get rid of this relay loop once we move
# order_mode inputs to async code!
async def relay_order_cmds_from_sync_code(

async def relay_orders_from_sync_code(

    client: OrderClient,
    symbol_key: str,
    to_ems_stream: tractor.MsgStream,

) -> None:
    """
    Order streaming task: deliver orders transmitted from UI
    to downstream consumers.
    '''
    Order submission relay task: deliver orders sent from synchronous (UI)
    code to the EMS via ``OrderClient._from_sync_order_client``.

    This is run in the UI actor (usually the one running Qt but could be
    any other client service code). This process simply delivers order
    messages to the above ``_to_ems`` send channel (from sync code using
    messages to the above ``_to_relay_task`` send channel (from sync code using
    ``.send_nowait()``), these values are pulled from the channel here
    and relayed to any consumer(s) that called this function using
    a ``tractor`` portal.

    This effectively makes order messages look like they're being
    "pushed" from the parent to the EMS where local sync code is likely
    doing the pushing from some UI.
    doing the pushing from some non-async UI handler.

    """
    book = get_orders()
    async with book._from_order_book.subscribe() as orders_stream:
        async for cmd in orders_stream:
    '''
    async with (
        client._from_sync_order_client.subscribe() as sync_order_cmds
    ):
        async for cmd in sync_order_cmds:
            sym = cmd.symbol
            msg = pformat(cmd)
            msg = pformat(cmd.to_dict())

            if sym == symbol_key:
                log.info(f'Send order cmd:\n{msg}')
                # send msg over IPC / wire
                await to_ems_stream.send(cmd)

            else:
                log.warning(
                    f'Ignoring unmatched order cmd for {sym} != {symbol_key}:'
@ -176,77 +211,46 @@ async def relay_order_cmds_from_sync_code(

@acm
async def open_ems(
    fqsn: str,
    fqme: str,
    mode: str = 'live',
    loglevel: str = 'error',

) -> tuple[
    OrderBook,
    tractor.MsgStream,
    OrderClient,  # client
    tractor.MsgStream,  # order ctl stream
    dict[
        # brokername, acctid
        tuple[str, str],
        list[BrokerdPosition],
        dict[str, BrokerdPosition],
    ],
    list[str],
    dict[str, Status],
]:
    '''
    Spawn an EMS daemon and begin sending orders and receiving
    alerts.
    (Maybe) spawn an EMS-daemon (emsd), deliver an `OrderClient` for
    requesting orders/alerts and a `trades_stream` which delivers all
    response-msgs.

    This EMS tries to reduce most broker's terrible order entry apis to
    a very simple protocol built on a few easy to grok and/or
    "rantsy" premises:

    - most users will prefer "dark mode" where orders are not submitted
      to a broker until an execution condition is triggered
      (aka client-side "hidden orders")

    - Brokers over-complicate their apis and generally speaking hire
      poor designers to create them. We're better off creating a super
      minimal, schema-simple, request-event-stream protocol to unify all the
      existing piles of shit (and shocker, it'll probably just end up
      looking like a decent crypto exchange's api)

    - all order types can be implemented with client-side limit orders

    - we aren't reinventing a wheel in this case since none of these
      brokers are exposing FIX protocol; it is they doing the re-invention.


    TODO: make some fancy diagrams using mermaid.io

    the possible set of responses from the stream is currently:
    - 'dark_submitted', 'broker_submitted'
    - 'dark_cancelled', 'broker_cancelled'
    - 'dark_executed', 'broker_executed'
    - 'broker_filled'
    This is a "client side" entrypoint which may spawn the `emsd` service
    if it can't be discovered and generally speaking is the lowest level
    broker control client-API.

    '''
    # wait for service to connect back to us signalling
    # ready for order commands
    book = get_orders()
    # TODO: prolly hand in the `MktPair` instance directly here as well!
    from piker.accounting import unpack_fqme
    broker, mktep, venue, suffix = unpack_fqme(fqme)

    from ..data._source import unpack_fqsn
    broker, symbol, suffix = unpack_fqsn(fqsn)

    async with maybe_open_emsd(broker) as portal:

        mod = get_brokermod(broker)
        if (
            not getattr(mod, 'trades_dialogue', None)
            or mode == 'paper'
        ):
            mode = 'paper'
    async with maybe_open_emsd(
        broker,
        loglevel=loglevel,
    ) as portal:

        from ._ems import _emsd_main
        async with (
            # connect to emsd
            portal.open_context(

                _emsd_main,
                fqsn=fqsn,
                fqme=fqme,
                exec_mode=mode,
                loglevel=loglevel,

@ -262,18 +266,36 @@ async def open_ems(
            # open 2-way trade command stream
            ctx.open_stream() as trades_stream,
        ):
            size: int = 100  # what should this be?
            tx, rx = trio.open_memory_channel(size)
            brx = broadcast_receiver(rx, size)

            # setup local ui event streaming channels for request/resp
            # streaming with EMS daemon
            client = OrderClient(
                _ems_stream=trades_stream,
                _to_relay_task=tx,
                _from_sync_order_client=brx,
            )

            client._ems_stream = trades_stream

            # start sync code order msg delivery task
            async with trio.open_nursery() as n:
                n.start_soon(
                    relay_order_cmds_from_sync_code,
                    fqsn,
                    relay_orders_from_sync_code,
                    client,
                    fqme,
                    trades_stream
                )

                yield (
                    book,
                    client,
                    trades_stream,
                    positions,
                    accounts,
                    dialogs,
                )

                # stop the sync-msg-relay task on exit.
                n.cancel_scope.cancel()
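
Putting it together, a hedged end-to-end sketch of a consumer; the fqme and paper mode are example values and the response-msg handling is elided:

async with open_ems(
    'xbtusdt.kraken',
    mode='paper',
) as (
    client,         # OrderClient
    trades_stream,  # tractor.MsgStream of EMS response msgs
    positions,
    accounts,
    dialogs,
):
    # submit requests via `client`, then process EMS responses
    async for msg in trades_stream:
        ...
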
File diff suppressed because it is too large
@ -18,41 +18,14 @@
Clearing sub-system message and protocols.

"""
# from collections import (
#     ChainMap,
#     deque,
# )
from __future__ import annotations
from typing import (
    Optional,
    Literal,
)

from msgspec import field

from ..data._source import Symbol
from ..data.types import Struct


# TODO: a composite for tracking msg flow on 2-legged
# dialogs.
# class Dialog(ChainMap):
#     '''
#     Msg collection abstraction to easily track the state changes of
#     a msg flow in one high level, query-able and immutable construct.

#     The main use case is to query data from a (long-running)
#     msg-transaction-sequence


#     '''
#     def update(
#         self,
#         msg,
#     ) -> None:
#         self.maps.insert(0, msg.to_dict())

#     def flatten(self) -> dict:
#         return dict(self)
from piker.types import Struct


# TODO: ``msgspec`` stuff worth paying attention to:
@ -94,7 +67,8 @@ class Order(Struct):

    # internal ``emsd`` unique "order id"
    oid: str  # uuid4
    symbol: str | Symbol
    # TODO: figure out how to optionally typecast this to `MktPair`?
    symbol: str  # | MktPair
    account: str  # should we set a default as '' ?

    price: float
@ -140,7 +114,7 @@ class Status(Struct):

    # this maps normally to the ``BrokerdOrder.reqid`` below, an id
    # normally allocated internally by the backend broker routing system
    reqid: Optional[int | str] = None
    reqid: int | str | None = None

    # the (last) source order/request msg if provided
    # (eg. the Order/Cancel which causes this msg) and
@ -153,7 +127,7 @@ class Status(Struct):
    # event that wasn't originated by piker's emsd (eg. some external
    # trading system which does its own order control but that you
    # might want to "track" using piker UIs/systems).
    src: Optional[str] = None
    src: str | None = None

    # set when a cancel request msg was sent for this order flow dialog
    # but the brokerd dialog isn't yet in a cancelled state.
@ -164,6 +138,18 @@ class Status(Struct):
    brokerd_msg: dict = {}


class Error(Status):
    resp: str = 'error'

    # TODO: allow re-wrapping from existing (last) status?
    @classmethod
    def from_status(
        cls,
        msg: Status,
    ) -> Error:
        ...


# ---------------
# emsd -> brokerd
# ---------------


@ -181,7 +167,7 @@ class BrokerdCancel(Struct):
    # for setting a unique order id then this value will be relayed back
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
    # field
    reqid: Optional[int | str] = None
    reqid: int | str | None = None
    action: str = 'cancel'
@ -191,7 +177,7 @@ class BrokerdOrder(Struct):
    account: str
    time_ns: int

    symbol: str  # fqsn
    symbol: str  # fqme
    price: float
    size: float

@ -205,7 +191,7 @@ class BrokerdOrder(Struct):
    # for setting a unique order id then this value will be relayed back
    # on the emsd order request stream as the ``BrokerdOrderAck.reqid``
    # field
    reqid: Optional[int | str] = None
    reqid: int | str | None = None
@ -227,24 +213,27 @@ class BrokerdOrderAck(Struct):

    # emsd id originally sent in matching request msg
    oid: str
    # TODO: do we need this?
    account: str = ''
    name: str = 'ack'


class BrokerdStatus(Struct):

    reqid: int | str
    time_ns: int
    reqid: int | str
    status: Literal[
        'open',
        'canceled',
        'fill',
        'pending',
        'error',
        # 'error',  # NOTE: use `BrokerdError`
        'closed',
    ]

    account: str
    name: str = 'status'

    oid: str = ''
    # TODO: do we need this?
    account: str | None = None
    filled: float = 0.0
    reason: str = ''
    remaining: float = 0.0
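
As a shape reference, a sketch of a minimal ack/status pair a backend might emit; all values are hypothetical and the ack's `reqid` field is assumed from the surrounding protocol, not shown in this hunk:

import time

ack = BrokerdOrderAck(
    reqid='kraken-42',     # broker-side request id
    oid='<ems-uuid4>',     # emsd id from the matching request
    account='kraken.spot',
)
status = BrokerdStatus(
    time_ns=time.time_ns(),
    reqid='kraken-42',
    status='open',
    account='kraken.spot',
)
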

@ -259,24 +248,24 @@ class BrokerdStatus(Struct):

class BrokerdFill(Struct):
    '''
    A single message indicating a "fill-details" event from the broker
    if available.
    A single message indicating a "fill-details" event from the
    broker if available.

    '''
    # brokerd timestamp required for order mode arrow placement on x-axis
    # TODO: maybe int if we force ns?
    # we need to normalize this somehow since backends will use their
    # own format and likely across many disparate epoch clocks...
    time_ns: int
    broker_time: float
    reqid: int | str
    time_ns: int

    # order execution related
    size: float
    price: float

    name: str = 'fill'
    action: Optional[str] = None
    action: str | None = None
    broker_details: dict = {}  # meta-data (eg. commissions etc.)

@ -287,23 +276,27 @@ class BrokerdError(Struct):
    This is still a TODO thing since we're not sure how to employ it yet.

    '''
    oid: str
    symbol: str
    reason: str

    # TODO: drop this right?
    symbol: str | None = None

    oid: str | None = None
    # if no brokerd order request was actually submitted (eg. we errored
    # at the ``pikerd`` layer) then there will be no ``reqid`` allocated.
    reqid: Optional[int | str] = None
    reqid: str | None = None

    name: str = 'error'
    broker_details: dict = {}


# TODO: yeah, so we REALLY need to completely deprecate
# this and use the `.accounting.Position` msg-type instead..
class BrokerdPosition(Struct):
    '''Position update event from brokerd.
    '''
    Position update event from brokerd.

    '''

    broker: str
    account: str
    symbol: str
@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@ -14,21 +14,24 @@
# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Fake trading for forward testing.
'''
Fake trading: a full forward testing simulation engine.

"""
We can real-time emulate any mkt conditions you want bruddr B)
Just slide us the model you want..

'''
from collections import defaultdict
from contextlib import asynccontextmanager
from contextlib import asynccontextmanager as acm
from datetime import datetime
from operator import itemgetter
import itertools
from pprint import pformat
import time
from typing import (
    Any,
    Optional,
    Callable,
)
from types import ModuleType
import uuid

from bidict import bidict
@ -36,18 +39,30 @@ import pendulum
import trio
import tractor

from .. import data
from ..data.types import Struct
from ..data._source import Symbol
from ..pp import (
from piker.brokers import get_brokermod
from piker.service import find_service
from piker.accounting import (
    Account,
    MktPair,
    Position,
    Transaction,
    TransactionLedger,
    open_account,
    open_trade_ledger,
    open_pps,
    unpack_fqme,
)
from piker.data import (
    Feed,
    SymbologyCache,
    iterticks,
    open_feed,
    open_symcache,
)
from piker.types import Struct
from ._util import (
    log,  # sub-sys logger
    get_console_log,
)
from ..data._normalize import iterticks
from ..data._source import unpack_fqsn
from ..log import get_logger
from ._messages import (
    BrokerdCancel,
    BrokerdOrder,
@ -58,10 +73,6 @@ from ._messages import (
    BrokerdError,
)

from ..config import load

log = get_logger(__name__)


class PaperBoi(Struct):
    '''

@ -72,17 +83,17 @@ class PaperBoi(Struct):

    '''
    broker: str

    ems_trades_stream: tractor.MsgStream
    acnt: Account
    ledger: TransactionLedger
    fees: Callable

    # map of paper "live" orders which will be used
    # to simulate fills based on paper engine settings
    _buys: defaultdict[str, bidict]
    _sells: defaultdict[str, bidict]
    _reqids: bidict
    _positions: dict[str, Position]
    _trade_ledger: dict[str, Any]
    _syms: dict[str, Symbol] = {}
    _mkts: dict[str, MktPair] = {}

    # init edge case L1 spread
    last_ask: tuple[float, float] = (float('inf'), 0)  # price, size
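
Since `fees` is any callable applied per fill below (`cost = fees(price, size)` in `fake_fill()`), a flat-rate sketch; the 0.1% taker rate is a made-up example value:

def flat_taker_fees(
    price: float,
    size: float,
) -> float:
    # flat percentage of notional, sign-agnostic
    return abs(price * size) * 0.001
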
 | 
			
		||||
| 
						 | 
				
			
			@ -95,7 +106,7 @@ class PaperBoi(Struct):
 | 
			
		|||
        price: float,
 | 
			
		||||
        action: str,
 | 
			
		||||
        size: float,
 | 
			
		||||
        reqid: Optional[str],
 | 
			
		||||
        reqid: str | None,
 | 
			
		||||
 | 
			
		||||
    ) -> int:
 | 
			
		||||
        '''
 | 
			
		||||
| 
						 | 
				
			
@@ -119,9 +130,12 @@ class PaperBoi(Struct):
         # for dark orders since we want the dark_executed
         # to trigger first thus creating a lookup entry
         # in the broker trades event processing loop
-        await trio.sleep(0.05)
+        await trio.sleep(0.01)

-        if action == 'sell':
+        if (
+            action == 'sell'
+            and size > 0
+        ):
             size = -size

         msg = BrokerdStatus(
@@ -183,7 +197,7 @@ class PaperBoi(Struct):
             self._sells[symbol].pop(oid, None)

         # TODO: net latency model
-        await trio.sleep(0.05)
+        await trio.sleep(0.01)

         msg = BrokerdStatus(
             status='canceled',
@@ -197,7 +211,7 @@ class PaperBoi(Struct):
     async def fake_fill(
         self,

-        fqsn: str,
+        fqme: str,
         price: float,
         size: float,
         action: str,  # one of {'buy', 'sell'}
@@ -216,7 +230,7 @@ class PaperBoi(Struct):

         '''
         # TODO: net latency model
-        await trio.sleep(0.05)
+        await trio.sleep(0.01)
         fill_time_ns = time.time_ns()
         fill_time_s = time.time()

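All three request paths above now sleep a flat 0.01s (down from 0.05s), each still flagged with the same "TODO: net latency model" marker. If that TODO were tackled, one minimal shape for it (purely hypothetical, not part of this diff):

# hypothetical latency model sketch for the TODO above: a jittered
# gaussian delay instead of a hard-coded `trio.sleep(0.01)`.
import random
import trio

async def net_latency(
    mean_s: float = 0.01,
    jitter_s: float = 0.005,
) -> None:
    # clamp at 0 so jitter can never yield a negative sleep
    await trio.sleep(max(0., random.gauss(mean_s, jitter_s)))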
@@ -250,43 +264,59 @@ class PaperBoi(Struct):
             )
             await self.ems_trades_stream.send(msg)

-        # lookup any existing position
-        key = fqsn.rstrip(f'.{self.broker}')
+        # NOTE: for paper we set the "bs_mktid" as just the fqme since
+        # we don't actually have any unique backend symbol ourselves
+        # other then this thing, our fqme address.
+        bs_mktid: str = fqme
+        if fees := self.fees:
+            cost: float = fees(price, size)
+        else:
+            cost: float = 0
+
         t = Transaction(
-            fqsn=fqsn,
-            sym=self._syms[fqsn],
+            fqme=fqme,
             tid=oid,
             size=size,
             price=price,
-            cost=0,  # TODO: cost model
+            cost=cost,
             dt=pendulum.from_timestamp(fill_time_s),
-            bsuid=key,
+            bs_mktid=bs_mktid,
         )

-        with (
-            open_trade_ledger(self.broker, 'paper') as ledger,
-            open_pps(self.broker, 'paper', write_on_exit=True) as table
-        ):
-            tx = t.to_dict()
-            tx.pop('sym')
-            ledger.update({oid: tx})
-            # Write to pps toml right now
-            table.update_from_trans({oid: t})
-
-            pp = table.pps[key]
-            pp_msg = BrokerdPosition(
-                broker=self.broker,
-                account='paper',
-                symbol=fqsn,
-                # TODO: we need to look up the asset currency from
-                # broker info. i guess for crypto this can be
-                # inferred from the pair?
-                currency=key,
-                size=pp.size,
-                avg_price=pp.ppu,
-            )
-
-            await self.ems_trades_stream.send(pp_msg)
+        # update in-mem ledger and pos table
+        self.ledger.update_from_t(t)
+        self.acnt.update_from_ledger(
+            {oid: t},
+            symcache=self.ledger._symcache,
+
+            # XXX when a backend has no symcache support yet we can
+            # simply pass in the gmi() retreived table created
+            # during init :o
+            _mktmap_table=self._mkts,
+        )
+
+        # transmit pp msg to ems
+        pp: Position = self.acnt.pps[bs_mktid]
+
+        pp_msg = BrokerdPosition(
+            broker=self.broker,
+            account='paper',
+            symbol=fqme,
+
+            size=pp.cumsize,
+            avg_price=pp.ppu,
+
+            # TODO: we need to look up the asset currency from
+            # broker info. i guess for crypto this can be
+            # inferred from the pair?
+            # currency=bs_mktid,
+        )
+        # write all updates to filesys immediately
+        # (adds latency but that works for simulation anyway)
+        self.ledger.write_config()
+        self.acnt.write_config()
+
+        await self.ems_trades_stream.send(pp_msg)


 async def simulate_fills(
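`fake_fill()` now threads real fee accounting through the paper engine: when the backend exposes a fee model it's called as `fees(price, size)` and the result lands in `Transaction.cost`. A sketch of what such a backend `get_cost()` EP might look like (the bps rates are invented for illustration; `norm_trade()` further below calls the same EP with `is_taker=True`):

# hypothetical flat-bps fee model matching the call shapes used in
# this diff: `fees(price, size)` and `get_cost(price, size, is_taker=True)`.
def get_cost(
    price: float,
    size: float,
    is_taker: bool = True,
) -> float:
    bps: float = 7.5 if is_taker else 1.6  # illustrative rates only
    return abs(price * size) * bps / 1e4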
@@ -313,6 +343,7 @@ async def simulate_fills(
     # this stream may eventually contain multiple symbols
     async for quotes in quote_stream:
         for sym, quote in quotes.items():
+            # print(sym)
             for tick in iterticks(
                 quote,
                 # dark order price filter(s)
@@ -421,7 +452,7 @@ async def simulate_fills(

                         # clearing price would have filled entirely
                         await client.fake_fill(
-                            fqsn=sym,
+                            fqme=sym,
                             # todo slippage to determine fill price
                             price=tick_price,
                             size=size,
@@ -469,6 +500,7 @@ async def handle_order_requests(
                     BrokerdOrderAck(
                         oid=order.oid,
                         reqid=reqid,
+                        account='paper'
                     )
                 )

@@ -512,86 +544,207 @@ _sells: defaultdict[
         tuple[float, float, str, str],  # order info
     ]
 ] = defaultdict(bidict)
-_positions: dict[str, Position] = {}


 @tractor.context
-async def trades_dialogue(
+async def open_trade_dialog(

     ctx: tractor.Context,
     broker: str,
-    fqsn: str,
-    loglevel: str = None,
+    fqme: str | None = None,  # if empty, we only boot broker mode
+    loglevel: str = 'warning',

 ) -> None:

-    tractor.log.get_console_log(loglevel)
+    # enable piker.clearing console log for *this* subactor
+    get_console_log(loglevel)

-    async with (
-        data.open_feed(
-            [fqsn],
-            loglevel=loglevel,
-        ) as feed,
-
-    ):
-
-        with open_pps(broker, 'paper') as table:
-            # save pps in local state
-            _positions.update(table.pps)
-
-        pp_msgs: list[BrokerdPosition] = []
-        pos: Position
-        token: str  # f'{symbol}.{self.broker}'
-        for token, pos in _positions.items():
-            pp_msgs.append(BrokerdPosition(
-                broker=broker,
-                account='paper',
-                symbol=pos.symbol.front_fqsn(),
-                size=pos.size,
-                avg_price=pos.ppu,
-            ))
-
-        await ctx.started((
-            pp_msgs,
-            ['paper'],
-        ))
-
-        async with (
-            ctx.open_stream() as ems_stream,
-            trio.open_nursery() as n,
-        ):
-            client = PaperBoi(
-                broker,
-                ems_stream,
-                _buys=_buys,
-                _sells=_sells,
-
-                _reqids=_reqids,
-
-                _positions=_positions,
-
-                # TODO: load postions from ledger file
-                _trade_ledger={},
-                _syms={
-                    fqsn: flume.symbol
-                    for fqsn, flume in feed.flumes.items()
-                }
-            )
-
-            n.start_soon(
-                handle_order_requests,
-                client,
-                ems_stream,
-            )
-
-            # paper engine simulator clearing task
-            await simulate_fills(feed.streams[broker], client)
+    symcache: SymbologyCache
+    async with open_symcache(get_brokermod(broker)) as symcache:
+
+        acnt: Account
+        ledger: TransactionLedger
+        with (
+            # TODO: probably do the symcache and ledger loading
+            # implicitly behind this? Deliver an account, and ledger
+            # pair or make the ledger an attr of the account?
+            open_account(
+                broker,
+                'paper',
+                write_on_exit=True,
+            ) as acnt,
+
+            open_trade_ledger(
+                broker,
+                'paper',
+                symcache=symcache,
+            ) as ledger
+        ):
+            # NOTE: WE MUST retreive market(pair) info from each
+            # backend broker since ledger entries (in their
+            # provider-native format) often don't contain necessary
+            # market info per trade record entry..
+            # FURTHER, if no fqme was passed in, we presume we're
+            # running in "ledger-sync-only mode" and thus we load
+            # mkt info for each symbol found in the ledger to
+            # an acnt table manually.
+
+            # TODO: how to process ledger info from backends?
+            # - should we be rolling our own actor-cached version of these
+            #   client API refs or using portal IPC to send requests to the
+            #   existing brokerd daemon?
+            # - alternatively we can possibly expect and use
+            #   a `.broker.ledger.norm_trade()` ep?
+            brokermod: ModuleType = get_brokermod(broker)
+            gmi: Callable = getattr(brokermod, 'get_mkt_info', None)
+
+            # update all transactions with mkt info before
+            # loading any pps
+            mkt_by_fqme: dict[str, MktPair] = {}
+            if (
+                fqme
+                and fqme not in symcache.mktmaps
+            ):
+                log.warning(
+                    f'Symcache for {broker} has no `{fqme}` entry?\n'
+                    'Manually requesting mkt map data via `.get_mkt_info()`..'
+                )
+                bs_fqme, _, broker = fqme.rpartition('.')
+                mkt, pair = await gmi(bs_fqme)
+                mkt_by_fqme[mkt.fqme] = mkt
+
+            # for each sym in the ledger load its `MktPair` info
+            for tid, txdict in ledger.data.items():
+                l_fqme: str = txdict.get('fqme') or txdict['fqsn']
+
+                if (
+                    gmi
+                    and l_fqme not in symcache.mktmaps
+                    and l_fqme not in mkt_by_fqme
+                ):
+                    log.warning(
+                        f'Symcache for {broker} has no `{l_fqme}` entry?\n'
+                        'Manually requesting mkt map data via `.get_mkt_info()`..'
+                    )
+                    mkt, pair = await gmi(
+                        l_fqme.rstrip(f'.{broker}'),
+                    )
+                    mkt_by_fqme[l_fqme] = mkt
+
+                # if an ``fqme: str`` input was provided we only
+                # need a ``MktPair`` for that one market, since we're
+                # running in real simulated-clearing mode, not just ledger
+                # syncing.
+                if (
+                    fqme is not None
+                    and fqme in mkt_by_fqme
+                ):
+                    break
+
+            # update pos table from ledger history and provide a ``MktPair``
+            # lookup for internal position accounting calcs.
+            acnt.update_from_ledger(
+                ledger,
+
+                # NOTE: if the symcache fails on fqme lookup
+                # (either sycache not yet supported or not filled
+                # in) use manually constructed table from calling
+                # the `.get_mkt_info()` provider EP above.
+                _mktmap_table=mkt_by_fqme,
+            )
+
+            pp_msgs: list[BrokerdPosition] = []
+            pos: Position
+            token: str  # f'{symbol}.{self.broker}'
+            for token, pos in acnt.pps.items():
+
+                pp_msgs.append(BrokerdPosition(
+                    broker=broker,
+                    account='paper',
+                    symbol=pos.mkt.fqme,
+                    size=pos.cumsize,
+                    avg_price=pos.ppu,
+                ))
+
+            await ctx.started((
+                pp_msgs,
+                ['paper'],
+            ))
+
+            # write new positions state in case ledger was
+            # newer then that tracked in pps.toml
+            acnt.write_config()
+
+            # exit early since no fqme was passed,
+            # normally this case is just to load
+            # positions "offline".
+            if fqme is None:
+                log.warning(
+                    'Paper engine only running in position delivery mode!\n'
+                    'NO SIMULATED CLEARING LOOP IS ACTIVE!'
+                )
+                await trio.sleep_forever()
+                return
+
+            feed: Feed
+            async with (
+                open_feed(
+                    [fqme],
+                    loglevel=loglevel,
+                ) as feed,
+            ):
+                # sanity check all the mkt infos
+                for fqme, flume in feed.flumes.items():
+                    mkt: MktPair = symcache.mktmaps.get(fqme) or mkt_by_fqme[fqme]
+                    if mkt != flume.mkt:
+                        diff: tuple = mkt - flume.mkt
+                        log.warning(
+                            'MktPair sig mismatch?\n'
+                            f'{pformat(diff)}'
+                        )
+
+                get_cost: Callable = getattr(
+                    brokermod,
+                    'get_cost',
+                    None,
+                )
+
+                async with (
+                    ctx.open_stream() as ems_stream,
+                    trio.open_nursery() as n,
+                ):
+                    client = PaperBoi(
+                        broker=broker,
+                        ems_trades_stream=ems_stream,
+                        acnt=acnt,
+                        ledger=ledger,
+                        fees=get_cost,
+
+                        _buys=_buys,
+                        _sells=_sells,
+                        _reqids=_reqids,
+
+                        _mkts=mkt_by_fqme,
+                    )
+
+                    n.start_soon(
+                        handle_order_requests,
+                        client,
+                        ems_stream,
+                    )
+
+                    # paper engine simulator clearing task
+                    await simulate_fills(feed.streams[broker], client)


-@asynccontextmanager
+@acm
 async def open_paperboi(
-    fqsn: str,
-    loglevel: str,
+    fqme: str | None = None,
+    broker: str | None = None,
+    loglevel: str | None = None,

 ) -> Callable:
     '''
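`open_trade_dialog()` leans on each backend's `get_mkt_info()` endpoint whenever the symcache lacks an fqme entry, expecting a `(mkt, pair)` 2-tuple back (see the `mkt, pair = await gmi(bs_fqme)` calls above). A rough stub of that EP shape, with an in-memory table standing in for a real provider client (all names and fields below are assumptions for illustration, not definitions from this diff):

# stub of the assumed backend `get_mkt_info()` EP shape; plain dicts
# stand in for the real `MktPair`/pair structs and the canned table
# fakes a provider client request.
from typing import Any

_fake_pairs: dict[str, dict[str, Any]] = {
    'xmrusdt': {'symbol': 'XMRUSDT', 'tick_size': 0.01, 'lot_size': 0.001},
}

async def get_mkt_info(bs_fqme: str) -> tuple[dict, dict]:
    pair: dict = _fake_pairs[bs_fqme.split('.')[0]]
    mkt: dict = {  # stand-in for a normalized `MktPair`
        'fqme': f'{bs_fqme}.fakebroker',
        'price_tick': pair['tick_size'],
        'size_tick': pair['lot_size'],
    }
    return mkt, pair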
@@ -599,28 +752,91 @@ async def open_paperboi(
     its context.

     '''
-    broker, symbol, expiry = unpack_fqsn(fqsn)
+    if not fqme:
+        assert broker, 'One of `broker` or `fqme` is required siss..!'
+    else:
+        broker, _, _, _ = unpack_fqme(fqme)

     we_spawned: bool = False
     service_name = f'paperboi.{broker}'

     async with (
-        tractor.find_actor(service_name) as portal,
-        tractor.open_nursery() as tn,
+        find_service(service_name) as portal,
+        tractor.open_nursery() as an,
     ):
-        # only spawn if no paperboi already is up
-        # (we likely don't need more then one proc for basic
-        # simulated order clearing)
+        # NOTE: only spawn if no paperboi already is up since we likely
+        # don't need more then one actor for simulated order clearing
+        # per broker-backend.
         if portal is None:
             log.info('Starting new paper-engine actor')
-            portal = await tn.start_actor(
+            portal = await an.start_actor(
                 service_name,
                 enable_modules=[__name__]
             )
             we_spawned = True

         async with portal.open_context(
-            trades_dialogue,
+            open_trade_dialog,
             broker=broker,
-            fqsn=fqsn,
+            fqme=fqme,
             loglevel=loglevel,

         ) as (ctx, first):
             yield ctx, first

             # ALWAYS tear down connection AND any newly spawned
             # paperboi actor on exit!
             await ctx.cancel()

             if we_spawned:
                 await portal.cancel_actor()


+def norm_trade(
+    tid: str,
+    txdict: dict,
+    pairs: dict[str, Struct],
+    symcache: SymbologyCache | None = None,
+
+    brokermod: ModuleType | None = None,
+
+) -> Transaction:
+    from pendulum import (
+        DateTime,
+        parse,
+    )
+
+    # special field handling for datetimes
+    # to ensure pendulum is used!
+    dt: DateTime = parse(txdict['dt'])
+    expiry: str | None = txdict.get('expiry')
+    fqme: str = txdict.get('fqme') or txdict.pop('fqsn')
+
+    price: float = txdict['price']
+    size: float = txdict['size']
+    cost: float = txdict.get('cost', 0)
+    if (
+        brokermod
+        and (get_cost := getattr(
+            brokermod,
+            'get_cost',
+            False,
+        ))
+    ):
+        cost = get_cost(
+            price,
+            size,
+            is_taker=True,
+        )
+
+    return Transaction(
+        fqme=fqme,
+        tid=txdict['tid'],
+        dt=dt,
+        price=price,
+        size=size,
+        cost=cost,
+        bs_mktid=txdict['bs_mktid'],
+        expiry=parse(expiry) if expiry else None,
+        etype='clear',
+    )
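`norm_trade()` is the paper engine's ledger-entry normalizer: it parses the `dt` field with `pendulum`, optionally recomputes fees via the backend's `get_cost()` EP, and returns a typed `Transaction`. A usage sketch with an illustrative entry (key names match what the function reads above; the values are made up):

# illustrative `norm_trade()` input; keys come from the function body
# above, values are invented.
txdict = {
    'tid': 'deadbeef-0001',
    'dt': '2023-07-01T14:00:00+00:00',
    'fqme': 'xmrusdt.binance',
    'price': 160.25,
    'size': 2.0,
    'cost': 0.24,
    'bs_mktid': 'xmrusdt.binance',
}
t = norm_trade(
    tid=txdict['tid'],
    txdict=txdict,
    pairs={},  # no provider-native pair structs needed here
)
assert t.etype == 'clear'  # assumes `Transaction` exposes its fields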
piker/clearing/_util.py (new file)
@@ -0,0 +1,93 @@
+# piker: trading gear for hackers
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU Affero General Public License for more details.
+
+# You should have received a copy of the GNU Affero General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+"""
+Sub-sys module commons.
+
+"""
+from collections import ChainMap
+from functools import partial
+from typing import Any
+
+from ..log import (
+    get_logger,
+    get_console_log,
+)
+from piker.types import Struct
+
+subsys: str = 'piker.clearing'
+
+log = get_logger(subsys)
+
+get_console_log = partial(
+    get_console_log,
+    name=subsys,
+)
+
+
+class OrderDialogs(Struct):
+    '''
+    Order control dialog (and thus transaction) tracking via
+    message recording.
+
+    Allows easily recording messages associated with a given set of
+    order control transactions and looking up the latest field
+    state using the entire (reverse chronological) msg flow.
+
+    '''
+    _flows: dict[str, ChainMap] = {}
+
+    def add_msg(
+        self,
+        oid: str,
+        msg: dict,
+    ) -> None:
+
+        # NOTE: manually enter a new map on the first msg add to
+        # avoid creating one with an empty dict first entry in
+        # `ChainMap.maps` which is the default if none passed at
+        # init.
+        cm: ChainMap = self._flows.get(oid)
+        if cm:
+            cm.maps.insert(0, msg)
+        else:
+            cm = ChainMap(msg)
+            self._flows[oid] = cm
+
+    # TODO: wrap all this in the `collections.abc.Mapping` interface?
+    def get(
+        self,
+        oid: str,
+
+    ) -> ChainMap[str, Any]:
+        '''
+        Return the dialog `ChainMap` for provided id.
+
+        '''
+        return self._flows.get(oid, None)
+
+    def pop(
+        self,
+        oid: str,
+
+    ) -> ChainMap[str, Any]:
+        '''
+        Pop and thus remove the `ChainMap` containing the msg flow
+        for the given order id.
+
+        '''
+        if (flow := self._flows.pop(oid, None)) is None:
+            log.warning(f'No flow found for oid: {oid}')
+
+        return flow
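The `ChainMap`-per-oid trick gives `OrderDialogs` latest-wins field lookup with the full msg history retained underneath, since `add_msg()` inserts each new msg at the front of `.maps`. For example:

# usage sketch: newest msg wins on lookup, older field state stays
# reachable via the underlying `.maps` list.
dialogs = OrderDialogs()
dialogs.add_msg('oid-1', {'status': 'pending', 'price': 100.0})
dialogs.add_msg('oid-1', {'status': 'open'})

flow = dialogs.get('oid-1')
assert flow['status'] == 'open'  # from the newest msg
assert flow['price'] == 100.0    # inherited from the older msg
assert flow.maps[1]['status'] == 'pending'  # history intact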
@@ -1,28 +1,33 @@
 # piker: trading gear for hackers
-# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers)
+# Copyright (C) 2018-present Tyler Goodlet
+# (in stewardship for pikers, everywhere.)

-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU Affero General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
+# This program is free software: you can redistribute it and/or
+# modify it under the terms of the GNU Affero General Public
+# License as published by the Free Software Foundation, either
+# version 3 of the License, or (at your option) any later version.

 # This program is distributed in the hope that it will be useful,
 # but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU Affero General Public License for more details.
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
+# Affero General Public License for more details.

-# You should have received a copy of the GNU Affero General Public License
-# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+# You should have received a copy of the GNU Affero General Public
+# License along with this program.  If not, see
+# <https://www.gnu.org/licenses/>.

 '''
 CLI commons.

 '''
 import os
+# from contextlib import AsyncExitStack
+from types import ModuleType

 import click
 import trio
 import tractor
+from tractor._multiaddr import parse_maddr

 from ..log import (
     get_console_log,
@@ -37,74 +42,178 @@ from ..service import (
 from .. import config


-log = get_logger('cli')
+log = get_logger('piker.cli')
+
+
+def load_trans_eps(
+    network: dict | None = None,
+    maddrs: list[tuple] | None = None,
+
+) -> dict[str, dict[str, dict]]:
+
+    # transport-oriented endpoint multi-addresses
+    eps: dict[
+        str,  # service name, eg. `pikerd`, `emsd`..
+
+        # libp2p style multi-addresses parsed into prot layers
+        list[dict[str, str | int]]
+    ] = {}
+
+    if (
+        network
+        and not maddrs
+    ):
+        # load network section and (attempt to) connect all endpoints
+        # which are reachable B)
+        for key, maddrs in network.items():
+            match key:
+
+                # TODO: resolve table across multiple discov
+                # prots Bo
+                case 'resolv':
+                    pass
+
+                case 'pikerd':
+                    dname: str = key
+                    for maddr in maddrs:
+                        layers: dict = parse_maddr(maddr)
+                        eps.setdefault(
+                            dname,
+                            [],
+                        ).append(layers)
+
+    elif maddrs:
+        # presume user is manually specifying the root actor ep.
+        eps['pikerd'] = [parse_maddr(maddr)]
+
+    return eps


 @click.command()
-@click.option('--loglevel', '-l', default='warning', help='Logging level')
-@click.option('--tl', is_flag=True, help='Enable tractor logging')
-@click.option('--pdb', is_flag=True, help='Enable tractor debug mode')
-@click.option('--host', '-h', default=None, help='Host addr to bind')
-@click.option('--port', '-p', default=None, help='Port number to bind')
 @click.option(
-    '--tsdb',
-    is_flag=True,
-    help='Enable local ``marketstore`` instance'
+    '--loglevel',
+    '-l',
+    default='warning',
+    help='Logging level',
 )
 @click.option(
-    '--es',
+    '--tl',
     is_flag=True,
-    help='Enable local ``elasticsearch`` instance'
+    help='Enable tractor-runtime logs',
+)
+@click.option(
+    '--pdb',
+    is_flag=True,
+    help='Enable tractor debug mode',
+)
+@click.option(
+    '--maddr',
+    '-m',
+    default=None,
+    help='Multiaddrs to bind or contact',
 )
+# @click.option(
+#     '--tsdb',
+#     is_flag=True,
+#     help='Enable local ``marketstore`` instance'
+# )
+# @click.option(
+#     '--es',
+#     is_flag=True,
+#     help='Enable local ``elasticsearch`` instance'
+# )
 def pikerd(
+    maddr: list[str] | None,
     loglevel: str,
-    host: str,
-    port: int,
     tl: bool,
     pdb: bool,
-    tsdb: bool,
-    es: bool,
+    # tsdb: bool,
+    # es: bool,
 ):
     '''
     Spawn the piker broker-daemon.

     '''
-    from ..service import open_pikerd
-    log = get_console_log(loglevel)
+    from tractor.devx import maybe_open_crash_handler
+    with maybe_open_crash_handler(pdb=pdb):
+        log = get_console_log(loglevel, name='cli')

-    if pdb:
-        log.warning((
-            "\n"
-            "!!! You have enabled daemon DEBUG mode !!!\n"
-            "If a daemon crashes it will likely block"
-            " the service until resumed from console!\n"
-            "\n"
-        ))
+        if pdb:
+            log.warning((
+                "\n"
+                "!!! YOU HAVE ENABLED DAEMON DEBUG MODE !!!\n"
+                "When a `piker` daemon crashes it will block the "
+                "task-thread until resumed from console!\n"
+                "\n"
+            ))

-    reg_addr: None | tuple[str, int] = None
-    if host or port:
-        reg_addr = (
-            host or _default_registry_host,
-            int(port) or _default_registry_port,
-        )
+        # service-actor registry endpoint socket-address set
+        regaddrs: list[tuple[str, int]] = []
+
+        conf, _ = config.load(
+            conf_name='conf',
+        )
+        network: dict = conf.get('network')
+        if (
+            network is None
+            and not maddr
+        ):
+            regaddrs = [(
+                _default_registry_host,
+                _default_registry_port,
+            )]

-    async def main():
-        async with (
-            open_pikerd(
-                tsdb=tsdb,
-                es=es,
-                loglevel=loglevel,
-                debug_mode=pdb,
-                registry_addr=reg_addr,
-
-            ),  # normally delivers a ``Services`` handle
-            trio.open_nursery() as n,
-        ):
-
-            await trio.sleep_forever()
+        else:
+            eps: dict = load_trans_eps(
+                network,
+                maddr,
+            )
+            for layers in eps['pikerd']:
+                regaddrs.append((
+                    layers['ipv4']['addr'],
+                    layers['tcp']['port'],
+                ))

-    trio.run(main)
+        from .. import service
+
+        async def main():
+            service_mngr: service.Services
+
+            async with (
+                service.open_pikerd(
+                    registry_addrs=regaddrs,
+                    loglevel=loglevel,
+                    debug_mode=pdb,
+
+                ) as service_mngr,  # normally delivers a ``Services`` handle
+
+                # AsyncExitStack() as stack,
+            ):
+                # TODO: spawn all other sub-actor daemons according to
+                # multiaddress endpoint spec defined by user config
+                assert service_mngr
+
+                # if tsdb:
+                #     dname, conf = await stack.enter_async_context(
+                #         service.marketstore.start_ahab_daemon(
+                #             service_mngr,
+                #             loglevel=loglevel,
+                #         )
+                #     )
+                #     log.info(f'TSDB `{dname}` up with conf:\n{conf}')
+
+                # if es:
+                #     dname, conf = await stack.enter_async_context(
+                #         service.elastic.start_ahab_daemon(
+                #             service_mngr,
+                #             loglevel=loglevel,
+                #         )
+                #     )
+                #     log.info(f'DB `{dname}` up with conf:\n{conf}')
+
+                await trio.sleep_forever()
+
+        trio.run(main)


 @click.group(context_settings=config._context_defaults)
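`load_trans_eps()` expects the user's `conf.toml` to carry a `[network]` table mapping service names to lists of libp2p-style multiaddrs; `tractor._multiaddr.parse_maddr()` splits each into protocol layers and `pikerd` then binds its registry from `layers['ipv4']['addr']` / `layers['tcp']['port']`. A sketch of the decoded shape it iterates (the maddr string is illustrative; the exact grammar is whatever `parse_maddr()` accepts):

# sketch: the `[network]` section as it looks after `config.load()`
# decodes conf.toml; the maddr value is illustrative only.
network: dict = {
    'pikerd': [
        '/ipv4/127.0.0.1/tcp/6116',
    ],
}
eps = load_trans_eps(network=network)
# expected to yield something like:
# {'pikerd': [{'ipv4': {'addr': '127.0.0.1'}, 'tcp': {'port': 6116}, ...}]}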
@@ -117,8 +226,24 @@ def pikerd(
 @click.option('--loglevel', '-l', default='warning', help='Logging level')
 @click.option('--tl', is_flag=True, help='Enable tractor logging')
 @click.option('--configdir', '-c', help='Configuration directory')
-@click.option('--host', '-h', default=None, help='Host addr to bind')
-@click.option('--port', '-p', default=None, help='Port number to bind')
+@click.option(
+    '--pdb',
+    is_flag=True,
+    help='Enable runtime debug mode ',
+)
+@click.option(
+    '--maddr',
+    '-m',
+    default=None,
+    multiple=True,
+    help='Multiaddr to bind',
+)
+@click.option(
+    '--regaddr',
+    '-r',
+    default=None,
+    help='Registrar addr to contact',
+)
 @click.pass_context
 def cli(
     ctx: click.Context,
@@ -126,14 +251,19 @@ def cli(
     loglevel: str,
     tl: bool,
     configdir: str,
-    host: str,
-    port: int,
+    pdb: bool,
+
+    # TODO: make these list[str] with multiple -m maddr0 -m maddr1
+    maddr: list[str],
+    regaddr: str,

 ) -> None:
     if configdir is not None:
         assert os.path.isdir(configdir), f"`{configdir}` is not a valid path"
         config._override_config_dir(configdir)

+    # TODO: for typer see
+    # https://typer.tiangolo.com/tutorial/commands/context/
     ctx.ensure_object(dict)

     if not brokers:
@@ -141,15 +271,25 @@ def cli(
         from piker.brokers import __brokers__
         brokers = __brokers__

-    brokermods = [get_brokermod(broker) for broker in brokers]
+    brokermods: dict[str, ModuleType] = {
+        broker: get_brokermod(broker) for broker in brokers
+    }
     assert brokermods

-    reg_addr: None | tuple[str, int] = None
-    if host or port:
-        reg_addr = (
-            host or _default_registry_host,
-            int(port) or _default_registry_port,
-        )
+    # TODO: load endpoints from `conf::[network].pikerd`
+    # - pikerd vs. regd, separate registry daemon?
+    # - expose datad vs. brokerd?
+    # - bind emsd with certain perms on public iface?
+    regaddrs: list[tuple[str, int]] = regaddr or [(
+        _default_registry_host,
+        _default_registry_port,
+    )]
+
+    # TODO: factor [network] section parsing out from pikerd
+    # above and call it here as well.
+    # if maddr:
+    #     for addr in maddr:
+    #         layers: dict = parse_maddr(addr)

     ctx.obj.update({
         'brokers': brokers,
@@ -159,7 +299,12 @@ def cli(
         'log': get_console_log(loglevel),
         'confdir': config._config_dir,
         'wl_path': config._watchlists_data_path,
-        'registry_addr': reg_addr,
+        'registry_addrs': regaddrs,
+        'pdb': pdb,  # debug mode flag
+
+        # TODO: endpoint parsing, pinging and binding
+        # on no existing server.
+        # 'maddrs': maddr,
     })

     # allow enabling same loglevel in ``tractor`` machinery
@@ -206,13 +351,15 @@ def services(config, tl, ports):


 def _load_clis() -> None:
-    from ..service import marketstore  # noqa
-    from ..service import elastic
     from ..data import cli  # noqa
+    # from ..service import elastic  # noqa
     from ..brokers import cli  # noqa
     from ..ui import cli  # noqa
-    from ..watchlists import cli  # noqa
+
+    # typer implemented
+    from ..storage import cli  # noqa
+    from ..accounting import cli  # noqa


 # load downstream cli modules
 _load_clis()


piker/config.py (236 changed lines)
@@ -21,14 +21,20 @@ Platform configuration (files) mgmt.
 import platform
 import sys
 import os
-from os import path
-from os.path import dirname
 import shutil
-from typing import Optional
+from typing import (
+    Callable,
+    MutableMapping,
+)
 from pathlib import Path

 from bidict import bidict
-import toml
+import tomlkit
+try:
+    import tomllib
+except ModuleNotFoundError:
+    import tomli as tomllib


 from .log import get_logger
@@ -98,14 +104,15 @@ def get_app_dir(
     # `tractor`) with the testing dir and check for it whenever we
     # detect `pytest` is being used (which it isn't under normal
     # operation).
-    if "pytest" in sys.modules:
-        import tractor
-        actor = tractor.current_actor(err_on_no_runtime=False)
-        if actor:  # runtime is up
-            rvs = tractor._state._runtime_vars
-            testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
-            assert testdirpath.exists(), 'piker test harness might be borked!?'
-            app_name = str(testdirpath)
+    # if "pytest" in sys.modules:
+    #     import tractor
+    #     actor = tractor.current_actor(err_on_no_runtime=False)
+    #     if actor:  # runtime is up
+    #         rvs = tractor._state._runtime_vars
+    #         import pdbp; pdbp.set_trace()
+    #         testdirpath = Path(rvs['piker_vars']['piker_test_dir'])
+    #         assert testdirpath.exists(), 'piker test harness might be borked!?'
+    #         app_name = str(testdirpath)

     if platform.system() == 'Windows':
         key = "APPDATA" if roaming else "LOCALAPPDATA"
@@ -126,30 +133,38 @@ def get_app_dir(
     )


-_config_dir = _click_config_dir = get_app_dir('piker')
-_parent_user = os.environ.get('SUDO_USER')
+_click_config_dir: Path = Path(get_app_dir('piker'))
+_config_dir: Path = _click_config_dir

-if _parent_user:
-    non_root_user_dir = os.path.expanduser(
-        f'~{_parent_user}'
+# NOTE: when using `sudo` we attempt to determine the non-root user
+# and still use their normal config dir.
+if (
+    (_parent_user := os.environ.get('SUDO_USER'))
+    and
+    _parent_user != 'root'
+):
+    non_root_user_dir = Path(
+        os.path.expanduser(f'~{_parent_user}')
     )
-    root = 'root'
+    root: str = 'root'
+    _ccds: str = str(_click_config_dir)  # click config dir as string
+    i_tail: int = int(_ccds.rfind(root) + len(root))
     _config_dir = (
-        non_root_user_dir +
-        _click_config_dir[
-            _click_config_dir.rfind(root) + len(root):
-        ]
+        non_root_user_dir
+        /
+        Path(_ccds[i_tail+1:])  # +1 to capture trailing '/'
     )


 _conf_names: set[str] = {
-    'brokers',
-    'pps',
-    'trades',
-    'watchlists',
-    'paper_trades'
+    'conf',  # god config
+    'brokers',  # sec backend deatz
+    'watchlists',  # (user defined) market lists
 }

-_watchlists_data_path = os.path.join(_config_dir, 'watchlists.json')
+# TODO: probably drop all this super legacy, questrade specific,
+# config stuff XD ?
+_watchlists_data_path: Path = _config_dir / Path('watchlists.json')
 _context_defaults = dict(
     default_map={
         # Questrade specific quote poll rates
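The tail-slice above rebases the click-derived dir from root's home onto the `sudo`-invoking user's home. Worked through with illustrative paths:

# worked example of the tail-slice rebase (paths illustrative).
from pathlib import Path

_ccds = '/root/.config/piker'
root = 'root'
i_tail = int(_ccds.rfind(root) + len(root))  # index just past 'root'
# the +1 skips the '/' so the joined tail stays a relative path
assert Path('/home/alice') / Path(_ccds[i_tail + 1:]) \
    == Path('/home/alice/.config/piker')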
@@ -163,6 +178,14 @@ _context_defaults = dict(
 )


+class ConfigurationError(Exception):
+    'Misconfigured settings, likely in a TOML file.'
+
+
+class NoSignature(ConfigurationError):
+    'No credentials setup for broker backend!'
+
+
 def _override_config_dir(
     path: str
 ) -> None:
@@ -177,10 +200,19 @@ def _conf_fn_w_ext(
     return f'{name}.toml'


+def get_conf_dir() -> Path:
+    '''
+    Return the user configuration directory ``Path``
+    on the local filesystem.
+
+    '''
+    return _config_dir
+
+
 def get_conf_path(
     conf_name: str = 'brokers',

-) -> str:
+) -> Path:
     '''
     Return the top-level default config path normally under
     ``~/.config/piker`` on linux for a given ``conf_name``, the config
@@ -188,7 +220,6 @@ def get_conf_path(

     Contains files such as:
     - brokers.toml
-    - pp.toml
     - watchlists.toml

     # maybe coming soon ;)
			@ -196,72 +227,102 @@ def get_conf_path(
 | 
			
		|||
    - strats.toml
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    assert conf_name in _conf_names
 | 
			
		||||
    if 'account.' not in conf_name:
 | 
			
		||||
        assert str(conf_name) in _conf_names
 | 
			
		||||
 | 
			
		||||
    fn = _conf_fn_w_ext(conf_name)
 | 
			
		||||
    return os.path.join(
 | 
			
		||||
        _config_dir,
 | 
			
		||||
        fn,
 | 
			
		||||
    )
 | 
			
		||||
    return _config_dir / Path(fn)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def repodir():
 | 
			
		||||
def repodir() -> Path:
 | 
			
		||||
    '''
 | 
			
		||||
    Return the abspath to the repo directory.
 | 
			
		||||
    Return the abspath as ``Path`` to the git repo's root dir.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    dirpath = path.abspath(
 | 
			
		||||
        # we're 3 levels down in **this** module file
 | 
			
		||||
        dirname(dirname(os.path.realpath(__file__)))
 | 
			
		||||
    )
 | 
			
		||||
    return dirpath
 | 
			
		||||
    repodir: Path = Path(__file__).absolute().parent.parent
 | 
			
		||||
    confdir: Path = repodir / 'config'
 | 
			
		||||
 | 
			
		||||
    if not confdir.is_dir():
 | 
			
		||||
        # prolly inside stupid GH actions CI..
 | 
			
		||||
        repodir: Path = Path(os.environ.get('GITHUB_WORKSPACE'))
 | 
			
		||||
        confdir: Path = repodir / 'config'
 | 
			
		||||
 | 
			
		||||
    assert confdir.is_dir(), f'{confdir} DNE, {repodir} is likely incorrect!'
    return repodir


def load(
    conf_name: str = 'brokers',
    path: str = None,
    # NOTE: always appended with .toml suffix
    conf_name: str = 'conf',
    path: Path | None = None,

    decode: Callable[
        [str | bytes,],
        MutableMapping,
    ] = tomllib.loads,

    touch_if_dne: bool = False,

    **tomlkws,

) -> (dict, str):
) -> tuple[dict, Path]:
    '''
    Load config file by name.

    If desired config is not in the top level piker-user config path then
    pass the ``path: Path`` explicitly.

    '''
    path = path or get_conf_path(conf_name)

    if not os.path.isdir(_config_dir):
        Path(_config_dir).mkdir(parents=True, exist_ok=True)

    if not os.path.isfile(path):
        fn = _conf_fn_w_ext(conf_name)

        template = os.path.join(
            repodir(),
            'config',
            fn
    # create the $HOME/.config/piker dir if dne
    if not _config_dir.is_dir():
        _config_dir.mkdir(
            parents=True,
            exist_ok=True,
        )

    path_provided: bool = path is not None
    path: Path = path or get_conf_path(conf_name)

    if (
        not path.is_file()
        and touch_if_dne
    ):
        # if an explicit path was provided just touch an empty file
        # with the same name; only copy in a template when no path
        # was given.
        if path_provided:
            with path.open(mode='x'):
                pass

        # try to copy in a template config to the user's dir if one
        # exists.
        else:
            fn: str = _conf_fn_w_ext(conf_name)
            template: Path = repodir() / 'config' / fn
            if template.is_file():
                shutil.copyfile(template, path)

            elif fn and template:
                assert template.is_file(), f'{template} is not a file!?'

            assert path.is_file(), f'Config file {path} not created!?'

    with path.open(mode='r') as fp:
        config: dict = decode(
            fp.read(),
            **tomlkws,
        )
        # try to copy in a template config to the user's directory
        # if one exists.
        if os.path.isfile(template):
            shutil.copyfile(template, path)
        else:
            # create an empty file
            with open(path, 'x'):
                pass
    else:
        with open(path, 'r'):
            pass  # touch it

    config = toml.load(path, **tomlkws)
    log.debug(f"Read config file {path}")
    return config, path
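
# --- editor example (not part of the diff) --------------------------------
# A minimal usage sketch of the (new) `load()` API above; assumes this
# module is importable as `piker.config` (file paths are not shown by this
# diff view). The conf name is always suffixed with `.toml` and a
# `tuple[dict, Path]` comes back:
from piker import config

conf, path = config.load(
    conf_name='brokers',
    touch_if_dne=True,  # create an empty file when missing
)
assert path.name == 'brokers.toml'
# ---------------------------------------------------------------------------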


def write(
    config: dict,  # toml config as dict
    name: str = 'brokers',
    path: str = None,

    name: str | None = None,
    path: Path | None = None,
    fail_empty: bool = True,

    **toml_kwargs,

) -> None:
@@ -271,34 +332,41 @@ def write(
    Create a ``brokers.toml`` file if one does not exist.

    '''
    path = path or get_conf_path(name)
    dirname = os.path.dirname(path)
    if not os.path.isdir(dirname):
        log.debug(f"Creating config dir {_config_dir}")
        os.makedirs(dirname)
    if name:
        path: Path = path or get_conf_path(name)
        dirname: Path = path.parent
        if not dirname.is_dir():
            log.debug(f"Creating config dir {_config_dir}")
            dirname.mkdir()

    if not config and fail_empty:
    if (
        not config
        and fail_empty
    ):
        raise ValueError(
            "Watch out you're trying to write a blank config!")
            "Watch out you're trying to write a blank config!"
        )

    log.debug(
        f"Writing config `{name}` file to:\n"
        f"{path}"
    )
    with open(path, 'w') as cf:
        return toml.dump(
    with path.open(mode='w') as fp:
        return tomlkit.dump(  # preserve style on write B)
            config,
            cf,
            fp,
            **toml_kwargs,
        )
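
# --- editor example (not part of the diff) --------------------------------
# A load-modify-write round-trip sketch; `tomlkit.dump()` on the write side
# preserves comments/formatting in the user's file. The `piker.config`
# import path is assumed, not shown in this diff:
from piker import config

conf, path = config.load(conf_name='brokers')
conf.setdefault('kraken', {})['accounts'] = {'paper': 'demo'}
config.write(
    conf,
    name='brokers',  # resolves to e.g. ~/.config/piker/brokers.toml
)
# ---------------------------------------------------------------------------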


def load_accounts(
    providers: Optional[list[str]] = None
    providers: list[str] | None = None

) -> bidict[str, Optional[str]]:
) -> bidict[str, str | None]:

    conf, path = load()
    conf, path = load(
        conf_name='brokers',
    )
    accounts = bidict()
    for provider_name, section in conf.items():
        accounts_section = section.get('accounts')
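
# --- editor example (not part of the diff) --------------------------------
# The `bidict` return type gives a two-way alias <-> account-number map;
# a hypothetical sketch (keys/values invented for illustration):
from bidict import bidict

accounts: bidict = bidict({'ib.paper': 'DU1234567'})
assert accounts['ib.paper'] == 'DU1234567'
assert accounts.inverse['DU1234567'] == 'ib.paper'
# ---------------------------------------------------------------------------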
@@ -22,13 +22,7 @@ and storing data from your brokers as well as
sharing live streams over a network.

"""
import tractor
import trio

from ..log import (
    get_console_log,
)
from ._normalize import iterticks
from .ticktools import iterticks
from ._sharedmem import (
    maybe_open_shm_array,
    attach_shm_array,
@@ -36,53 +30,42 @@ from ._sharedmem import (
    get_shm_token,
    ShmArray,
)
from ._source import (
    def_iohlcv_fields,
    def_ohlcv_fields,
)
from .feed import (
    Feed,
    open_feed,
)
from .flows import Flume
from ._symcache import (
    SymbologyCache,
    open_symcache,
    get_symcache,
    match_from_pairs,
)
from ._sampling import open_sample_stream
from ..types import Struct


__all__ = [
__all__: list[str] = [
    'Flume',
    'Feed',
    'open_feed',
    'ShmArray',
    'iterticks',
    'maybe_open_shm_array',
    'match_from_pairs',
    'attach_shm_array',
    'open_shm_array',
    'get_shm_token',
    'def_iohlcv_fields',
    'def_ohlcv_fields',
    'open_symcache',
    'open_sample_stream',
    'get_symcache',
    'Struct',
    'SymbologyCache',
    'types',
]


@tractor.context
async def _setup_persistent_brokerd(
    ctx: tractor.Context,
    brokername: str,

) -> None:
    '''
    Allocate an actor-wide service nursery in ``brokerd``
    such that feeds can be run in the background persistently by
    the broker backend as needed.

    '''
    get_console_log(tractor.current_actor().loglevel)

    from .feed import (
        _bus,
        get_feed_bus,
    )
    global _bus
    assert not _bus

    async with trio.open_nursery() as service_nursery:
        # assign a nursery to the feeds bus for spawning
        # background tasks from clients
        get_feed_bus(brokername, service_nursery)

        # unblock caller
        await ctx.started()

        # we pin this task to keep the feeds manager active until the
        # parent actor decides to tear it down
        await trio.sleep_forever()

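# --- editor sketch (not part of the diff) ---------------------------------
# How a supervising actor might start this context via the standard
# `tractor` portal API; `portal` here is a hypothetical handle to a
# spawned `brokerd` actor:
#
#     async with portal.open_context(
#         _setup_persistent_brokerd,
#         brokername='kraken',
#     ) as (ctx, first):
#         # the brokerd-side service nursery is now live; feed tasks
#         # can be spawned against its feed bus until teardown.
#         ...
# ---------------------------------------------------------------------------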
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -13,10 +13,10 @@

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
'''
Pre-(path)-graphics formatted x/y nd/1d rendering subsystem.

"""
'''
from __future__ import annotations
from typing import (
    Optional,
@@ -39,7 +39,12 @@ if TYPE_CHECKING:
    from ._dataviz import (
        Viz,
    )
    from .._profile import Profiler
    from piker.toolz import Profiler

# default gap between bars: "bar gap multiplier"
# - 0.5 is no overlap between OC arms,
# - 1.0 is full overlap on each neighbor sample
BGM: float = 0.16

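# --- editor note (not part of the diff) -----------------------------------
# The bar-gap multiplier scales with the chart's index step, e.g. for 1s
# bars (index_step_size == 1.0):
#
#     bar_gap = BGM * index_step_size  # 0.16 * 1.0 -> 0.16
#
# which matches the `bar_gap=gap * self.index_step_size` call further down.
# ---------------------------------------------------------------------------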
class IncrementalFormatter(msgspec.Struct):
@@ -222,6 +227,7 @@ class IncrementalFormatter(msgspec.Struct):
        profiler: Profiler,

        slice_to_inview: bool = True,
        force_full_realloc: bool = False,

    ) -> tuple[
        np.ndarray,
@@ -248,7 +254,10 @@ class IncrementalFormatter(msgspec.Struct):

        # we first need to allocate xy data arrays
        # from the source data.
        if self.y_nd is None:
        if (
            self.y_nd is None
            or force_full_realloc
        ):
            self.xy_nd_start = shm._first.value
            self.xy_nd_stop = shm._last.value
            self.x_nd, self.y_nd = self.allocate_xy_nd(
@@ -509,6 +518,7 @@ class IncrementalFormatter(msgspec.Struct):


class OHLCBarsFmtr(IncrementalFormatter):

    x_offset: np.ndarray = np.array([
        -0.5,
        0,
@@ -600,8 +610,9 @@ class OHLCBarsFmtr(IncrementalFormatter):
        vr: tuple[int, int],

        start: int = 0,  # XXX: do we need this?

        # 0.5 is no overlap between arms, 1.0 is full overlap
        w: float = 0.16,
        gap: float = BGM,

    ) -> tuple[
        np.ndarray,
@@ -618,7 +629,7 @@ class OHLCBarsFmtr(IncrementalFormatter):
            array[:-1],
            start,
            bar_w=self.index_step_size,
            bar_gap=w * self.index_step_size,
            bar_gap=gap * self.index_step_size,

            # XXX: don't ask, due to a ``numba`` bug..
            use_time_index=(self.index_field == 'time'),

@@ -42,10 +42,7 @@ from numba import (
    # float64, optional, int64,
)

from ..log import get_logger


log = get_logger(__name__)
from ._util import log


def ds_m4(

@@ -1,82 +0,0 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Stream format enforcement.

'''
from itertools import chain
from typing import AsyncIterator


def iterticks(
    quote: dict,
    types: tuple[str] = (
        'trade',
        'dark_trade',
    ),
    deduplicate_darks: bool = False,

) -> AsyncIterator:
    '''
    Iterate through ticks delivered per quote cycle.

    '''
    if deduplicate_darks:
        assert 'dark_trade' in types

    # print(f"{quote}\n\n")
    ticks = quote.get('ticks', ())
    trades = {}
    darks = {}

    if ticks:

        # do a first pass and attempt to remove duplicate dark
        # trades with the same tick signature.
        if deduplicate_darks:
            for tick in ticks:
                ttype = tick.get('type')

                time = tick.get('time', None)
                if time:
                    sig = (
                        time,
                        tick['price'],
                        tick.get('size')
                    )

                    if ttype == 'dark_trade':
                        darks[sig] = tick

                    elif ttype == 'trade':
                        trades[sig] = tick

            # filter duplicates
            for sig, tick in trades.items():
                tick = darks.pop(sig, None)
                if tick:
                    ticks.remove(tick)
                    # print(f'DUPLICATE {tick}')

            # re-insert ticks
            ticks.extend(list(chain(trades.values(), darks.values())))

        for tick in ticks:
            # print(f"{quote['symbol']}: {tick}")
            ttype = tick.get('type')
            if ttype in types:
                yield tick
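
# --- editor example (not part of the diff) --------------------------------
# Minimal sketch of the (relocated) `iterticks()` filtering a quote with a
# duplicated dark trade; the `piker.data.ticktools` path is inferred from
# the import change above:
from piker.data.ticktools import iterticks

quote = {
    'symbol': 'xbtusdt',
    'ticks': [
        {'type': 'trade', 'time': 1.0, 'price': 100.0, 'size': 1.0},
        {'type': 'dark_trade', 'time': 1.0, 'price': 100.0, 'size': 1.0},
    ],
}
for tick in iterticks(quote, deduplicate_darks=True):
    print(tick['type'], tick['price'])  # the dark duplicate is filtered
# ---------------------------------------------------------------------------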
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -17,11 +17,6 @@
Super fast ``QPainterPath`` generation related operator routines.

"""
from math import (
    ceil,
    floor,
)

import numpy as np
from numpy.lib import recfunctions as rfn
from numba import (
@@ -35,11 +30,6 @@ from numba import (
# TODO: for ``numba`` typing..
# from ._source import numba_ohlc_dtype
from ._m4 import ds_m4
from .._profile import (
    Profiler,
    pg_profile_enabled,
    ms_slower_then,
)


def xy_downsample(
@@ -135,7 +125,7 @@ def path_arrays_from_ohlc(
    half_w: float = bar_w/2

    # TODO: report bug for assert @
    # /home/goodboy/repos/piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
    # ../piker/env/lib/python3.8/site-packages/numba/core/typing/builtins.py:991
    for i, q in enumerate(data[start:], start):

        open = q['open']
@@ -237,20 +227,20 @@ def trace_hl(

    for i in range(hl.size):
        row = hl[i]
        l, h = row['low'], row['high']
        lo, hi = row['low'], row['high']

        up_diff = h - last_l
        down_diff = last_h - l
        up_diff = hi - last_l
        down_diff = last_h - lo

        if up_diff > down_diff:
            out[2*i + 1] = h
            out[2*i + 1] = hi
            out[2*i] = last_l
        else:
            out[2*i + 1] = l
            out[2*i + 1] = lo
            out[2*i] = last_h

        last_l = l
        last_h = h
        last_l = lo
        last_h = hi

        x[2*i] = int(i) - margin
        x[2*i + 1] = int(i) + margin
@@ -289,158 +279,3 @@ def ohlc_flatten(
            num=len(flat),
        )
    return x, flat


def slice_from_time(
    arr: np.ndarray,
    start_t: float,
    stop_t: float,
    step: float,  # sampler period step-diff

) -> slice:
    '''
    Calculate array indices mapped from a time range and return them in
    a slice.

    Given an input array with an epoch `'time'` series entry, calculate
    the indices which span the time range and return in a slice. Presume
    each `'time'` step increment is uniform and when the time stamp
    series contains gaps (the uniform presumption is untrue) use
    ``np.searchsorted()`` binary search to look up the appropriate
    index.

    '''
    profiler = Profiler(
        msg='slice_from_time()',
        disabled=not pg_profile_enabled(),
        ms_threshold=ms_slower_then,
    )

    times = arr['time']
    t_first = floor(times[0])
    t_last = ceil(times[-1])

    # the greatest index we can return which slices to the
    # end of the input array.
    read_i_max = arr.shape[0]

    # compute (presumed) uniform-time-step index offsets
    i_start_t = floor(start_t)
    read_i_start = floor(((i_start_t - t_first) // step)) - 1

    i_stop_t = ceil(stop_t)

    # XXX: edge case -> always set stop index to last in array whenever
    # the input stop time is detected to be greater than the equiv time
    # stamp at that last entry.
    if i_stop_t >= t_last:
        read_i_stop = read_i_max
    else:
        read_i_stop = ceil((i_stop_t - t_first) // step) + 1

    # always clip outputs to array support
    # for read start:
    # - never allow a start < the 0 index
    # - never allow an end index > the read array len
    read_i_start = min(
        max(0, read_i_start),
        read_i_max - 1,
    )
    read_i_stop = max(
        0,
        min(read_i_stop, read_i_max),
    )

    # check for a larger-than-latest calculated index for the given start
    # time, in which case we do a binary search for the correct index.
    # NOTE: this is usually the result of a time series with time gaps
    # where it is expected that each index step maps to a uniform step
    # in the time stamp series.
    t_iv_start = times[read_i_start]
    if (
        t_iv_start > i_start_t
    ):
        # do a binary search for the best index mapping to ``start_t``
        # given we measured an overshoot using the uniform-time-step
        # calculation from above.

        # TODO: once we start caching these per source-array,
        # we can just overwrite ``read_i_start`` directly.
        new_read_i_start = np.searchsorted(
            times,
            i_start_t,
            side='left',
        )

        # TODO: minimize binary search work as much as possible:
        # - cache these remap values which compensate for gaps in the
        #   uniform time step basis where we calc a later start
        #   index for the given input ``start_t``.
        # - can we shorten the input search sequence by heuristic?
        #   up_to_arith_start = index[:read_i_start]

        if (
            new_read_i_start <= read_i_start
        ):
            # t_diff = t_iv_start - start_t
            # print(
            #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
            #     f'start_t:{start_t} -> 0index start_t:{t_iv_start}\n'
            #     f'diff: {t_diff}\n'
            #     f'REMAPPED START i: {read_i_start} -> {new_read_i_start}\n'
            # )
            read_i_start = new_read_i_start

    t_iv_stop = times[read_i_stop - 1]
    if (
        t_iv_stop > i_stop_t
    ):
        # t_diff = stop_t - t_iv_stop
        # print(
        #     f"WE'RE CUTTING OUT TIME - STEP:{step}\n"
        #     f'calced iv stop:{t_iv_stop} -> stop_t:{stop_t}\n'
        #     f'diff: {t_diff}\n'
        #     # f'SHOULD REMAP STOP: {read_i_start} -> {new_read_i_start}\n'
        # )
        new_read_i_stop = np.searchsorted(
            times[read_i_start:],
            # times,
            i_stop_t,
            side='right',
        )

        if (
            new_read_i_stop <= read_i_stop
        ):
            read_i_stop = read_i_start + new_read_i_stop + 1

    # sanity checks for range size
    # samples = (i_stop_t - i_start_t) // step
    # index_diff = read_i_stop - read_i_start + 1
    # if index_diff > (samples + 3):
    #     breakpoint()

    # read-relative indexes: gives a slice where `shm.array[read_slc]`
    # will be the data spanning the input time range `start_t` ->
    # `stop_t`
    read_slc = slice(
        int(read_i_start),
        int(read_i_stop),
    )

    profiler(
        'slicing complete'
        # f'{start_t} -> {abs_slc.start} | {read_slc.start}\n'
        # f'{stop_t} -> {abs_slc.stop} | {read_slc.stop}\n'
    )

    # NOTE: if caller needs absolute buffer indices they can
    # slice the buffer abs index like so:
    # index = arr['index']
    # abs_indx = index[read_slc]
    # abs_slc = slice(
    #     int(abs_indx[0]),
    #     int(abs_indx[-1]),
    # )

    return read_slc
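
# --- editor example (not part of the diff) --------------------------------
# Behavior sketch for `slice_from_time()` (removed here, presumably
# relocated): on a uniform 1s series the slice is pure index arithmetic,
# and a time gap triggers the `np.searchsorted()` fallback shown above.
# Assumes the function is importable into scope:
import numpy as np

arr = np.zeros(6, dtype=[('time', float)])
arr['time'] = [100., 101., 102., 110., 111., 112.]  # gap after 102s

read_slc = slice_from_time(arr, start_t=110., stop_t=112., step=1.)
print(arr['time'][read_slc])  # -> [110. 111. 112.] despite the gap
# ---------------------------------------------------------------------------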
@@ -27,19 +27,29 @@ from collections import (
from contextlib import asynccontextmanager as acm
import time
from typing import (
    Any,
    AsyncIterator,
    TYPE_CHECKING,
)

import tractor
from tractor import (
    Context,
    MsgStream,
    Channel,
)
from tractor.trionics import (
    maybe_open_nursery,
)
import trio
from trio_typing import TaskStatus

from ..log import (
    get_logger,
from .ticktools import (
    frame_ticks,
    _tick_groups,
)
from ._util import (
    log,
    get_console_log,
)
from ..service import maybe_spawn_daemon
@@ -48,9 +58,10 @@ if TYPE_CHECKING:
    from ._sharedmem import (
        ShmArray,
    )
    from .feed import _FeedsBus

log = get_logger(__name__)
    from .feed import (
        _FeedsBus,
        Sub,
    )


# highest frequency sample step is 1 second by default, though in
@@ -91,7 +102,7 @@ class Sampler:
        float,
        list[
            float,
            set[tractor.MsgStream]
            set[MsgStream]
        ],
    ] = defaultdict(
        lambda: [
@@ -232,6 +243,7 @@ class Sampler:
        self,
        period_s: float,
        time_stamp: float | None = None,
        info: dict | None = None,

    ) -> None:
        '''
@@ -254,16 +266,20 @@ class Sampler:
            f'broadcasting {period_s} -> {last_ts}\n'
            # f'consumers: {subs}'
        )
        borked: set[tractor.MsgStream] = set()
        sent: set[tractor.MsgStream] = set()
        borked: set[MsgStream] = set()
        sent: set[MsgStream] = set()
        while True:
            try:
                for stream in (subs - sent):
                    try:
                        await stream.send({
                        msg = {
                            'index': time_stamp or last_ts,
                            'period': period_s,
                        })
                        }
                        if info:
                            msg.update(info)

                        await stream.send(msg)
                        sent.add(stream)

                    except (
@@ -289,14 +305,24 @@ class Sampler:
                )

    @classmethod
    async def broadcast_all(self) -> None:
        for period_s in self.subscribers:
            await self.broadcast(period_s)
    async def broadcast_all(
        self,
        info: dict | None = None,
    ) -> None:

        # NOTE: take a copy of subs since removals can happen
        # during the broadcast checkpoint which can cause
        # a `RuntimeError` on iteration of the underlying `dict`.
        for period_s in list(self.subscribers):
            await self.broadcast(
                period_s,
                info=info,
            )

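# --- editor note (not part of the diff) -----------------------------------
# Wire-shape sketch: a subscriber can now trigger a broadcast that carries
# extra info, matching the `if 'broadcast_all' in msg:` handler further
# down (the info keys here are illustrative only):
#
#     await stream.send({
#         'broadcast_all': {'backfilling': True},
#     })
#
# every subscriber then receives the usual
# `{'index': <ts>, 'period': <s>}` msg updated with that info dict.
# ---------------------------------------------------------------------------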
@tractor.context
async def register_with_sampler(
    ctx: tractor.Context,
    ctx: Context,
    period_s: float,
    shms_by_period: dict[float, dict] | None = None,

@@ -353,17 +379,29 @@ async def register_with_sampler(

            if open_index_stream:
                try:
                    async with ctx.open_stream() as stream:
                    async with ctx.open_stream(
                        allow_overruns=True,
                    ) as stream:
                        if sub_for_broadcasts:
                            subs.add(stream)

                        # accept broadcast requests from the subscriber
                        async for msg in stream:
                            if msg == 'broadcast_all':
                                await Sampler.broadcast_all()
                            if 'broadcast_all' in msg:
                                await Sampler.broadcast_all(
                                    info=msg['broadcast_all'],
                                )
                finally:
                    if sub_for_broadcasts:
                        subs.remove(stream)
                    if (
                        sub_for_broadcasts
                        and subs
                    ):
                        try:
                            subs.remove(stream)
                        except KeyError:
                            log.warning(
                                f'{stream._ctx.chan.uid} sub already removed!?'
                            )
            else:
                # if no shms are passed in we just wait until cancelled
                # by caller.
@@ -429,7 +467,7 @@ async def spawn_samplerd(
async def maybe_open_samplerd(

    loglevel: str | None = None,
    **kwargs,
    **pikerd_kwargs,

) -> tractor.Portal:  # noqa
    '''
@@ -442,9 +480,9 @@ async def maybe_open_samplerd(
    async with maybe_spawn_daemon(
        dname,
        service_task_target=spawn_samplerd,
        spawn_args={'loglevel': loglevel},
        spawn_args={},
        loglevel=loglevel,
        **kwargs,
        **pikerd_kwargs,

    ) as portal:
        yield portal
@@ -460,6 +498,8 @@ async def open_sample_stream(
    cache_key: str | None = None,
    allow_new_sampler: bool = True,

    ensure_is_active: bool = False,

) -> AsyncIterator[dict[str, float]]:
    '''
    Subscribe to OHLC sampling "step" events: when the time aggregation
@@ -502,11 +542,20 @@ async def open_sample_stream(
            },
        ) as (ctx, first)
    ):
        async with (
            ctx.open_stream() as istream,
        if ensure_is_active:
            assert len(first) > 1

            # TODO: we don't need this task-bcasting right?
            # istream.subscribe() as istream,
        async with (
            ctx.open_stream(
                allow_overruns=True,
            ) as istream,

            # TODO: we DO need this task-bcasting so that
            # for eg. the history chart update loop eventually
            # receives all backfilling event msgs such that
            # the underlying graphics format arrays are
            # re-allocated until all history is loaded!
            istream.subscribe() as istream,
        ):
            yield istream

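# --- editor sketch (not part of the diff) ---------------------------------
# Consumer-side usage of the sampler stream; assumes a running pikerd
# service tree so `maybe_open_samplerd()` can locate/spawn the daemon:
#
#     async with open_sample_stream(period_s=1.) as istream:
#         async for msg in istream:
#             # e.g. {'index': 1697049600.0, 'period': 1.0, ...}
#             print(msg)
# ---------------------------------------------------------------------------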
@@ -551,9 +600,9 @@ async def sample_and_broadcast(
            # TODO: we should probably not write every single
            # value to an OHLC sample stream XD
            # for a tick stream sure.. but this is excessive..
            ticks = quote['ticks']
            ticks: list[dict] = quote['ticks']
            for tick in ticks:
                ticktype = tick['type']
                ticktype: str = tick['type']

                # write trade events to shm last OHLC sample
                if ticktype in ('trade', 'utrade'):
@@ -563,13 +612,14 @@ async def sample_and_broadcast(
                    # more compact inline-way to do this assignment
                    # to both buffers?
                    for shm in [rt_shm, hist_shm]:

                        # update last entry
                        # benchmarked in the 4-5 us range
                        o, high, low, v = shm.array[-1][
                            ['open', 'high', 'low', 'volume']
                        ]

                        new_v = tick.get('size', 0)
                        new_v: float = tick.get('size', 0)

                        if v == 0 and new_v:
                            # no trades for this bar yet so the open
@@ -588,14 +638,14 @@ async def sample_and_broadcast(
                            'high',
                            'low',
                            'close',
                            'bar_wap',  # can be optionally provided
                            # 'bar_wap',  # can be optionally provided
                            'volume',
                        ]][-1] = (
                            o,
                            max(high, last),
                            min(low, last),
                            last,
                            quote.get('bar_wap', 0),
                            # quote.get('bar_wap', 0),
                            volume,
                        )

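# --- editor example (not part of the diff) --------------------------------
# Self-contained sketch of the multi-field last-row update used above, on
# a plain numpy structured array standing in for `shm.array` (relies on
# numpy >= 1.16 multi-field view assignment semantics):
import numpy as np

ohlcv = np.zeros(2, dtype=[
    ('open', float), ('high', float), ('low', float),
    ('close', float), ('volume', float),
])
last_px, size = 101.5, 3.0

o, high, low, v = ohlcv[-1][['open', 'high', 'low', 'volume']]
ohlcv[['high', 'low', 'close', 'volume']][-1] = (
    max(high, last_px),
    min(low or last_px, last_px),  # treat the 0-init low as unset
    last_px,
    v + size,
)
print(ohlcv[-1])
# ---------------------------------------------------------------------------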
@@ -607,48 +657,49 @@ async def sample_and_broadcast(
            # eventually block this producer end of the feed and
            # thus other consumers still attached.
            sub_key: str = broker_symbol.lower()
            subs: list[
                tuple[
                    tractor.MsgStream | trio.MemorySendChannel,
                    float | None,  # tick throttle in Hz
                ]
            ] = bus.get_subs(sub_key)
            subs: set[Sub] = bus.get_subs(sub_key)

            # NOTE: by default the broker backend doesn't append
            # its own "name" into the fqsn schema (but maybe it
            # its own "name" into the fqme schema (but maybe it
            # should?) so we have to manually generate the correct
            # key here.
            fqsn = f'{broker_symbol}.{brokername}'
            fqme: str = f'{broker_symbol}.{brokername}'
            lags: int = 0

            # TODO: speed up this loop in an AOT compiled lang (like
            # rust or nim or zig) and/or instead of doing a fan out to
            # TCP sockets here, we add a shm-style tick queue which
            # readers can pull from instead of placing the burden of
            # broadcast solely on this `brokerd` actor. see issues:
            # XXX TODO XXX: speed up this loop in an AOT compiled
            # lang (like rust or nim or zig)!
            # AND/OR instead of doing a fan out to TCP sockets
            # here, we add a shm-style tick queue which readers can
            # pull from instead of placing the burden of broadcast
            # solely on this `brokerd` actor. see issues:
            # - https://github.com/pikers/piker/issues/98
            # - https://github.com/pikers/piker/issues/107

            for (stream, tick_throttle) in subs.copy():
            # for (stream, tick_throttle) in subs.copy():
            for sub in subs.copy():
                ipc: MsgStream = sub.ipc
                throttle: float = sub.throttle_rate
                try:
                    with trio.move_on_after(0.2) as cs:
                        if tick_throttle:
                        if throttle:
                            send_chan: trio.abc.SendChannel = sub.send_chan

                            # this is a send mem chan that likely
                            # pushes to the ``uniform_rate_send()`` below.
                            try:
                                stream.send_nowait(
                                    (fqsn, quote)
                                send_chan.send_nowait(
                                    (fqme, quote)
                                )
                            except trio.WouldBlock:
                                overruns[sub_key] += 1
                                ctx = stream._ctx
                                chan = ctx.chan
                                ctx: Context = ipc._ctx
                                chan: Channel = ctx.chan

                                log.warning(
                                    f'Feed OVERRUN {sub_key}'
                                    f'@{bus.brokername} -> \n'
                                    f'feed @ {chan.uid}\n'
                                    f'throttle = {tick_throttle} Hz'
                                    f'throttle = {throttle} Hz'
                                )

                                if overruns[sub_key] > 6:
@@ -665,33 +716,33 @@ async def sample_and_broadcast(
                                            f'{sub_key}:'
                                            f'{ctx.cid}@{chan.uid}'
                                        )
                                        await stream.aclose()
                                        await ipc.aclose()
                                        raise trio.BrokenResourceError
                        else:
                            await stream.send(
                                {fqsn: quote}
                            await ipc.send(
                                {fqme: quote}
                            )

                    if cs.cancelled_caught:
                        lags += 1
                        if lags > 10:
                            await tractor.breakpoint()
                            await tractor.pause()

                except (
                    trio.BrokenResourceError,
                    trio.ClosedResourceError,
                    trio.EndOfChannel,
                ):
                    ctx = stream._ctx
                    chan = ctx.chan
                    ctx: Context = ipc._ctx
                    chan: Channel = ctx.chan
                    if ctx:
                        log.warning(
                            'Dropped `brokerd`-quotes-feed connection:\n'
                            f'{broker_symbol}:'
                            f'{ctx.cid}@{chan.uid}'
                        )
                    if tick_throttle:
                        assert stream._closed
                    if sub.throttle_rate:
                        assert ipc._closed

                    # XXX: do we need to deregister here
                    # if it's done in the feed bus code?
@@ -700,69 +751,15 @@ async def sample_and_broadcast(
                    # since there seems to be some kinda race..
                    bus.remove_subs(
                        sub_key,
                        {(stream, tick_throttle)},
                        {sub},
                    )


# a working tick-type-classes template
_tick_groups = {
    'clears': {'trade', 'dark_trade', 'last'},
    'bids': {'bid', 'bsize'},
    'asks': {'ask', 'asize'},
}


def frame_ticks(
    first_quote: dict,
    last_quote: dict,
    ticks_by_type: dict,
) -> None:
    # append quotes since last iteration into the last quote's
    # tick array/buffer.
    ticks = last_quote.get('ticks')

    # TODO: once we decide to get fancy really we should
    # have a shared mem tick buffer that is just
    # continually filled and the UI just reads from it
    # at its display rate.
    if ticks:
        # TODO: do we need this any more or can we just
        # expect the receiver to unwind the below
        # `ticks_by_type: dict`?
        # => unwinding would potentially require a
        # `dict[str, set | list]` instead with an
        # included `'types'` field which is an (ordered)
        # set of tick type fields in the order which
        # types arrived?
        first_quote['ticks'].extend(ticks)

        # XXX: build a tick-by-type table of lists
        # of tick messages. This allows for less
        # iteration on the receiver side by allowing for
        # a single "latest tick event" look up by
        # indexing the last entry in each sub-list.
        # tbt = {
        #     'types': ['bid', 'asize', 'last', .. '<type_n>'],

        #     'bid': [tick0, tick1, tick2, .., tickn],
        #     'asize': [tick0, tick1, tick2, .., tickn],
        #     'last': [tick0, tick1, tick2, .., tickn],
        #     ...
        #     '<type_n>': [tick0, tick1, tick2, .., tickn],
        # }

        # append in reverse FIFO order for in-order iteration on
        # receiver side.
        for tick in ticks:
            ttype = tick['type']
            ticks_by_type[ttype].append(tick)

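# --- editor example (not part of the diff) --------------------------------
# Sketch of the tick-by-type table built by the (pre-move) `frame_ticks()`
# above; the input quotes are invented for illustration:
from collections import defaultdict

first_quote = {'ticks': [{'type': 'bid', 'price': 99.0}]}
last_quote = {'ticks': [{'type': 'last', 'price': 100.0}]}
ticks_by_type: dict = defaultdict(list)

frame_ticks(first_quote, last_quote, ticks_by_type)
assert first_quote['ticks'][-1]['type'] == 'last'   # frame extended
assert ticks_by_type['last'][-1]['price'] == 100.0  # latest-by-type lookup
# ---------------------------------------------------------------------------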
async def uniform_rate_send(
 | 
			
		||||
 | 
			
		||||
    rate: float,
 | 
			
		||||
    quote_stream: trio.abc.ReceiveChannel,
 | 
			
		||||
    stream: tractor.MsgStream,
 | 
			
		||||
    stream: MsgStream,
 | 
			
		||||
 | 
			
		||||
    task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -782,9 +779,6 @@ async def uniform_rate_send(
 | 
			
		|||
    https://gist.github.com/njsmith/7ea44ec07e901cb78ebe1dd8dd846cb9
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    # try not to error-out on overruns of the subscribed client
 | 
			
		||||
    stream._ctx._backpressure = True
 | 
			
		||||
 | 
			
		||||
    # TODO: compute the approx overhead latency per cycle
 | 
			
		||||
    left_to_sleep = throttle_period = 1/rate - 0.000616
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -794,10 +788,10 @@ async def uniform_rate_send(
 | 
			
		|||
    diff = 0
 | 
			
		||||
 | 
			
		||||
    task_status.started()
 | 
			
		||||
    ticks_by_type: defaultdict[
 | 
			
		||||
    ticks_by_type: dict[
 | 
			
		||||
        str,
 | 
			
		||||
        list[dict],
 | 
			
		||||
    ] = defaultdict(list)
 | 
			
		||||
        list[dict[str, Any]],
 | 
			
		||||
    ] = {}
 | 
			
		||||
 | 
			
		||||
    clear_types = _tick_groups['clears']
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -825,9 +819,9 @@ async def uniform_rate_send(
 | 
			
		|||
                    # expired we aren't supposed to send yet so append
 | 
			
		||||
                    # to the tick frame.
 | 
			
		||||
                    frame_ticks(
 | 
			
		||||
                        first_quote,
 | 
			
		||||
                        last_quote,
 | 
			
		||||
                        ticks_by_type,
 | 
			
		||||
                        ticks_in_order=first_quote['ticks'],
 | 
			
		||||
                        ticks_by_type=ticks_by_type,
 | 
			
		||||
                    )
 | 
			
		||||
 | 
			
		||||
                    # send cycle isn't due yet so continue waiting
 | 
			
		||||
| 
						 | 
				
			
			@ -847,8 +841,8 @@ async def uniform_rate_send(
 | 
			
		|||
 | 
			
		||||
                frame_ticks(
 | 
			
		||||
                    first_quote,
 | 
			
		||||
                    first_quote,
 | 
			
		||||
                    ticks_by_type,
 | 
			
		||||
                    ticks_in_order=first_quote['ticks'],
 | 
			
		||||
                    ticks_by_type=ticks_by_type,
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
            # we have a quote already so send it now.
 | 
			
		||||
| 
						 | 
				
			
			@ -864,9 +858,9 @@ async def uniform_rate_send(
 | 
			
		|||
                    break
 | 
			
		||||
 | 
			
		||||
                frame_ticks(
 | 
			
		||||
                    first_quote,
 | 
			
		||||
                    last_quote,
 | 
			
		||||
                    ticks_by_type,
 | 
			
		||||
                    ticks_in_order=first_quote['ticks'],
 | 
			
		||||
                    ticks_by_type=ticks_by_type,
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
        # measured_rate = 1 / (time.time() - last_send)
 | 
			
		||||
| 
						 | 
				
			
			
 | 
			
		|||
| 
						 | 
				
			
			@ -32,21 +32,9 @@ import numpy as np
 | 
			
		|||
from numpy.lib import recfunctions as rfn
 | 
			
		||||
import tractor
 | 
			
		||||
 | 
			
		||||
from ..log import get_logger
 | 
			
		||||
from ._source import base_iohlc_dtype
 | 
			
		||||
from .types import Struct
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
log = get_logger(__name__)
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
# how  much is probably dependent on lifestyle
 | 
			
		||||
_secs_in_day = int(60 * 60 * 24)
 | 
			
		||||
# we try for a buncha times, but only on a run-every-other-day kinda week.
 | 
			
		||||
_days_worth = 16
 | 
			
		||||
_default_size = _days_worth * _secs_in_day
 | 
			
		||||
# where to start the new data append index
 | 
			
		||||
_rt_buffer_start = int((_days_worth - 1) * _secs_in_day)
 | 
			
		||||
from ._util import log
 | 
			
		||||
from ._source import def_iohlcv_fields
 | 
			
		||||
from piker.types import Struct
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
def cuckoff_mantracker():
 | 
			
		||||
| 
						 | 
				
			
			@ -73,7 +61,6 @@ def cuckoff_mantracker():
 | 
			
		|||
    mantracker._resource_tracker = ManTracker()
 | 
			
		||||
    mantracker.register = mantracker._resource_tracker.register
 | 
			
		||||
    mantracker.ensure_running = mantracker._resource_tracker.ensure_running
 | 
			
		||||
    # ensure_running = mantracker._resource_tracker.ensure_running
 | 
			
		||||
    mantracker.unregister = mantracker._resource_tracker.unregister
 | 
			
		||||
    mantracker.getfd = mantracker._resource_tracker.getfd
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -171,7 +158,7 @@ def _make_token(
 | 
			
		|||
    to access a shared array.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    dtype = base_iohlc_dtype if dtype is None else dtype
 | 
			
		||||
    dtype = def_iohlcv_fields if dtype is None else dtype
 | 
			
		||||
    return _Token(
 | 
			
		||||
        shm_name=key,
 | 
			
		||||
        shm_first_index_name=key + "_first",
 | 
			
		||||
| 
						 | 
				
			
			@ -261,7 +248,6 @@ class ShmArray:
 | 
			
		|||
        # to load an empty array..
 | 
			
		||||
        if len(a) == 0 and self._post_init:
 | 
			
		||||
            raise RuntimeError('Empty array race condition hit!?')
 | 
			
		||||
            # breakpoint()
 | 
			
		||||
 | 
			
		||||
        return a
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -271,7 +257,7 @@ class ShmArray:
 | 
			
		|||
 | 
			
		||||
        # type that all field values will be cast to
 | 
			
		||||
        # in the returned view.
 | 
			
		||||
        common_dtype: np.dtype = np.float,
 | 
			
		||||
        common_dtype: np.dtype = float,
 | 
			
		||||
 | 
			
		||||
    ) -> np.ndarray:
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -326,7 +312,7 @@ class ShmArray:
 | 
			
		|||
        field_map: Optional[dict[str, str]] = None,
 | 
			
		||||
        prepend: bool = False,
 | 
			
		||||
        update_first: bool = True,
 | 
			
		||||
        start: Optional[int] = None,
 | 
			
		||||
        start: int | None = None,
 | 
			
		||||
 | 
			
		||||
    ) -> int:
 | 
			
		||||
        '''
 | 
			
		||||
| 
						 | 
				
			
			@ -368,7 +354,11 @@ class ShmArray:
 | 
			
		|||
            # tries to access ``.array`` (which due to the index
 | 
			
		||||
            # overlap will be empty). Pretty sure we've fixed it now
 | 
			
		||||
            # but leaving this here as a reminder.
 | 
			
		||||
            if prepend and update_first and length:
 | 
			
		||||
            if (
 | 
			
		||||
                prepend
 | 
			
		||||
                and update_first
 | 
			
		||||
                and length
 | 
			
		||||
            ):
 | 
			
		||||
                assert index < self._first.value
 | 
			
		||||
 | 
			
		||||
            if (
 | 
			
		||||
| 
						 | 
				
			
			@ -442,10 +432,10 @@ class ShmArray:
 | 
			
		|||
 | 
			
		||||
 | 
			
		||||
def open_shm_array(
 | 
			
		||||
 | 
			
		||||
    key: Optional[str] = None,
 | 
			
		||||
    size: int = _default_size,  # see above
 | 
			
		||||
    dtype: Optional[np.dtype] = None,
 | 
			
		||||
    size: int,
 | 
			
		||||
    key: str | None = None,
 | 
			
		||||
    dtype: np.dtype | None = None,
 | 
			
		||||
    append_start_index: int | None = None,
 | 
			
		||||
    readonly: bool = False,
 | 
			
		||||
 | 
			
		||||
) -> ShmArray:
 | 
			
		||||
| 
						 | 
				
			
			@ -510,10 +500,13 @@ def open_shm_array(
 | 
			
		|||
    # ``ShmArray._start.value: int = 0`` and the yet-to-be written
 | 
			
		||||
    # real-time section will start at ``ShmArray.index: int``.
 | 
			
		||||
 | 
			
		||||
    # this sets the index to 3/4 of the length of the buffer
 | 
			
		||||
    # leaving a "days worth of second samples" for the real-time
 | 
			
		||||
    # section.
 | 
			
		||||
    last.value = first.value = _rt_buffer_start
 | 
			
		||||
    # this sets the index to nearly 2/3rds into the the length of
 | 
			
		||||
    # the buffer leaving at least a "days worth of second samples"
 | 
			
		||||
    # for the real-time section.
 | 
			
		||||
    if append_start_index is None:
 | 
			
		||||
        append_start_index = round(size * 0.616)
 | 
			
		||||
 | 
			
		||||
    last.value = first.value = append_start_index
 | 
			
		||||
 | 
			
		||||
    shmarr = ShmArray(
 | 
			
		||||
        array,
 | 
			
		||||
| 
						 | 
				
			
			@ -527,7 +520,6 @@ def open_shm_array(
 | 
			
		|||
 | 
			
		||||
    # "unlink" created shm on process teardown by
 | 
			
		||||
    # pushing teardown calls onto actor context stack
 | 
			
		||||
 | 
			
		||||
    stack = tractor.current_actor().lifetime_stack
 | 
			
		||||
    stack.callback(shmarr.close)
 | 
			
		||||
    stack.callback(shmarr.destroy)
 | 
			
		||||
| 
						 | 
				
			
			@ -622,7 +614,10 @@ def attach_shm_array(
 | 
			
		|||
 | 
			
		||||
def maybe_open_shm_array(
 | 
			
		||||
    key: str,
 | 
			
		||||
    dtype: Optional[np.dtype] = None,
 | 
			
		||||
    size: int,
 | 
			
		||||
    dtype: np.dtype | None = None,
 | 
			
		||||
    append_start_index: int | None = None,
 | 
			
		||||
    readonly: bool = False,
 | 
			
		||||
    **kwargs,
 | 
			
		||||
 | 
			
		||||
) -> tuple[ShmArray, bool]:
 | 
			
		||||
| 
						 | 
				
			
@@ -643,13 +638,18 @@ def maybe_open_shm_array(
    use ``attach_shm_array``.

    '''
-    size = kwargs.pop('size', _default_size)
    try:
        # see if we already know this key
        token = _known_tokens[key]
-        return attach_shm_array(token=token, **kwargs), False
+        return (
+            attach_shm_array(
+                token=token,
+                readonly=readonly,
+            ),
+            False,
+        )
    except KeyError:
-        log.warning(f"Could not find {key} in shms cache")
+        log.debug(f"Could not find {key} in shms cache")
        if dtype:
            token = _make_token(
                key,
@@ -659,15 +659,23 @@ def maybe_open_shm_array(
            try:
                return attach_shm_array(token=token, **kwargs), False
            except FileNotFoundError:
-                log.warning(f"Could not attach to shm with token {token}")
+                log.debug(f"Could not attach to shm with token {token}")

        # This actor does not know about memory
        # associated with the provided "key".
        # Attempt to open a block and expect
        # to fail if a block has been allocated
        # on the OS by someone else.
-        return open_shm_array(key=key, dtype=dtype, **kwargs), True
+        return (
+            open_shm_array(
+                key=key,
+                size=size,
+                dtype=dtype,
+                append_start_index=append_start_index,
+                readonly=readonly,
+            ),
+            True,
+        )
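A hypothetical caller sketch for the attach-or-create pattern above (the key
and sizing are invented; `def_iohlcv_fields` is the default field list defined
further down in this changeset):

    import numpy as np

    shm, opened = maybe_open_shm_array(
        key='binance.btcusdt.ohlcv',       # made-up shm key
        size=10 * 24 * 60 * 60,            # ~10 days of 1s samples
        dtype=np.dtype(def_iohlcv_fields),
    )
    if opened:
        # first allocator: seed history below `append_start_index`
        ...
    else:
        # block already existed: we just attached to it
        ...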


def try_read(
    array: np.ndarray
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
-# Copyright (C) 2018-present  Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) 2018-present  Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -18,38 +18,47 @@
numpy data source conversion helpers.
"""
from __future__ import annotations
from decimal import (
    Decimal,
    ROUND_HALF_EVEN,
)
from typing import Any

from bidict import bidict
import numpy as np

from .types import Struct
# from numba import from_dtype


-ohlc_fields = [
-    ('time', float),
+def_iohlcv_fields: list[tuple[str, type]] = [
+
+    # YES WE KNOW, this isn't needed in polars but we use it for doing
+    # ring-buffer like pre/append ops on our `ShmArray` real-time
+    # numpy-array buffering system such that there is a master index
+    # that can be used for index-arithmetic when writing data to the
+    # "middle" of the array. See the ``tractor.ipc.shm`` pkg for more
+    # details.
+    ('index', int),
+
+    # presume int for epoch stamps since it's most common
+    # and makes the most sense to avoid float rounding issues.
+    # TODO: if we want higher reso we should use the new
+    # ``time.time_ns()`` in python 3.10+
+    ('time', int),
    ('open', float),
    ('high', float),
    ('low', float),
    ('close', float),
    ('volume', float),
-    ('bar_wap', float),
+
+    # TODO: can we elim this from default field set to save on mem?
+    # i think only kraken really uses this in terms of what we get from
+    # their ohlc history API?
+    # ('bar_wap', float),  # shouldn't be default right?
]

-ohlc_with_index = ohlc_fields.copy()
-ohlc_with_index.insert(0, ('index', int))
-
-# our minimum structured array layout for ohlc data
-base_iohlc_dtype = np.dtype(ohlc_with_index)
-base_ohlc_dtype = np.dtype(ohlc_fields)
+# remove index field
+def_ohlcv_fields: list[tuple[str, type]] = def_iohlcv_fields.copy()
+def_ohlcv_fields.pop(0)
+assert (len(def_iohlcv_fields) - len(def_ohlcv_fields)) == 1

# TODO: for now need to construct this manually for readonly arrays, see
# https://github.com/numba/numba/issues/4511
# from numba import from_dtype
-# numba_ohlc_dtype = from_dtype(base_ohlc_dtype)
+# base_ohlc_dtype = np.dtype(def_ohlc_fields)
+# numba_ohlc_dtype = from_dtype(base_ohlc_dtype)
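A quick, illustrative check of what these field lists produce (the
`base_iohlcv_dtype` / `bars` names are invented for the example):

    import numpy as np

    base_iohlcv_dtype = np.dtype(def_iohlcv_fields)   # with master 'index'
    base_ohlcv_dtype = np.dtype(def_ohlcv_fields)     # without it

    bars = np.zeros(3, dtype=base_iohlcv_dtype)
    bars['index'] = np.arange(3)
    print(bars.dtype.names)
    # -> ('index', 'time', 'open', 'high', 'low', 'close', 'volume')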

# map time frame "keys" to seconds values
@@ -64,32 +73,6 @@ tf_in_1s = bidict({
})


-def mk_fqsn(
-    provider: str,
-    symbol: str,
-
-) -> str:
-    '''
-    Generate a "fully qualified symbol name" which is
-    a reverse-hierarchical cross broker/provider symbol
-
-    '''
-    return '.'.join([symbol, provider]).lower()
-
-
-def float_digits(
-    value: float,
-) -> int:
-    '''
-    Return the number of precision digits read from a float value.
-
-    '''
-    if value == 0:
-        return 0
-
-    return int(-Decimal(str(value)).as_tuple().exponent)
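Behavior sketch for the removed `float_digits` helper (presumably relocated
elsewhere in the tree): the `Decimal` exponent of a float's string repr gives
its precision digit count.

    from decimal import Decimal

    def float_digits(value: float) -> int:
        if value == 0:
            return 0
        return int(-Decimal(str(value)).as_tuple().exponent)

    assert float_digits(0.001) == 3   # eg. a 0.001 tick size -> 3 digits
    assert float_digits(0.25) == 2
    assert float_digits(0) == 0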


def ohlc_zeros(length: int) -> np.ndarray:
    """Construct an OHLC field formatted structarray.
@@ -100,220 +83,6 @@ def ohlc_zeros(length: int) -> np.ndarray:
    return np.zeros(length, dtype=base_ohlc_dtype)


-def unpack_fqsn(fqsn: str) -> tuple[str, str, str]:
-    '''
-    Unpack a fully-qualified-symbol-name to ``tuple``.
-
-    '''
-    venue = ''
-    suffix = ''
-
-    # TODO: probably reverse the order of all this XD
-    tokens = fqsn.split('.')
-    if len(tokens) < 3:
-        # probably crypto
-        symbol, broker = tokens
-        return (
-            broker,
-            symbol,
-            '',
-        )
-
-    elif len(tokens) > 3:
-        symbol, venue, suffix, broker = tokens
-    else:
-        symbol, venue, broker = tokens
-        suffix = ''
-
-    # head, _, broker = fqsn.rpartition('.')
-    # symbol, _, suffix = head.rpartition('.')
-    return (
-        broker,
-        '.'.join([symbol, venue]),
-        suffix,
-    )
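Behavior sketch for the removed `unpack_fqsn` (fqsn layout is
<symbol>[.<venue>[.<suffix>]].<broker>; the symbols below are made up):

    assert unpack_fqsn('xbtusd.kraken') == ('kraken', 'xbtusd', '')
    assert unpack_fqsn('mnq.cme.ib') == ('ib', 'mnq.cme', '')
    assert unpack_fqsn('mnq.cme.20230616.ib') == ('ib', 'mnq.cme', '20230616')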
-class MktPair(Struct, frozen=True):
-
-    src: str  # source asset name being used to buy
-    src_type: str  # source asset's financial type/classification name
-    # ^ specifies a "class" of financial instrument
-    # egs. stock, future, option, bond etc.
-
-    dst: str  # destination asset name being bought
-    dst_type: str  # destination asset's financial type/classification name
-
-    price_tick: float  # minimum price increment value
-    price_tick_digits: int  # required decimal digits for above
-
-    size_tick: float  # minimum size (aka vlm) increment value
-    size_tick_digits: int  # required decimal digits for above
-
-    venue: str | None = None  # market venue provider name
-    expiry: str | None = None  # for derivs, expiry datetime parseable str
-
-    # for derivs, info describing contract, egs.
-    # strike price, call or put, swap type, exercise model, etc.
-    contract_info: str | None = None
-
-    @classmethod
-    def from_msg(
-        self,
-        msg: dict[str, Any],
-
-    ) -> MktPair:
-        '''
-        Constructor for a received msg-dict normally received over IPC.
-
-        '''
-        ...
-
-    # fqa, fqma, .. etc. see issue:
-    # https://github.com/pikers/piker/issues/467
-    @property
-    def fqsn(self) -> str:
-        '''
-        Return the fully qualified market (endpoint) name for the
-        pair of transacting assets.
-
-        '''
-        ...
-# TODO: rework the below `Symbol` (which was originally inspired and
-# derived from stuff in quantdom) into a simpler, ipc msg ready, market
-# endpoint meta-data container type as per the drafted interface above.
-class Symbol(Struct):
-    '''
-    I guess this is some kinda container thing for dealing with
-    all the different meta-data formats from brokers?
-
-    '''
-    key: str
-    tick_size: float = 0.01
-    lot_tick_size: float = 0.0  # "volume" precision as min step value
-    tick_size_digits: int = 2
-    lot_size_digits: int = 0
-    suffix: str = ''
-    broker_info: dict[str, dict[str, Any]] = {}
-
-    @classmethod
-    def from_broker_info(
-        cls,
-        broker: str,
-        symbol: str,
-        info: dict[str, Any],
-        suffix: str = '',
-
-    ) -> Symbol:
-
-        tick_size = info.get('price_tick_size', 0.01)
-        lot_size = info.get('lot_tick_size', 0.0)
-
-        return Symbol(
-            key=symbol,
-            tick_size=tick_size,
-            lot_tick_size=lot_size,
-            tick_size_digits=float_digits(tick_size),
-            lot_size_digits=float_digits(lot_size),
-            suffix=suffix,
-            broker_info={broker: info},
-        )
-
-    @classmethod
-    def from_fqsn(
-        cls,
-        fqsn: str,
-        info: dict[str, Any],
-
-    ) -> Symbol:
-        broker, key, suffix = unpack_fqsn(fqsn)
-        return cls.from_broker_info(
-            broker,
-            key,
-            info=info,
-            suffix=suffix,
-        )
-
-    @property
-    def type_key(self) -> str:
-        return list(self.broker_info.values())[0]['asset_type']
-
-    @property
-    def brokers(self) -> list[str]:
-        return list(self.broker_info.keys())
-    def nearest_tick(self, value: float) -> float:
-        '''
-        Return the nearest tick value based on minimum increment.
-
-        '''
-        mult = 1 / self.tick_size
-        return round(value * mult) / mult
-
-    def front_feed(self) -> tuple[str, str]:
-        '''
-        Return the "current" feed key for this symbol.
-
-        (i.e. the broker + symbol key in a tuple).
-
-        '''
-        return (
-            list(self.broker_info.keys())[0],
-            self.key,
-        )
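Rounding sketch for the removed `nearest_tick` (snap a price to the
instrument's minimum increment; the instrument itself is hypothetical):

    sym = Symbol(key='demo', tick_size=0.25)
    assert sym.nearest_tick(100.13) == 100.25   # round(400.52) / 4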
-    def tokens(self) -> tuple[str]:
-        broker, key = self.front_feed()
-        if self.suffix:
-            return (key, self.suffix, broker)
-        else:
-            return (key, broker)
-
-    @property
-    def fqsn(self) -> str:
-        return '.'.join(self.tokens()).lower()
-
-    def front_fqsn(self) -> str:
-        '''
-        fqsn = "fully qualified symbol name"
-
-        Basically the idea here is that all client-ish code (aka
-        programs/actors that ask the provider agnostic layers in the
-        stack for data) should be able to tell which backend / venue /
-        derivative each data feed/flow is from by an explicit string
-        key of the current form:
-
-        <instrumentname>.<venue>.<suffixwithmetadata>.<brokerbackendname>
-
-        TODO: I have thoughts that we should actually change this to be
-        more like an "attr lookup" (like how the web should have done
-        urls, but marketing peeps ruined it etc. etc.):
-
-        <broker>.<venue>.<instrumentname>.<suffixwithmetadata>
-
-        '''
-        tokens = self.tokens()
-        fqsn = '.'.join(map(str.lower, tokens))
-        return fqsn
-
-    def quantize_size(
-        self,
-        size: float,
-
-    ) -> Decimal:
-        '''
-        Truncate input ``size: float`` using ``Decimal``
-        and ``.lot_size_digits``.
-
-        '''
-        digits = self.lot_size_digits
-        return Decimal(size).quantize(
-            Decimal(f'1.{"0".ljust(digits, "0")}'),
-            rounding=ROUND_HALF_EVEN
-        )
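Quantization sketch for the removed `quantize_size` (clip an order size to
`.lot_size_digits` decimal places with banker's rounding; values invented):

    sym = Symbol(key='demo', lot_tick_size=0.001, lot_size_digits=3)
    print(sym.quantize_size(1.23456789))   # -> Decimal('1.235')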
def _nan_to_closest_num(array: np.ndarray):
    """Return interpolated values instead of NaN.
@@ -0,0 +1,510 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Mega-simple symbology cache via TOML files.

Allow backend data providers and/or brokers to stash their
symbology sets (aka the meta data we normalize into our
`.accounting.MktPair` type) to the filesystem for faster lookup and
offline usage.

'''
from __future__ import annotations
from contextlib import (
    asynccontextmanager as acm,
)
from pathlib import Path
from pprint import pformat
from typing import (
    Any,
    Sequence,
    Hashable,
    TYPE_CHECKING,
)
from types import ModuleType

from rapidfuzz import process as fuzzy
import tomli_w  # for fast symbol cache writing
import tractor
import trio
try:
    import tomllib
except ModuleNotFoundError:
    import tomli as tomllib
from msgspec import field

from piker.log import get_logger
from piker import config
from piker.types import Struct
from piker.brokers import (
    open_cached_client,
    get_brokermod,
)

if TYPE_CHECKING:
    from ..accounting import (
        Asset,
        MktPair,
    )

log = get_logger('data.cache')
class SymbologyCache(Struct):
    '''
    Asset meta-data cache which holds lookup tables for 3 sets of
    market-symbology related struct-types required by the
    `.accounting` and `.data` subsystems.

    '''
    mod: ModuleType
    fp: Path

    # all asset-money-systems descriptions as minimally defined in
    # `.accounting.Asset`
    assets: dict[str, Asset] = field(default_factory=dict)

    # backend-system pairs loaded in provider (schema) specific
    # structs.
    pairs: dict[str, Struct] = field(default_factory=dict)

    # serialized namespace path to the backend's pair-info-`Struct`
    # defn B)
    pair_ns_path: tractor.msg.NamespacePath | None = None

    # TODO: piker-normalized `.accounting.MktPair` table?
    # loaded from the `.pairs` and a normalizer
    # provided by the backend pkg.
    mktmaps: dict[str, MktPair] = field(default_factory=dict)

    def write_config(self) -> None:

        # put the backend's pair-struct type ref at the top
        # of file if possible.
        cachedict: dict[str, Any] = {
            # (avoid `str(None)` when the ns path was never set)
            'pair_ns_path': str(self.pair_ns_path or ''),
        }

        # serialize all tables as dicts for TOML.
        for key, table in {
            'assets': self.assets,
            'pairs': self.pairs,
            'mktmaps': self.mktmaps,
        }.items():
            if not table:
                log.warning(
                    f'Asset cache table for `{key}` is empty?'
                )
                continue

            dct = cachedict[key] = {}
            for name, struct in table.items():
                dct[name] = struct.to_dict(include_non_members=False)

        try:
            with self.fp.open(mode='wb') as fp:
                tomli_w.dump(cachedict, fp)
        except TypeError:
            self.fp.unlink()
            raise
    async def load(self) -> None:
        '''
        Explicitly load the "symbology set" for this provider by using
        2 required `Client` methods:

          - `.get_assets()`: returning a table of `Asset`s
          - `.get_mkt_pairs()`: returning a table of pair-`Struct`
            types, custom defined by the particular backend.

        AND, the required `.get_mkt_info()` module-level endpoint
        which maps `fqme: str` -> `MktPair`s.

        These tables are then used to fill out the `.assets`, `.pairs` and
        `.mktmaps` tables on this cache instance, respectively.

        '''
        async with open_cached_client(self.mod.name) as client:

            if get_assets := getattr(client, 'get_assets', None):
                assets: dict[str, Asset] = await get_assets()
                for bs_mktid, asset in assets.items():
                    self.assets[bs_mktid] = asset
            else:
                log.warning(
                    f'No symbology cache `Asset` support for '
                    f'`{self.mod.name}`..\n'
                    'Implement `Client.get_assets()`!'
                )

            if get_mkt_pairs := getattr(client, 'get_mkt_pairs', None):

                pairs: dict[str, Struct] = await get_mkt_pairs()
                for bs_fqme, pair in pairs.items():

                    # NOTE: every backend defined pair should
                    # declare its ns path for roundtrip
                    # serialization lookup.
                    if not getattr(pair, 'ns_path', None):
                        raise TypeError(
                            f'Pair-struct for {self.mod.name} MUST define a '
                            '`.ns_path: str`!\n'
                            f'{pair}'
                        )

                    entry = await self.mod.get_mkt_info(pair.bs_fqme)
                    if not entry:
                        continue

                    mkt: MktPair
                    pair: Struct
                    mkt, _pair = entry
                    assert _pair is pair, (
                        f'`{self.mod.name}` backend probably has a '
                        'keying-symmetry problem between the pair-`Struct` '
                        'returned from `Client.get_mkt_pairs()` and the '
                        'module level endpoint: `.get_mkt_info()`\n\n'
                        "Here's the struct diff:\n"
                        f'{_pair - pair}'
                    )
                    # NOTE XXX: this means backends MUST implement
                    # a `Struct.bs_mktid: str` field to provide
                    # a native-keyed map to their own symbol
                    # set(s).
                    self.pairs[pair.bs_mktid] = pair

                    # NOTE: `MktPair`s are keyed here using piker's
                    # internal FQME schema so that search,
                    # accounting and feed init can be accomplished
                    # on a sane, uniform, normalized basis.
                    self.mktmaps[mkt.fqme] = mkt

                self.pair_ns_path = tractor.msg.NamespacePath.from_ref(
                    pair,
                )

            else:
                log.warning(
                    f'No symbology cache `Pair` support for '
                    f'`{self.mod.name}`..\n'
                    'Implement `Client.get_mkt_pairs()`!'
                )

        return self
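A hypothetical minimal backend `Client` satisfying the hooks described in the
docstring above (method names per the diff; bodies and exact types are
sketches only):

    class Client:
        async def get_assets(self) -> dict[str, Asset]:
            # map backend-native asset ids -> normalized `Asset`s
            ...

        async def get_mkt_pairs(self) -> dict[str, Struct]:
            # map backend-native fqmes -> provider-specific pair
            # structs, each carrying the `.ns_path`, `.bs_fqme` and
            # `.bs_mktid` fields required by `load()` above
            ...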
    @classmethod
    def from_dict(
        cls: type,
        data: dict,
        **kwargs,
    ) -> SymbologyCache:

        # normal init inputs
        cache = cls(**kwargs)

        # XXX WARNING: this may break if backend namespacing
        # changes (eg. `Pair` class def is moved to another
        # module) in which case you can manually update the
        # `pair_ns_path` in the symcache file and try again.
        # TODO: probably a verbose error about this?
        Pair: type = tractor.msg.NamespacePath(
            str(data['pair_ns_path'])
        ).load_ref()

        pairtable = data.pop('pairs')
        for key, pairdict in pairtable.items():

            # allow each serialized pair-dict-table to declare its
            # specific struct type's path in cases where a backend
            # supports multiples (normally with different
            # schemas..) and we are storing them in a flat `.pairs`
            # table.
            ThisPair = Pair
            if this_pair_type := pairdict.get('ns_path'):
                ThisPair: type = tractor.msg.NamespacePath(
                    str(this_pair_type)
                ).load_ref()

            pair: Struct = ThisPair(**pairdict)
            cache.pairs[key] = pair

        from ..accounting import (
            Asset,
            MktPair,
        )

        # load `dict` -> `Asset`
        assettable = data.pop('assets')
        for name, asdict in assettable.items():
            cache.assets[name] = Asset.from_msg(asdict)

        # load `dict` -> `MktPair`
        dne: list[str] = []
        mkttable = data.pop('mktmaps')
        for fqme, mktdict in mkttable.items():

            mkt = MktPair.from_msg(mktdict)
            assert mkt.fqme == fqme

            # sanity check asset refs from those (presumably)
            # loaded asset set above.
            src: Asset = cache.assets[mkt.src.name]
            assert src == mkt.src
            dst: Asset
            if not (dst := cache.assets.get(mkt.dst.name)):
                dne.append(mkt.dst.name)
                continue
            else:
                assert dst.name == mkt.dst.name

            cache.mktmaps[fqme] = mkt

        if dne:
            log.warning(
                f'These `MktPair.dst: Asset`s DNE says `{cache.mod.name}`?\n'
                f'{pformat(dne)}'
            )
        return cache
    @staticmethod
    async def from_scratch(
        mod: ModuleType,
        fp: Path,
        **kwargs,

    ) -> SymbologyCache:
        '''
        Generate a new symcache (contents) entirely from scratch
        including all (TOML) serialized data and the file itself.

        '''
        log.info(f'GENERATING symbology cache for `{mod.name}`')
        cache = SymbologyCache(
            mod=mod,
            fp=fp,
            **kwargs,
        )
        await cache.load()
        cache.write_config()
        return cache

    def search(
        self,
        pattern: str,
        table: str = 'mktmaps'

    ) -> dict[str, Struct]:
        '''
        (Fuzzy) search this cache's `.mktmaps` table, which is
        keyed by FQMEs, for `pattern: str` and return the best
        matches in a `dict` including the `MktPair` values.

        '''
        matches = fuzzy.extract(
            pattern,
            getattr(self, table),
            score_cutoff=50,
        )

        # repack in dict[fqme, MktPair] form
        return {
            item[0].fqme: item[0]
            for item in matches
        }
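A hypothetical lookup flow once a cache exists (see `get_symcache()` defined
further below; the provider and pattern are illustrative):

    cache = get_symcache('binance')
    matches = cache.search('btc')    # -> {fqme: MktPair, ...}
    for fqme in matches:
        print(fqme)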
# actor-process-local in-mem-cache of symcaches (by backend).
_caches: dict[str, SymbologyCache] = {}


def mk_cachefile(
    provider: str,
) -> Path:
    cachedir: Path = config.get_conf_dir() / '_cache'
    if not cachedir.is_dir():
        log.info(f'Creating `nativedb` directory: {cachedir}')
        cachedir.mkdir()

    cachefile: Path = cachedir / f'{str(provider)}.symcache.toml'
    cachefile.touch()
    return cachefile
@acm
async def open_symcache(
    mod_or_name: ModuleType | str,

    reload: bool = False,
    only_from_memcache: bool = False,  # no API req
    _no_symcache: bool = False,  # no backend support

) -> SymbologyCache:

    if isinstance(mod_or_name, str):
        mod = get_brokermod(mod_or_name)
    else:
        mod: ModuleType = mod_or_name

    provider: str = mod.name
    cachefile: Path = mk_cachefile(provider)

    # NOTE: certain backends might not support a symbology cache
    # (easily) and thus we allow for an empty instance to be loaded
    # and manually filled in at the whim of the caller presuming
    # the backend pkg-module is annotated appropriately.
    if (
        getattr(mod, '_no_symcache', False)
        or _no_symcache
    ):
        yield SymbologyCache(
            mod=mod,
            fp=cachefile,
        )
        # don't do nuttin
        return

    # actor-level cache-cache XD
    global _caches
    if not reload:
        try:
            yield _caches[provider]
            # (early exit; without it we'd fall through and
            # yield a second time, which an `@acm` can't do)
            return
        except KeyError:
            msg: str = (
                f'No asset info cache exists yet for `{provider}`'
            )
            if only_from_memcache:
                raise RuntimeError(msg)
            else:
                log.warning(msg)

    # if no cache exists or an explicit reload is requested, load
    # the provider API and call appropriate endpoints to populate
    # the mkt and asset tables.
    if (
        reload
        or not cachefile.is_file()
    ):
        cache = await SymbologyCache.from_scratch(
            mod=mod,
            fp=cachefile,
        )

    else:
        log.info(
            f'Loading EXISTING `{mod.name}` symbology cache:\n'
            f'> {cachefile}'
        )
        import time
        now = time.time()
        with cachefile.open('rb') as existing_fp:
            data: dict[str, dict] = tomllib.load(existing_fp)
            log.runtime(f'SYMCACHE TOML LOAD TIME: {time.time() - now}')

            # if there's an empty file for some reason we need
            # to do a full reload as well!
            if not data:
                cache = await SymbologyCache.from_scratch(
                    mod=mod,
                    fp=cachefile,
                )
            else:
                cache = SymbologyCache.from_dict(
                    data,
                    mod=mod,
                    fp=cachefile,
                )

        # TODO: use a real profiling sys..
        # https://github.com/pikers/piker/issues/337
        log.info(f'SYMCACHE LOAD TIME: {time.time() - now}')

    # stash for other same-actor tasks to hit the memcache path
    _caches[provider] = cache

    yield cache

    # TODO: write only when changes detected? but that should
    # never happen right except on reload?
    # cache.write_config()
def get_symcache(
    provider: str,
    force_reload: bool = False,

) -> SymbologyCache:
    '''
    Get any available symbology/assets cache from sync code by
    (maybe) manually running `trio` to do the work.

    '''
    # spawn tractor runtime and generate cache
    # if not existing.
    async def sched_gen_symcache():
        async with (
            # only for runtime's debug mode
            tractor.open_nursery(debug_mode=True),

            open_symcache(
                get_brokermod(provider),
                reload=force_reload,
            ) as symcache,
        ):
            return symcache

    try:
        symcache: SymbologyCache = trio.run(sched_gen_symcache)
        assert symcache
    except BaseException:
        import pdbp
        pdbp.xpm()
        # re-raise so callers never see an unbound `symcache`
        raise

    return symcache
def match_from_pairs(
    pairs: dict[str, Struct],
    query: str,
    score_cutoff: int = 50,
    **extract_kwargs,

) -> dict[str, Struct]:
    '''
    Fuzzy search over a "pairs table" maintained by most backends
    as part of their symbology-info caching internals.

    Scan the native symbol key set and return the best ranked
    matches back in a new `dict`.

    '''

    # TODO: somehow cache this list (per call) like we were in
    # `open_symbol_search()`?
    keys: list[str] = list(pairs)
    matches: list[tuple[
        Sequence[Hashable],  # matching input key
        Any,  # scores
        Any,
    ]] = fuzzy.extract(
        # NOTE: most backends provide keys uppercased
        query=query,
        choices=keys,
        score_cutoff=score_cutoff,
        **extract_kwargs,
    )

    # pop and repack pairs in output dict
    matched_pairs: dict[str, Struct] = {}
    for item in matches:
        pair_key: str = item[0]
        matched_pairs[pair_key] = pairs[pair_key]

    return matched_pairs
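A hypothetical call (pair structs invented for illustration):

    pairs = {'BTCUSDT': btc_pair, 'ETHUSDT': eth_pair}
    best = match_from_pairs(pairs=pairs, query='BTC', score_cutoff=50)
    # -> {'BTCUSDT': btc_pair}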
@@ -0,0 +1,34 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Data layer module commons.

'''
from functools import partial

from ..log import (
    get_logger,
    get_console_log,
)
subsys: str = 'piker.data'

log = get_logger(subsys)

get_console_log = partial(
    get_console_log,
    name=subsys,
)
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
-# Copyright (C) Tyler Goodlet (in stewardship for piker0)
+# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
@@ -18,23 +18,29 @@
ToOlS fOr CoPInG wITh "tHE wEB" protocols.

"""
+from __future__ import annotations
from contextlib import (
-    asynccontextmanager,
-    AsyncExitStack,
+    asynccontextmanager as acm,
)
from itertools import count
+from functools import partial
from types import ModuleType
from typing import (
    Any,
    Optional,
    Callable,
+    AsyncContextManager,
    AsyncGenerator,
+    Iterable,
)
import json

import trio
-import trio_websocket
+from trio_typing import TaskStatus
+from trio_websocket import (
+    WebSocketConnection,
+    open_websocket_url,
+)
from wsproto.utilities import LocalProtocolError
from trio_websocket._impl import (
    ConnectionClosed,
@@ -44,20 +50,23 @@ from trio_websocket._impl import (
    ConnectionTimeout,
)

-from ..log import get_logger
-
-from .types import Struct
-
-log = get_logger(__name__)
+from piker.types import Struct
+from ._util import log
class NoBsWs:
    '''
    Make ``trio_websocket`` sockets stay up no matter the bs.

-    You can provide a ``fixture`` async-context-manager which will be
-    enter/exitted around each reconnect operation.
+    A shim interface that allows client code to stream from some
+    ``WebSocketConnection`` but where any connectivity bs is handled
+    automatically and entirely in the background.
+
+    NOTE: this type should never be created directly but instead is
+    provided via the ``open_autorecon_ws()`` factory below.

    '''
    # apparently we can QoS for all sorts of reasons..so catch em.
    recon_errors = (
        ConnectionClosed,
        DisconnectionTimeout,
@@ -70,68 +79,42 @@ class NoBsWs:
    def __init__(
        self,
        url: str,
-        stack: AsyncExitStack,
-        fixture: Optional[Callable] = None,
+        rxchan: trio.MemoryReceiveChannel,
+        msg_recv_timeout: float,

        serializer: ModuleType = json
    ):
        self.url = url
-        self.fixture = fixture
-        self._stack = stack
-        self._ws: 'WebSocketConnection' = None  # noqa
+        self._rx = rxchan
+        self._timeout = msg_recv_timeout

-        # TODO: is there some method we can call
-        # on the underlying `._ws` to get this?
-        self._connected: bool = False
+        # signaling between caller and relay task which determines when
+        # socket is connected (and subscribed).
+        self._connected: trio.Event = trio.Event()

-    async def _connect(
-        self,
-        tries: int = 1000,
-    ) -> None:
+        # dynamically reset by the bg relay task
+        self._ws: WebSocketConnection | None = None
+        self._cs: trio.CancelScope | None = None

-        self._connected = False
-        while True:
-            try:
-                await self._stack.aclose()
-            except self.recon_errors:
-                await trio.sleep(0.5)
-            else:
-                break
-
-        last_err = None
-        for i in range(tries):
-            try:
-                self._ws = await self._stack.enter_async_context(
-                    trio_websocket.open_websocket_url(self.url)
-                )
-
-                if self.fixture is not None:
-                    # rerun user code fixture
-                    ret = await self._stack.enter_async_context(
-                        self.fixture(self)
-                    )
-
-                    assert ret is None
-
-                log.info(f'Connection success: {self.url}')
-
-                self._connected = True
-                return self._ws
-
-            except self.recon_errors as err:
-                last_err = err
-                log.error(
-                    f'{self} connection bail with '
-                    f'{type(err)}...retry attempt {i}'
-                )
-                await trio.sleep(0.5)
-                self._connected = False
-                continue
-        else:
-            log.exception('ws connection fail...')
-            raise last_err
+        # interchange codec methods
+        # TODO: obviously the method API here may be different
+        # for another interchange format..
+        self._dumps: Callable = serializer.dumps
+        self._loads: Callable = serializer.loads

    def connected(self) -> bool:
-        return self._connected
+        return self._connected.is_set()

+    async def reset(self) -> None:
+        '''
+        Reset the underlying ws connection by cancelling
+        the bg relay task and waiting for it to signal
+        a new connection.
+
+        '''
+        self._connected = trio.Event()
+        self._cs.cancel()
+        await self._connected.wait()

    async def send_msg(
        self,
@@ -139,18 +122,15 @@ class NoBsWs:
    ) -> None:
        while True:
            try:
-                return await self._ws.send_message(json.dumps(data))
+                msg: Any = self._dumps(data)
+                return await self._ws.send_message(msg)
            except self.recon_errors:
-                await self._connect()
+                await self.reset()

-    async def recv_msg(
-        self,
-    ) -> Any:
-        while True:
-            try:
-                return json.loads(await self._ws.get_message())
-            except self.recon_errors:
-                await self._connect()
+    async def recv_msg(self) -> Any:
+        msg: Any = await self._rx.receive()
+        data = self._loads(msg)
+        return data

    def __aiter__(self):
        return self
@@ -158,32 +138,229 @@ class NoBsWs:
    async def __anext__(self):
        return await self.recv_msg()

+    def set_recv_timeout(
+        self,
+        timeout: float,
+    ) -> None:
+        self._timeout = timeout

-@asynccontextmanager
async def _reconnect_forever(
    url: str,
    snd: trio.MemorySendChannel,
    nobsws: NoBsWs,
    reset_after: int,  # msg recv timeout before reset attempt

    fixture: AsyncContextManager | None = None,
    task_status: TaskStatus = trio.TASK_STATUS_IGNORED,

) -> None:

    # TODO: can we just report "where" in the call stack
    # the client code is using the ws stream?
    # Maybe we can just drop this since it's already in the log msg
    # prefix?
    if fixture is not None:
        src_mod: str = fixture.__module__
    else:
        src_mod: str = 'unknown'

    async def proxy_msgs(
        ws: WebSocketConnection,
        pcs: trio.CancelScope,  # parent cancel scope
    ):
        '''
        Receive (under `timeout` deadline) all msgs from the underlying
        websocket and relay them to the (calling) parent task via a
        ``trio`` mem chan.

        '''
        # after so many msg recv timeouts, reset the connection
        timeouts: int = 0

        while True:
            with trio.move_on_after(
                # can be dynamically changed by user code
                nobsws._timeout,
            ) as cs:
                try:
                    msg: Any = await ws.get_message()
                    await snd.send(msg)
                except nobsws.recon_errors:
                    log.exception(
                        f'{src_mod}\n'
                        f'{url} connection bail with:'
                    )
                    await trio.sleep(0.5)
                    pcs.cancel()

                    # go back to reconnect loop in parent task
                    return

            if cs.cancelled_caught:
                timeouts += 1
                if timeouts > reset_after:
                    log.error(
                        f'{src_mod}\n'
                        'WS feed seems down and slow af.. reconnecting\n'
                    )
                    pcs.cancel()

                    # go back to reconnect loop in parent task
                    return
    async def open_fixture(
        fixture: AsyncContextManager,
        nobsws: NoBsWs,
        task_status: TaskStatus = trio.TASK_STATUS_IGNORED,
    ):
        '''
        Open user provided `@acm` and sleep until any connection
        reset occurs.

        '''
        async with fixture(nobsws) as ret:
            assert ret is None
            task_status.started()
            await trio.sleep_forever()

    # last_err = None
    nobsws._connected = trio.Event()
    task_status.started()

    while not snd._closed:
        log.info(
            f'{src_mod}\n'
            f'{url} trying (RE)CONNECT'
        )

        ws: WebSocketConnection
        try:
            async with (
                trio.open_nursery() as n,
                open_websocket_url(url) as ws,
            ):
                cs = nobsws._cs = n.cancel_scope
                nobsws._ws = ws
                log.info(
                    f'{src_mod}\n'
                    f'Connection success: {url}'
                )

                # begin relay loop to forward msgs
                n.start_soon(
                    proxy_msgs,
                    ws,
                    cs,
                )

                if fixture is not None:
                    log.info(
                        f'{src_mod}\n'
                        f'Entering fixture: {fixture}'
                    )

                    # TODO: should we return an explicit sub-cs
                    # from this fixture task?
                    await n.start(
                        open_fixture,
                        fixture,
                        nobsws,
                    )

                # indicate to wrapper / opener that we are up and block
                # to let tasks run **inside** the ws open block above.
                nobsws._connected.set()
                await trio.sleep_forever()
        except HandshakeError:
            log.exception('Retrying connection')

        # ws & nursery block ends

        nobsws._connected = trio.Event()
        if cs.cancelled_caught:
            log.cancel(
                f'{url} connection cancelled!'
            )
            # if wrapper cancelled us, we expect it to also
            # have re-assigned a new event
            assert (
                nobsws._connected
                and not nobsws._connected.is_set()
            )

        # -> from here, move to next reconnect attempt iteration
        # in the while loop above Bp

    else:
        log.exception(
            f'{src_mod}\n'
            'ws connection closed by client...'
        )
@acm
 | 
			
		||||
async def open_autorecon_ws(
 | 
			
		||||
    url: str,
 | 
			
		||||
 | 
			
		||||
    # TODO: proper type cannot smh
 | 
			
		||||
    fixture: Optional[Callable] = None,
 | 
			
		||||
    fixture: AsyncContextManager | None = None,
 | 
			
		||||
 | 
			
		||||
    # time in sec between msgs received before
 | 
			
		||||
    # we presume connection might need a reset.
 | 
			
		||||
    msg_recv_timeout: float = 16,
 | 
			
		||||
 | 
			
		||||
    # count of the number of above timeouts before connection reset
 | 
			
		||||
    reset_after: int = 3,
 | 
			
		||||
 | 
			
		||||
) -> AsyncGenerator[tuple[...],  NoBsWs]:
 | 
			
		||||
    """Apparently we can QoS for all sorts of reasons..so catch em.
 | 
			
		||||
    '''
 | 
			
		||||
    An auto-reconnect websocket (wrapper API) around
 | 
			
		||||
    ``trio_websocket.open_websocket_url()`` providing automatic
 | 
			
		||||
    re-connection on network errors, msg latency and thus roaming.
 | 
			
		||||
 | 
			
		||||
    """
 | 
			
		||||
    async with AsyncExitStack() as stack:
 | 
			
		||||
        ws = NoBsWs(url, stack, fixture=fixture)
 | 
			
		||||
        await ws._connect()
 | 
			
		||||
    Here we implement a re-connect websocket interface where a bg
 | 
			
		||||
    nursery runs ``WebSocketConnection.receive_message()``s in a loop
 | 
			
		||||
    and restarts the full http(s) handshake on catches of certain
 | 
			
		||||
    connetivity errors, or some user defined recv timeout.
 | 
			
		||||
 | 
			
		||||
    You can provide a ``fixture`` async-context-manager which will be
 | 
			
		||||
    entered/exitted around each connection reset; eg. for (re)requesting
 | 
			
		||||
    subscriptions without requiring streaming setup code to rerun.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    snd: trio.MemorySendChannel
 | 
			
		||||
    rcv: trio.MemoryReceiveChannel
 | 
			
		||||
    snd, rcv = trio.open_memory_channel(616)
 | 
			
		||||
 | 
			
		||||
    async with trio.open_nursery() as n:
 | 
			
		||||
        nobsws = NoBsWs(
 | 
			
		||||
            url,
 | 
			
		||||
            rcv,
 | 
			
		||||
            msg_recv_timeout=msg_recv_timeout,
 | 
			
		||||
        )
 | 
			
		||||
        await n.start(
 | 
			
		||||
            partial(
 | 
			
		||||
                _reconnect_forever,
 | 
			
		||||
                url,
 | 
			
		||||
                snd,
 | 
			
		||||
                nobsws,
 | 
			
		||||
                fixture=fixture,
 | 
			
		||||
                reset_after=reset_after,
 | 
			
		||||
            )
 | 
			
		||||
        )
 | 
			
		||||
        await nobsws._connected.wait()
 | 
			
		||||
        assert nobsws._cs
 | 
			
		||||
        assert nobsws.connected()
 | 
			
		||||
 | 
			
		||||
        try:
 | 
			
		||||
            yield ws
 | 
			
		||||
 | 
			
		||||
            yield nobsws
 | 
			
		||||
        finally:
 | 
			
		||||
            await stack.aclose()
 | 
			
		||||
            n.cancel_scope.cancel()
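
A hedged usage sketch (not from this diff) of the wrapper above; the URL is an illustrative assumption and `subs_fixture` refers to the fixture sketched earlier:

async def stream_trades():
    # reconnects transparently on network errors or recv timeouts
    async with open_autorecon_ws(
        'wss://ws.example.com/v2',  # hypothetical endpoint
        fixture=subs_fixture,       # re-subs around each reconnect
        msg_recv_timeout=16,
    ) as ws:
        async for msg in ws:
            print(msg)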


'''
-JSONRPC response-request style machinery for transparent multiplexing of msgs
-over a NoBsWs.
+JSONRPC response-request style machinery for transparent multiplexing
+of msgs over a `NoBsWs`.

'''

@@ -195,48 +372,87 @@ class JSONRPCResult(Struct):
    error: Optional[dict] = None


-@asynccontextmanager
+@acm
async def open_jsonrpc_session(
    url: str,
    start_id: int = 0,
    response_type: type = JSONRPCResult,
    request_type: Optional[type] = None,
    request_hook: Optional[Callable] = None,
    error_hook: Optional[Callable] = None,
+    msg_recv_timeout: float = float('inf'),
+    # ^NOTE, since only `deribit` is using this jsonrpc stuff atm
+    # and options mkts are generally "slow moving"..
+    #
+    # FURTHER if we break the underlying ws connection then since we
+    # don't pass a `fixture` to the task that manages `NoBsWs`, i.e.
+    # `_reconnect_forever()`, the jsonrpc "transport pipe" gets
+    # broken and never restored with whatever init sequence is
+    # required to re-establish a working req-resp session.

) -> Callable[[str, dict], dict]:
+    '''
+    Init a json-RPC-over-websocket connection to the provided `url`.
+
+    A `json_rpc: Callable[[str, dict], dict]` is delivered to the
+    caller for sending requests and a bg-`trio.Task` handles
+    processing of response msgs including error reporting/raising in
+    the parent/caller task.
+
+    '''
+    # NOTE, store all request msgs so we can raise errors on the
+    # caller side!
+    req_msgs: dict[int, dict] = {}
+
    async with (
-        trio.open_nursery() as n,
-        open_autorecon_ws(url) as ws
+        trio.open_nursery() as tn,
+        open_autorecon_ws(
+            url=url,
+            msg_recv_timeout=msg_recv_timeout,
+        ) as ws
    ):
-        rpc_id: Iterable = count(start_id)
+        rpc_id: Iterable[int] = count(start_id)
        rpc_results: dict[int, dict] = {}

-        async def json_rpc(method: str, params: dict) -> dict:
+        async def json_rpc(
+            method: str,
+            params: dict,
+        ) -> dict:
            '''
            Perform a json rpc call and wait for the result; raise an
            exception if an error field is present in the response.
            '''
+            nonlocal req_msgs
+
+            req_id: int = next(rpc_id)
            msg = {
                'jsonrpc': '2.0',
-                'id': next(rpc_id),
+                'id': req_id,
                'method': method,
                'params': params
            }
            _id = msg['id']

-            rpc_results[_id] = {
+            result = rpc_results[_id] = {
                'result': None,
-                'event': trio.Event()
+                'error': None,
+                'event': trio.Event(),  # signal caller resp arrived
            }
+            req_msgs[_id] = msg

            await ws.send_msg(msg)

            # wait for response before unblocking requester code
            await rpc_results[_id]['event'].wait()

-            ret = rpc_results[_id]['result']
+            if (maybe_result := result['result']):
+                ret = maybe_result
+                del rpc_results[_id]

-            del rpc_results[_id]
+            else:
+                err = result['error']
+                raise Exception(
+                    f'JSONRPC request failed\n'
+                    f'req: {msg}\n'
+                    f'resp: {err}\n'
+                )

-            if ret.error is not None:
-                raise Exception(json.dumps(ret.error, indent=4))

@@ -251,6 +467,7 @@ async def open_jsonrpc_session(
            the server side.

            '''
+            nonlocal req_msgs
            async for msg in ws:
                match msg:
                    case {

@@ -274,19 +491,28 @@ async def open_jsonrpc_session(
                        'params': _,
                    }:
                        log.debug(f'Received\n{msg}')
                        if request_hook:
                            await request_hook(request_type(**msg))

                    case {
                        'error': error
                    }:
-                        log.warning(f'Received\n{error}')
-                        if error_hook:
-                            await error_hook(response_type(**msg))
+                        # retrieve orig request msg, set error
+                        # response in original "result" msg,
+                        # THEN FINALLY set the event to signal caller
+                        # to raise the error in the parent task.
+                        req_id: int = error['id']
+                        req_msg: dict = req_msgs[req_id]
+                        result: dict = rpc_results[req_id]
+                        result['error'] = error
+                        result['event'].set()
+                        log.error(
+                            f'JSONRPC request failed\n'
+                            f'req: {req_msg}\n'
+                            f'resp: {error}\n'
+                        )

                    case _:
                        log.warning(f'Unhandled JSON-RPC msg!?\n{msg}')

-        n.start_soon(recv_task)
+        tn.start_soon(recv_task)
        yield json_rpc
-        n.cancel_scope.cancel()
+        tn.cancel_scope.cancel()
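
A brief usage sketch (an assumption, not from this diff) of the req-resp API above; the endpoint, method name and params mimic deribit-style calls purely for illustration:

async with open_jsonrpc_session(
    'wss://api.example.com/ws/api/v2',  # hypothetical endpoint
) as json_rpc:
    resp = await json_rpc(
        'public/get_instruments',       # illustrative method
        {'currency': 'BTC'},
    )
    # any `error` field in the response raises in this task instead.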

@@ -1,258 +0,0 @@ (file deleted)
# piker: trading gear for hackers
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
marketstore cli.

"""
import trio
import tractor
import click

from ..service.marketstore import (
    # get_client,
    # stream_quotes,
    ingest_quote_stream,
    # _url,
    # _tick_tbk_ids,
    # mk_tbk,
)
from ..cli import cli
from .. import watchlists as wl
from ..log import (
    get_logger,
)


log = get_logger(__name__)


@cli.command()
@click.option(
    '--url',
    default='ws://localhost:5993/ws',
    help='HTTP URL of marketstore instance'
)
@click.argument('names', nargs=-1)
@click.pass_obj
def ms_stream(
    config: dict,
    names: list[str],
    url: str,
) -> None:
    '''
    Connect to a marketstore time bucket stream for (a set of) symbol(s)
    and print to console.

    '''
    async def main():
        # async for quote in stream_quotes(symbols=names):
        #    log.info(f"Received quote:\n{quote}")
        ...

    trio.run(main)


# @cli.command()
# @click.option(
#     '--url',
#     default=_url,
#     help='HTTP URL of marketstore instance'
# )
# @click.argument('names', nargs=-1)
# @click.pass_obj
# def ms_destroy(config: dict, names: list[str], url: str) -> None:
#     """Destroy symbol entries in the local marketstore instance.
#     """
#     async def main():
#         nonlocal names
#         async with get_client(url) as client:
#
#             if not names:
#                 names = await client.list_symbols()
#
#             # default is to wipe db entirely.
#             answer = input(
#                 "This will entirely wipe your local marketstore db @ "
#                 f"{url} of the following symbols:\n {pformat(names)}"
#                 "\n\nDelete [N/y]?\n")
#
#             if answer == 'y':
#                 for sym in names:
#                     # tbk = _tick_tbk.format(sym)
#                     tbk = tuple(sym, *_tick_tbk_ids)
#                     print(f"Destroying {tbk}..")
#                     await client.destroy(mk_tbk(tbk))
#             else:
#                 print("Nothing deleted.")
#
#     tractor.run(main)


@cli.command()
@click.option(
    '--tsdb_host',
    default='localhost'
)
@click.option(
    '--tsdb_port',
    default=5993
)
@click.argument('symbols', nargs=-1)
@click.pass_obj
def storesh(
    config,
    tl,
    host,
    port,
    symbols: list[str],
):
    '''
    Start an IPython shell ready to query the local marketstore db.

    '''
    from piker.data.marketstore import open_tsdb_client
    from piker.service import open_piker_runtime

    async def main():
        nonlocal symbols

        async with open_piker_runtime(
            'storesh',
            enable_modules=['piker.service._ahab'],
        ):
            symbol = symbols[0]

            async with open_tsdb_client(symbol):
                # TODO: ask if user wants to write history for detected
                # available shm buffers?
                from tractor.trionics import ipython_embed
                await ipython_embed()

    trio.run(main)


@cli.command()
@click.option(
    '--host',
    default='localhost'
)
@click.option(
    '--port',
    default=5993
)
@click.option(
    '--delete',
    '-d',
    is_flag=True,
    help='Delete history (1 Min) for symbol(s)',
)
@click.argument('symbols', nargs=-1)
@click.pass_obj
def storage(
    config,
    host,
    port,
    symbols: list[str],
    delete: bool,

):
    '''
    Start an IPython shell ready to query the local marketstore db.

    '''
    from piker.service.marketstore import open_tsdb_client
    from piker.service import open_piker_runtime

    async def main():
        nonlocal symbols

        async with open_piker_runtime(
            'tsdb_storage',
            enable_modules=['piker.service._ahab'],
        ):
            symbol = symbols[0]
            async with open_tsdb_client(symbol) as storage:
                if delete:
                    for fqsn in symbols:
                        syms = await storage.client.list_symbols()

                        resp60s = await storage.delete_ts(fqsn, 60)

                        msgish = resp60s.ListFields()[0][1]
                        if 'error' in str(msgish):

                            # TODO: MEGA LOL, apparently the symbols don't
                            # flush out until you refresh something or other
                            # (maybe the WALFILE)... #lelandorlulzone, classic
                            # alpaca(Rtm) design here ..
                            # well, if we ever can make this work we
                            # probably want to dogsplain the real reason
                            # for the delete errurz..llululu
                            if fqsn not in syms:
                                log.error(f'Pair {fqsn} dne in DB')

                            log.error(f'Deletion error: {fqsn}\n{msgish}')

                        resp1s = await storage.delete_ts(fqsn, 1)
                        msgish = resp1s.ListFields()[0][1]
                        if 'error' in str(msgish):
                            log.error(f'Deletion error: {fqsn}\n{msgish}')

    trio.run(main)


@cli.command()
@click.option('--test-file', '-t', help='Test quote stream file')
@click.option('--tl', is_flag=True, help='Enable tractor logging')
@click.argument('name', nargs=1, required=True)
@click.pass_obj
def ingest(config, name, test_file, tl):
    '''
    Ingest real-time broker quotes and ticks to a marketstore instance.

    '''
    # global opts
    loglevel = config['loglevel']
    tractorloglevel = config['tractorloglevel']
    # log = config['log']

    watchlist_from_file = wl.ensure_watchlists(config['wl_path'])
    watchlists = wl.merge_watchlist(watchlist_from_file, wl._builtins)
    symbols = watchlists[name]

    grouped_syms = {}
    for sym in symbols:
        symbol, _, provider = sym.rpartition('.')
        if provider not in grouped_syms:
            grouped_syms[provider] = []

        grouped_syms[provider].append(symbol)

    async def entry_point():
        async with tractor.open_nursery() as n:
            for provider, symbols in grouped_syms.items():
                await n.run_in_actor(
                    ingest_quote_stream,
                    name='ingest_marketstore',
                    symbols=symbols,
                    brokername=provider,
                    tries=1,
                    actorloglevel=loglevel,
                    loglevel=tractorloglevel
                )

    tractor.run(entry_point)

piker/data/feed.py | 1206 (file diff suppressed because it is too large)
@@ -15,7 +15,7 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
-abstractions for organizing, managing and generally operating-on
+Public abstractions for organizing, managing and generally operating-on
real-time data processing data-structures.

"Streams, flumes, cascades and flows.."

@@ -30,54 +30,27 @@ import tractor
import pendulum
import numpy as np

-from .types import Struct
-from ._source import (
-    Symbol,
-)
+from piker.types import Struct
from ._sharedmem import (
    attach_shm_array,
    ShmArray,
    _Token,
)
-# from .._profile import (
-#     Profiler,
-#     pg_profile_enabled,
-# )

if TYPE_CHECKING:
    # from pyqtgraph import PlotItem
+    from ..accounting import MktPair
    from .feed import Feed


-# TODO: ideas for further abstractions as per
-# https://github.com/pikers/piker/issues/216 and
-# https://github.com/pikers/piker/issues/270:
-# - a ``Cascade`` would be the minimal "connection" of 2 ``Flumes``
-#   as per circuit parlance:
-#   https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection
-#     - could cover the combination of our `FspAdmin` and the
-#       backend `.fsp._engine` related machinery to "connect" one flume
-#       to another?
-# - a (financial signal) ``Flow`` would be a "collection" of such
-#    minimal cascades. Some engineering based jargon concepts:
-#     - https://en.wikipedia.org/wiki/Signal_chain
-#     - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering)
-#     - https://en.wikipedia.org/wiki/Audio_signal_flow
-#     - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation
-#     - https://en.wikipedia.org/wiki/Dataflow_programming
-#     - https://en.wikipedia.org/wiki/Signal_programming
-#     - https://en.wikipedia.org/wiki/Incremental_computing


class Flume(Struct):
    '''
-    Composite reference type which points to all the addressing handles
-    and other meta-data necessary for the read, measure and management
-    of a set of real-time updated data flows.
+    Composite reference type which points to all the addressing
+    handles and other meta-data necessary for the read, measure and
+    management of a set of real-time updated data flows.

    Can be thought of as a "flow descriptor" or "flow frame" which
-    describes the high level properties of a set of data flows that can
-    be used seamlessly across process-memory boundaries.
+    describes the high level properties of a set of data flows that
+    can be used seamlessly across process-memory boundaries.

    Each instance's sub-components normally include:
     - a msg oriented quote stream provided via an IPC transport

@@ -89,7 +62,7 @@ class Flume(Struct):
       queuing properties.

    '''
-    symbol: Symbol
+    mkt: MktPair
    first_quote: dict
    _rt_shm_token: _Token

@@ -100,6 +73,7 @@ class Flume(Struct):
    # private shm refs loaded dynamically from tokens
    _hist_shm: ShmArray | None = None
    _rt_shm: ShmArray | None = None
+    _readonly: bool = True

    stream: tractor.MsgStream | None = None
    izero_hist: int = 0

@@ -116,7 +90,7 @@ class Flume(Struct):
        if self._rt_shm is None:
            self._rt_shm = attach_shm_array(
                token=self._rt_shm_token,
-                readonly=True,
+                readonly=self._readonly,
            )

        return self._rt_shm

@@ -129,12 +103,10 @@ class Flume(Struct):
                'No shm token has been set for the history buffer?'
            )

-        if (
-            self._hist_shm is None
-        ):
+        if self._hist_shm is None:
            self._hist_shm = attach_shm_array(
                token=self._hist_shm_token,
-                readonly=True,
+                readonly=self._readonly,
            )

        return self._hist_shm

@@ -153,10 +125,10 @@ class Flume(Struct):
        period and ratio between them.

        '''
-        times = self.hist_shm.array['time']
-        end = pendulum.from_timestamp(times[-1])
-        start = pendulum.from_timestamp(times[times != times[-1]][-1])
-        hist_step_size_s = (end - start).seconds
+        times: np.ndarray = self.hist_shm.array['time']
+        end: float | int = pendulum.from_timestamp(times[-1])
+        start: float | int = pendulum.from_timestamp(times[times != times[-1]][-1])
+        hist_step_size_s: float = (end - start).seconds

        times = self.rt_shm.array['time']
        end = pendulum.from_timestamp(times[-1])

@@ -172,21 +144,42 @@ class Flume(Struct):

    # TODO: get native msgspec decoding for these workinn
    def to_msg(self) -> dict:
-        msg = self.to_dict()
-        msg['symbol'] = msg['symbol'].to_dict()

-        # can't serialize the stream or feed objects, it's expected
-        # you'll have a ref to it since this msg should be rxed on
-        # a stream on whatever far end IPC..
+        msg = self.to_dict()
+        msg['mkt'] = self.mkt.to_dict()
+
+        # NOTE: pop all un-msg-serializable fields:
+        # - `tractor.MsgStream`
+        # - `Feed`
+        # - `ShmArray`
+        # it's expected the `.from_msg()` on the other side
+        # will get instead some kind of msg-compat version
+        # that it can load.
        msg.pop('stream')
        msg.pop('feed')
+        msg.pop('_rt_shm')
+        msg.pop('_hist_shm')

        return msg

    @classmethod
-    def from_msg(cls, msg: dict) -> dict:
-        symbol = Symbol(**msg.pop('symbol'))
+    def from_msg(
+        cls,
+        msg: dict,
+        readonly: bool = True,
+
+    ) -> dict:
+        '''
+        Load from an IPC msg presumably in either `dict` or
+        `msgspec.Struct` form.
+
+        '''
+        mkt_msg = msg.pop('mkt')
+        from ..accounting import MktPair  # cycle otherwise..
+        mkt = MktPair.from_msg(mkt_msg)
+        msg |= {'_readonly': readonly}
        return cls(
-            symbol=symbol,
+            mkt=mkt,
            **msg,
        )

@@ -208,3 +201,21 @@ class Flume(Struct):
        )
        imx = times.shape[0] - 1
        return min(first, imx)

+    # only set by external msg or creator, never
+    # manually!
+    _has_vlm: bool = True
+
+    def has_vlm(self) -> bool:
+
+        if not self._has_vlm:
+            return False
+
+        # make sure that the instrument supports volume history
+        # (sometimes this is not the case for some commodities and
+        # derivatives)
+        vlm: np.ndarray = self.rt_shm.array['volume']
+        return not bool(
+            np.all(np.isin(vlm, -1))
+            or np.all(np.isnan(vlm))
+        )
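
A sketch (assumptions noted inline, not part of this diff) of the IPC round trip the two methods above enable: serialize on one side, rehydrate on the other with per-process shm access control:

# sender side: strip live handles, ship a plain dict over IPC
msg: dict = flume.to_msg()          # `flume` assumed pre-built
await ipc_stream.send(msg)          # hypothetical tractor msg stream

# receiver side: rebuild, opting into read-only shm attachment
flume = Flume.from_msg(msg, readonly=True)
ohlcv = flume.rt_shm.array          # shm attaches lazily on first use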

@@ -23,7 +23,7 @@ Api layer likely in here...
from types import ModuleType
from importlib import import_module

-from ..log import get_logger
+from ._util import get_logger

log = get_logger(__name__)

@@ -0,0 +1,173 @@ (new file)
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Tick event stream processing, filter-by-types, format-normalization.

'''
from itertools import chain
from typing import (
    Any,
    AsyncIterator,
)

# tick-type-classes template for all possible "lowest level" events
# that can be emitted by the "top of book" L1 queues and
# price-matching (with eventual clearing) in a double auction
# market (queuing) system.
_tick_groups: dict[str, set[str]] = {
    'clears': {'trade', 'dark_trade', 'last'},
    'bids': {'bid', 'bsize'},
    'asks': {'ask', 'asize'},
}

# XXX also define the flattened set of all such "fundamental ticks"
# so that it can be used as filter, eg. in the graphics display
# loop to compute running windowed y-ranges B)
_auction_ticks: set[str] = set.union(*_tick_groups.values())


def frame_ticks(
    quote: dict[str, Any],

    ticks_by_type: dict | None = None,
    ticks_in_order: list[dict[str, Any]] | None = None

) -> dict[
    str,
    list[dict[str, Any]]
]:
    '''
    XXX: build a tick-by-type table of lists
    of tick messages. This allows for less
    iteration on the receiver side by allowing for
    a single "latest tick event" look up by
    indexing the last entry in each sub-list.

    tbt = {
        'types': ['bid', 'asize', 'last', .. '<type_n>'],

        'bid': [tick0, tick1, tick2, .., tickn],
        'asize': [tick0, tick1, tick2, .., tickn],
        'last': [tick0, tick1, tick2, .., tickn],
        ...
        '<type_n>': [tick0, tick1, tick2, .., tickn],
    }

    If `ticks_in_order` is provided, append any retrieved ticks
    since last iteration into this array/buffer/list.

    '''
    # TODO: once we decide to get fancy really we should
    # have a shared mem tick buffer that is just
    # continually filled and the UI just reads from it
    # at its display rate.

    tbt = ticks_by_type if ticks_by_type is not None else {}
    if not (ticks := quote.get('ticks')):
        return tbt

    # append in reverse FIFO order for in-order iteration on
    # receiver side.
    tick: dict[str, Any]
    for tick in ticks:
        tbt.setdefault(
            tick['type'],
            [],
        ).append(tick)

    # TODO: do we need this any more or can we just
    # expect the receiver to unwind the below
    # `ticks_by_type: dict`?
    # => unwinding would potentially require a
    # `dict[str, set | list]` instead with an
    # included `'types'` field which is an (ordered)
    # set of tick type fields in the order which
    # types arrived?
    if ticks_in_order:
        ticks_in_order.extend(ticks)

    return tbt
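
A small usage sketch (the quote payload is an illustrative assumption) of the table-building helper above:

quote = {
    'symbol': 'btcusdt',
    'ticks': [
        {'type': 'bid', 'price': 69_000.0, 'size': 1.2},
        {'type': 'trade', 'price': 69_001.0, 'size': 0.1},
        {'type': 'trade', 'price': 69_002.0, 'size': 0.3},
    ],
}
tbt = frame_ticks(quote)
last_clear = tbt['trade'][-1]  # single look up for the latest trade event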


def iterticks(
    quote: dict,
    types: tuple[str] = (
        'trade',
        'dark_trade',
    ),
    deduplicate_darks: bool = False,
    reverse: bool = False,

    # TODO: should we offer delegating to `frame_ticks()` above
    # with this?
    frame_by_type: bool = False,

) -> AsyncIterator:
    '''
    Iterate through ticks delivered per quote cycle, filter and
    yield any declared in `types`.

    '''
    if deduplicate_darks:
        assert 'dark_trade' in types

    # print(f"{quote}\n\n")
    ticks = quote.get('ticks', ())
    trades = {}
    darks = {}

    if ticks:

        # do a first pass and attempt to remove duplicate dark
        # trades with the same tick signature.
        if deduplicate_darks:
            for tick in ticks:
                ttype = tick.get('type')

                time = tick.get('time', None)
                if time:
                    sig = (
                        time,
                        tick['price'],
                        tick.get('size')
                    )

                    if ttype == 'dark_trade':
                        darks[sig] = tick

                    elif ttype == 'trade':
                        trades[sig] = tick

            # filter duplicates
            for sig, tick in trades.items():
                tick = darks.pop(sig, None)
                if tick:
                    ticks.remove(tick)
                    # print(f'DUPLICATE {tick}')

            # re-insert ticks
            ticks.extend(list(chain(trades.values(), darks.values())))

        # most-recent-first
        if reverse:
            ticks = reversed(ticks)

        for tick in ticks:
            # print(f"{quote['symbol']}: {tick}")
            ttype = tick.get('type')
            if ttype in types:
                yield tick
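
Driving the filter-generator above on a quote (payload again assumed for illustration); note it is a plain sync generator despite the `AsyncIterator` annotation:

quote = {
    'symbol': 'xbtusd',
    'ticks': [
        {'type': 'bid', 'price': 100.0, 'size': 5},
        {'type': 'trade', 'price': 100.5, 'size': 1, 'time': 1700000000.0},
        {'type': 'dark_trade', 'price': 100.5, 'size': 1, 'time': 1700000000.0},
    ],
}
for tick in iterticks(quote, deduplicate_darks=True):
    # only 'trade'/'dark_trade' events pass the filter; the dark
    # trade sharing the (time, price, size) signature is removed
    print(tick['type'], tick['price'])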

@@ -1,88 +0,0 @@ (file deleted)
# piker: trading gear for hackers
# Copyright (C) Guillermo Rodriguez (in stewardship for piker0)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Built-in (extension) types.

"""
import sys
from typing import Optional
from pprint import pformat

import msgspec


class Struct(
    msgspec.Struct,

    # https://jcristharif.com/msgspec/structs.html#tagged-unions
    # tag='pikerstruct',
    # tag=True,
):
    '''
    A "human friendlier" (aka repl buddy) struct subtype.

    '''
    def to_dict(self) -> dict:
        return {
            f: getattr(self, f)
            for f in self.__struct_fields__
        }

    # Lul, doesn't seem to work that well..
    # def __repr__(self):
    #     # only turn on pprint when we detect a python REPL
    #     # at runtime B)
    #     if (
    #         hasattr(sys, 'ps1')
    #         # TODO: check if we're in pdb
    #     ):
    #         return self.pformat()

    #     return super().__repr__()

    def pformat(self) -> str:
        return f'Struct({pformat(self.to_dict())})'

    def copy(
        self,
        update: Optional[dict] = None,

    ) -> msgspec.Struct:
        '''
        Validate-typecast all self defined fields, return a copy of us
        with all such fields.

        This is kinda like the default behaviour in `pydantic.BaseModel`.

        '''
        if update:
            for k, v in update.items():
                setattr(self, k, v)

        # roundtrip serialize to validate
        return msgspec.msgpack.Decoder(
            type=type(self)
        ).decode(
            msgspec.msgpack.Encoder().encode(self)
        )

    def typecast(
        self,
        # fields: Optional[list[str]] = None,
    ) -> None:
        for fname, ftype in self.__annotations__.items():
            setattr(self, fname, ftype(getattr(self, fname)))
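
The validate-by-roundtrip trick in `copy()` above, shown in isolation; the `Point` type and field names are invented for illustration:

class Point(Struct):
    x: float
    y: float

p = Point(x=1.0, y=2.0)
p2 = p.copy(update={'x': 3})  # int widens to float on re-decode
assert isinstance(p2.x, float)
# an invalid field value instead raises on the round trip, eg.
# p.copy(update={'x': 'not-a-float'}) -> msgspec.ValidationError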

@@ -0,0 +1,265 @@ (new file)
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
'''
Data feed synchronization protocols, init msgs, and general
data-provider-backend-agnostic schema definitions.

'''
from __future__ import annotations
from decimal import Decimal
from pprint import pformat
from types import ModuleType
from typing import (
    Any,
    Callable,
)

from msgspec import field

from piker.types import Struct
from piker.accounting import (
    Asset,
    MktPair,
)
from ._util import log


class FeedInitializationError(ValueError):
    '''
    Live data feed setup failed due to API / msg incompatibility!

    '''


class FeedInit(Struct, frozen=True):
    '''
    A stringent data provider startup msg schema validator.

    The fields defined here are matched with those absolutely required
    from each backend broker/data provider.

    '''
    mkt_info: MktPair

    # NOTE: only field we use rn in ``.data.feed``
    # TODO: maybe make a SamplerConfig(Struct)?
    shm_write_opts: dict[str, Any] = field(
        default_factory=lambda: {
        'has_vlm': True,
        'sum_tick_vlm': True,
    })
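
A sketch of what a conforming backend's startup might deliver (the fqme, tick sizes and market id are invented for illustration):

mkt = MktPair.from_fqme(
    fqme='btcusdt.spot.binance',  # hypothetical fqme
    price_tick=Decimal('0.01'),
    size_tick=Decimal('0.00001'),
    bs_mktid='BTCUSDT',
)
init = FeedInit(
    mkt_info=mkt,
    shm_write_opts={'sum_tick_vlm': False},
)
# then a `.started(FeedInit)` style delivery to `.data.feed` follows.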

# XXX: we group backend endpoints into 3
# groups to determine "degrees" of functionality.
_eps: dict[str, list[str]] = {

    # basic API `Client` layer
    'middleware': [
        'get_client',
    ],

    # (live) data streaming / loading / search
    'datad': [
        'get_mkt_info',
        'open_history_client',
        'open_symbol_search',
        'stream_quotes',
    ],

    # live order control and trading
    'brokerd': [
        'trades_dialogue',
        'open_trade_dialog',  # live order ctl
        'norm_trade',  # ledger normalizer for txns
    ],
}


def validate_backend(
    mod: ModuleType,
    syms: list[str],
    init_msgs: list[FeedInit] | dict[str, dict[str, Any]],

    # TODO: do a module method scan and report mismatches.
    check_eps: bool = False,

    api_log_msg_level: str = 'critical'

) -> FeedInit:
    '''
    Fail on malformed live quotes feed config/init or warn on changes
    that haven't been implemented by this backend yet.

    '''
    for daemon_name, eps in _eps.items():
        for name in eps:
            ep: Callable = getattr(
                mod,
                name,
                None,
            )
            if ep is None:
                log.warning(
                    f'Provider backend {mod.name} is missing '
                    f'{daemon_name} support :(\n'
                    f'The following endpoint is missing: {name}'
                )

    inits: list[
        FeedInit | dict[str, Any]
    ] = init_msgs

    # convert to list if from old dict-style
    if isinstance(init_msgs, dict):
        inits = list(init_msgs.values())

    init: FeedInit | dict[str, Any]
    for i, init in enumerate(inits):

        # XXX: eventually this WILL NOT necessarily be true.
        if i > 0:
            assert not len(init_msgs) == 1
            if isinstance(init_msgs, dict):
                keys: set = set(init_msgs.keys()) - set(syms)
                raise FeedInitializationError(
                    'TOO MANY INIT MSGS!\n'
                    f'Unexpected keys: {keys}\n'
                    'ALL MSGS:\n'
                    f'{pformat(init_msgs)}\n'
                )
            else:
                raise FeedInitializationError(
                    'TOO MANY INIT MSGS!\n'
                    f'{pformat(init_msgs)}\n'
                )

    # TODO: once all backends are updated we can remove this branching.
    rx_msg: bool = False
    warn_msg: str = ''
    if not isinstance(init, FeedInit):
        warn_msg += (
            '\n'
            '--------------------------\n'
            ':::DEPRECATED API STYLE:::\n'
            '--------------------------\n'
            f'`{mod.name}.stream_quotes()` should deliver '
            '`.started(FeedInit)`\n'
            f'|-> CURRENTLY it is using DEPRECATED `.started(dict)` style!\n'
            f'|-> SEE `FeedInit` in `piker.data.validate`\n'
            '--------------------------------------------\n'
        )
    else:
        rx_msg = True

    # verify feed init state / schema
    bs_fqme: str  # backend specific fqme
    mkt: MktPair

    match init:

        # backend is using old dict msg delivery
        case {
            'symbol_info': dict(symbol_info),
            'fqsn': bs_fqme,
        } | {
            'mkt_info': dict(symbol_info),
            'fqsn': bs_fqme,
        }:
            symbol_info: dict
            warn_msg += (
                'It may also be still using the legacy `Symbol` style API\n'
                'IT SHOULD BE PORTED TO THE NEW '
                '`.accounting._mktinfo.MktPair`\n'
                'STATTTTT!!!\n'
            )

            # XXX use default legacy (aka discrete precision) mkt
            # price/size_ticks if none delivered.
            price_tick = symbol_info.get(
                'price_tick_size',
                Decimal('0.01'),
            )
            size_tick = symbol_info.get(
                'lot_tick_size',
                Decimal('1'),
            )
            bs_mktid = init.get('bs_mktid') or bs_fqme
            mkt = MktPair.from_fqme(
                fqme=f'{bs_fqme}.{mod.name}',

                price_tick=price_tick,
                size_tick=size_tick,

                bs_mktid=str(bs_mktid),
                _atype=symbol_info['asset_type']
            )

        # backend is using new `MktPair` but not entirely
        case {
            'mkt_info': MktPair(
                dst=Asset(),
            ) as mkt,
            'fqsn': bs_fqme,
        }:
            warn_msg += (
                f'{mod.name} in API compat transition?\n'
                "It's half dict, half man..\n"
                '-------------------------------------\n'
            )

        case FeedInit(
            mkt_info=MktPair(dst=Asset()) as mkt,
            shm_write_opts=dict(shm_opts),
        ) as init:
            name: str = mod.name
            log.info(
                f"{name}'s `MktPair` info:\n"
                f'{pformat(mkt.to_dict())}\n'
                f'shm conf: {pformat(shm_opts)}\n'
            )

        case _:
            raise FeedInitializationError(init)

    # build a msg if we received a dict for input.
    if not rx_msg:
        assert bs_fqme in mkt.fqme
        init = FeedInit(
            mkt_info=mkt,
            shm_write_opts=init.get('shm_write_opts'),
        )

    # `MktPair` value audits
    mkt = init.mkt_info
    assert mkt.type_key

    # backend is using new `MktPair` but not embedded `Asset` types
    # for the .src/.dst..
    if not isinstance(mkt.src, Asset):
        warn_msg += (
            f'ALSO, {mod.name.upper()} should try to deliver\n'
            'the new `MktPair.src: Asset` field!\n'
            '-----------------------------------------------\n'
        )

    # complain about any non-idealities
    if warn_msg:
        # TODO: would be nice to register an API_COMPAT or something in
        # maybe cyan for this in general throughout piker no?
        logmeth = getattr(log, api_log_msg_level)
        logmeth(warn_msg)

    return init.copy()
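
How the validator above is presumably invoked by the feed layer; the backend module and init msg names are assumptions:

from types import ModuleType
import importlib

mod: ModuleType = importlib.import_module('piker.brokers.kraken')  # illustrative
init: FeedInit = validate_backend(
    mod,
    syms=['xbtusd'],
    init_msgs=[my_feed_init],  # as returned by the backend's startup
)
mkt: MktPair = init.mkt_info   # schema-valid from here on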
 | 
			
		||||
| 
						 | 
				
			
			@ -22,17 +22,40 @@ from typing import AsyncIterator
 | 
			
		|||
 | 
			
		||||
import numpy as np
 | 
			
		||||
 | 
			
		||||
from ._engine import cascade
 | 
			
		||||
from ._api import (
 | 
			
		||||
    maybe_mk_fsp_shm,
 | 
			
		||||
    Fsp,
 | 
			
		||||
)
 | 
			
		||||
from ._engine import (
 | 
			
		||||
    cascade,
 | 
			
		||||
    Cascade,
 | 
			
		||||
)
 | 
			
		||||
from ._volume import (
 | 
			
		||||
    dolla_vlm,
 | 
			
		||||
    flow_rates,
 | 
			
		||||
    tina_vwap,
 | 
			
		||||
)
 | 
			
		||||
 | 
			
		||||
__all__ = ['cascade']
 | 
			
		||||
__all__: list[str] = [
 | 
			
		||||
    'cascade',
 | 
			
		||||
    'Cascade',
 | 
			
		||||
    'maybe_mk_fsp_shm',
 | 
			
		||||
    'Fsp',
 | 
			
		||||
    'dolla_vlm',
 | 
			
		||||
    'flow_rates',
 | 
			
		||||
    'tina_vwap',
 | 
			
		||||
]
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
async def latency(
 | 
			
		||||
    source: 'TickStream[Dict[str, float]]',  # noqa
 | 
			
		||||
    ohlcv: np.ndarray
 | 
			
		||||
 | 
			
		||||
) -> AsyncIterator[np.ndarray]:
 | 
			
		||||
    """Latency measurements, broker to piker.
 | 
			
		||||
    """
 | 
			
		||||
    '''
 | 
			
		||||
    Latency measurements, broker to piker.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    # TODO: do we want to offer yielding this async
 | 
			
		||||
    # before the rt data connection comes up?

@@ -174,18 +174,10 @@ def fsp(
     return Fsp(wrapped, outputs=(wrapped.__name__,))


-def mk_fsp_shm_key(
-    sym: str,
-    target: Fsp
-
-) -> str:
-    uid = tractor.current_actor().uid
-    return f'{sym}.fsp.{target.name}.{".".join(uid)}'
-
-
 def maybe_mk_fsp_shm(
     sym: str,
     target: Fsp,
     size: int,
     readonly: bool = True,

 ) -> (str, ShmArray, bool):

@@ -194,7 +186,8 @@ def maybe_mk_fsp_shm(
     exists, otherwise load the shm already existing for that token.

     '''
-    assert isinstance(sym, str), '`sym` should be file-name-friendly `str`'
+    if not isinstance(sym, str):
+        raise ValueError('`sym: str` should be file-name-friendly')

     # TODO: load output types from `Fsp`
     # - should `index` be a required internal field?

@@ -206,11 +199,14 @@ def maybe_mk_fsp_shm(
         [(field_name, float) for field_name in target.outputs]
     )

-    key = mk_fsp_shm_key(sym, target)
+    # (attempt to) uniquely key the fsp shm buffers
+    actor_name, uuid = tractor.current_actor().uid
+    uuid_snip: str = uuid[:16]
+    key: str = f'piker.{actor_name}[{uuid_snip}].{sym}.{target.name}'

     shm, opened = maybe_open_shm_array(
         key,
         # TODO: create entry for each time frame
         size=size,
         dtype=fsp_dtype,
         readonly=True,
     )
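
Note: to see what the inlined key construction above produces, here is a
sketch with a made-up actor uid pair (all values hypothetical):

    actor_name, uuid = ('fsp_ema', '3fa85f64-5717-4562-b3fc-2c963f66afa6')
    uuid_snip: str = uuid[:16]   # -> '3fa85f64-5717-45'
    key: str = f'piker.{actor_name}[{uuid_snip}].btcusdt.ema'
    # -> 'piker.fsp_ema[3fa85f64-5717-45].btcusdt.ema'
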

@@ -18,13 +18,12 @@
 core task logic for processing chains

 '''
-from dataclasses import dataclass
+from __future__ import annotations
+from contextlib import asynccontextmanager as acm
 from functools import partial
 from typing import (
     AsyncIterator,
     Callable,
-    Optional,
-    Union,
 )

 import numpy as np

@@ -33,9 +32,9 @@ from trio_typing import TaskStatus
 import tractor
 from tractor.msg import NamespacePath

+from piker.types import Struct
 from ..log import get_logger, get_console_log
 from .. import data
-from ..data import attach_shm_array
 from ..data.feed import (
     Flume,
     Feed,

@@ -45,23 +44,17 @@ from ..data._sampling import (
     _default_delay_s,
     open_sample_stream,
 )
-from ..data._source import Symbol
+from ..accounting import MktPair
 from ._api import (
     Fsp,
     _load_builtins,
     _Token,
 )
-from .._profile import Profiler
+from ..toolz import Profiler

 log = get_logger(__name__)


-@dataclass
-class TaskTracker:
-    complete: trio.Event
-    cs: trio.CancelScope
-
-
 async def filter_quotes_by_sym(

     sym: str,
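
Note: the `from __future__ import annotations` swap above (PEP 563) makes
all annotations lazily evaluated strings, which is what lets later hunks
use `X | None` unions in signatures even on interpreters predating 3.10.
A standalone illustration (not from the diff):

    from __future__ import annotations


    def lookup(key: str) -> dict | None:
        # the `dict | None` annotation is never evaluated at runtime,
        # so this parses fine on e.g. 3.8/3.9 where `|` between types
        # would otherwise raise a TypeError.
        return None
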

@@ -82,51 +75,190 @@ async def filter_quotes_by_sym(
         if quote:
             yield quote

+# TODO: unifying the abstractions in this FSP subsys/layer:
+# -[ ] move the `.data.flows.Flume` type into this
+#   module/subsys/pkg?
+# -[ ] ideas for further abstractions as per
+#   - https://github.com/pikers/piker/issues/216,
+#   - https://github.com/pikers/piker/issues/270:
+#   - a (financial signal) ``Flow`` would be a "collection" of such
+#     minimal cascades. Some engineering-based jargon concepts:
+#     - https://en.wikipedia.org/wiki/Signal_chain
+#     - https://en.wikipedia.org/wiki/Daisy_chain_(electrical_engineering)
+#     - https://en.wikipedia.org/wiki/Audio_signal_flow
+#     - https://en.wikipedia.org/wiki/Digital_signal_processing#Implementation
+#     - https://en.wikipedia.org/wiki/Dataflow_programming
+#     - https://en.wikipedia.org/wiki/Signal_programming
+#     - https://en.wikipedia.org/wiki/Incremental_computing
+#     - https://en.wikipedia.org/wiki/Signal-flow_graph
+#     - https://en.wikipedia.org/wiki/Signal-flow_graph#Basic_components

-async def fsp_compute(
+# -[ ] we probably want to eval THE BELOW design and unify with the
+#   proto `TaskManager` in the `tractor` dev branch as well as with
+#   our below idea for `Cascade`:
+#   - https://github.com/goodboy/tractor/pull/363
+class Cascade(Struct):
+    '''
+    As per sig-proc engineering parlance, this is a chaining of
+    `Flume`s, which are themselves collections of "Streams"
+    implemented currently via `ShmArray`s.

-    symbol: Symbol,
-    flume: Flume,
+    A `Cascade` is the minimal "connection" of 2 `Flumes`
+    as per circuit parlance:
+    https://en.wikipedia.org/wiki/Two-port_network#Cascade_connection
+
+    TODO:
+      -[ ] could cover the combination of our `FspAdmin` and the
+        backend `.fsp._engine` related machinery to "connect" one flume
+        to another?
+
+    '''
+    # TODO: make these `Flume`s
+    src: Flume
+    dst: Flume
+    tn: trio.Nursery
+    fsp: Fsp  # UI-side middleware ctl API
+
+    # filled during cascade/.bind_func() (fsp_compute) init phases
+    bind_func: Callable | None = None
+    complete: trio.Event | None = None
+    cs: trio.CancelScope | None = None
+    client_stream: tractor.MsgStream | None = None
+
+    async def resync(self) -> int:
+        # TODO: adopt an incremental update engine/approach
+        # where possible here eventually!
+        log.info(f're-syncing fsp {self.fsp.name} to source')
+        self.cs.cancel()
+        await self.complete.wait()
+        index: int = await self.tn.start(self.bind_func)
+
+        # always trigger UI refresh after history update,
+        # see ``piker.ui._fsp.FspAdmin.open_chain()`` and
+        # ``piker.ui._display.trigger_update()``.
+        dst_shm: ShmArray = self.dst.rt_shm
+        await self.client_stream.send({
+            'fsp_update': {
+                'key': dst_shm.token,
+                'first': dst_shm._first.value,
+                'last': dst_shm._last.value,
+            }
+        })
+        return index
+
+    def is_synced(self) -> tuple[bool, int, int]:
+        '''
+        Predicate to determine if a destination FSP
+        output array is aligned to its source array.
+
+        '''
+        src_shm: ShmArray = self.src.rt_shm
+        dst_shm: ShmArray = self.dst.rt_shm
+        step_diff = src_shm.index - dst_shm.index
+        len_diff = abs(len(src_shm.array) - len(dst_shm.array))
+        synced: bool = not (
+            # the source is likely backfilling and we must
+            # sync history calculations
+            len_diff > 2
+
+            # we aren't step synced to the source and may be
+            # leading/lagging by a step
+            or step_diff > 1
+            or step_diff < 0
+        )
+        if not synced:
+            fsp: Fsp = self.fsp
+            log.warning(
+                '***DESYNCED FSP***\n'
+                f'{fsp.ns_path}@{src_shm.token}\n'
+                f'step_diff: {step_diff}\n'
+                f'len_diff: {len_diff}\n'
+            )
+        return (
+            synced,
+            step_diff,
+            len_diff,
+        )
+
+    async def poll_and_sync_to_step(self) -> int:
+        synced, step_diff, _ = self.is_synced()
+        while not synced:
+            await self.resync()
+            synced, step_diff, _ = self.is_synced()
+
+        return step_diff
+
+    @acm
+    async def open_edge(
+        self,
+        bind_func: Callable,
+    ) -> int:
+        self.bind_func = bind_func
+        index = await self.tn.start(bind_func)
+        yield index
+        # TODO: what do we want on teardown/error?
+        # -[ ] dynamic reconnection after update?
+
+
+async def connect_streams(
+    casc: Cascade,
+    mkt: MktPair,
     quote_stream: trio.abc.ReceiveChannel,
+    src: Flume,
+    dst: Flume,

-    src: ShmArray,
-    dst: ShmArray,
-
-    func: Callable,
+    edge_func: Callable,

     # attach_stream: bool = False,
     task_status: TaskStatus[None] = trio.TASK_STATUS_IGNORED,

 ) -> None:
+    '''
+    Stream and per-sample compute and write the cascade of
+    2 `Flumes`/streams given some operating `func`.
+
+    https://en.wikipedia.org/wiki/Signal-flow_graph#Basic_components
+
+    Not literally, but something like:
+
+        edge_func(Flume_in) -> Flume_out
+
+    '''
     profiler = Profiler(
         delayed=False,
         disabled=True
     )

-    fqsn = symbol.front_fqsn()
-    out_stream = func(
+    # TODO: just pull it from src.mkt.fqme no?
+    # fqme: str = mkt.fqme
+    fqme: str = src.mkt.fqme
+
+    # TODO: dynamic introspection of what the underlying (vertex)
+    # function actually requires from input node (flumes) then
+    # deliver those inputs as part of a graph "compilation" step?
+    out_stream = edge_func(

         # TODO: do we even need this if we do the feed api right?
         # shouldn't a local stream do this before we get a handle
         # to the async iterable? it's that or we do some kinda
         # async itertools style?
-        filter_quotes_by_sym(fqsn, quote_stream),
+        filter_quotes_by_sym(fqme, quote_stream),

-        # XXX: currently the ``ohlcv`` arg
-        flume.rt_shm,
+        # XXX: currently the ``ohlcv`` arg, but we should allow
+        # (dynamic) requests for src flume (node) streams?
+        src.rt_shm,
     )

     # HISTORY COMPUTE PHASE
     # conduct a single iteration of fsp with historical bars input
     # and get historical output.
-    history_output: Union[
-        dict[str, np.ndarray],  # multi-output case
-        np.ndarray,  # single output case
-    ]
+    history_output: (
+        dict[str, np.ndarray]  # multi-output case
+        | np.ndarray,  # single output case
+    )
     history_output = await anext(out_stream)

-    func_name = func.__name__
+    func_name = edge_func.__name__
     profiler(f'{func_name} generated history')

     # build struct array with an 'index' field to push as history
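
Note: the `is_synced()` predicate above boils down to comparing a pair of
ring-buffer cursors and lengths. A minimal standalone sketch of the same
check with made-up numbers (no piker APIs):

    def is_synced(
        src_index: int,
        dst_index: int,
        src_len: int,
        dst_len: int,
    ) -> bool:
        step_diff = src_index - dst_index
        len_diff = abs(src_len - dst_len)
        return not (
            len_diff > 2        # source still backfilling history
            or step_diff > 1    # destination lagging by > 1 step
            or step_diff < 0    # destination somehow leading
        )

    # dst one step behind with equal history is fine:
    assert is_synced(1000, 999, 5000, 5000)
    # dst missing a chunk of history is not:
    assert not is_synced(1000, 1000, 5000, 4000)
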

@@ -134,10 +266,12 @@ async def fsp_compute(
     # TODO: push using a[['f0', 'f1', .., 'fn']] = .. syntax no?
     # if the output array is multi-field then push
     # each respective field.
-    fields = getattr(dst.array.dtype, 'fields', None).copy()
+    dst_shm: ShmArray = dst.rt_shm
+    fields = getattr(dst_shm.array.dtype, 'fields', None).copy()
     fields.pop('index')
-    history_by_field: Optional[np.ndarray] = None
-    src_time = src.array['time']
+    history_by_field: np.ndarray | None = None
+    src_shm: ShmArray = src.rt_shm
+    src_time = src_shm.array['time']

     if (
         fields and

@@ -156,7 +290,7 @@ async def fsp_compute(
                 if history_by_field is None:

                     if output is None:
-                        length = len(src.array)
+                        length = len(src_shm.array)
                     else:
                         length = len(output)

@@ -165,7 +299,7 @@ async def fsp_compute(
                     # will be pushed to shm.
                     history_by_field = np.zeros(
                         length,
-                        dtype=dst.array.dtype
+                        dtype=dst_shm.array.dtype
                     )

                 if output is None:

@@ -182,13 +316,13 @@ async def fsp_compute(
             )
         history_by_field = np.zeros(
             len(history_output),
-            dtype=dst.array.dtype
+            dtype=dst_shm.array.dtype
         )
         history_by_field[func_name] = history_output

     history_by_field['time'] = src_time[-len(history_by_field):]

-    history_output['time'] = src.array['time']
+    history_output['time'] = src_shm.array['time']

     # TODO: XXX:
     # THERE'S A BIG BUG HERE WITH THE `index` field since we're

@@ -201,11 +335,11 @@ async def fsp_compute(
     #   is `index` aware such that historical data can be indexed
     #   relative to the true first datum? Not sure if this is sane
     #   for incremental computations.
-    first = dst._first.value = src._first.value
+    first = dst_shm._first.value = src_shm._first.value

     # TODO: can we use this `start` flag instead of the manual
     # setting above?
-    index = dst.push(
+    index = dst_shm.push(
         history_by_field,
         start=first,
     )

@@ -216,12 +350,9 @@ async def fsp_compute(
     # setup a respawn handle
     with trio.CancelScope() as cs:

-        # TODO: might be better to just make a "restart" method where
-        # the target task is spawned implicitly and then the event is
-        # set via some higher level api? At that point we might as well
-        # be writing a one-cancels-one nursery though right?
-        tracker = TaskTracker(trio.Event(), cs)
-        task_status.started((tracker, index))
+        casc.cs = cs
+        casc.complete = trio.Event()
+        task_status.started(index)

         profiler(f'{func_name} yield last index')

@@ -235,12 +366,12 @@ async def fsp_compute(
                 log.debug(f"{func_name}: {processed}")
                 key, output = processed
                 # dst.array[-1][key] = output
-                dst.array[[key, 'time']][-1] = (
+                dst_shm.array[[key, 'time']][-1] = (
                     output,
                     # TODO: what about pushing ``time.time_ns()``
                     # in which case we'll need to round at the graphics
                     # processing / sampling layer?
-                    src.array[-1]['time']
+                    src_shm.array[-1]['time']
                 )

                 # NOTE: for now we aren't streaming this to the consumer

@@ -252,7 +383,7 @@ async def fsp_compute(
                 # N-consumers who subscribe for the real-time output,
                 # which we'll likely want to implement using local-mem
                 # chans for the fan out?
-                # index = src.index
+                # index = src_shm.index
                 # if attach_stream:
                 #     await client_stream.send(index)

@@ -262,7 +393,7 @@ async def fsp_compute(
                 #     log.info(f'FSP quote too fast: {hz}')
                 # last = time.time()
         finally:
-            tracker.complete.set()
+            casc.complete.set()


 @tractor.context
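
Note: the `dst_shm.array[...][-1] = (...)` writes above rely on numpy
structured-array semantics where an indexed row is a view into the
buffer. The per-field spelling of the same update, as a standalone
sketch:

    import numpy as np

    # a tiny structured array standing in for an fsp shm buffer
    dtype = np.dtype([('index', int), ('time', float), ('ema', float)])
    arr = np.zeros(3, dtype=dtype)

    # `arr[-1]` is a record *view*, so field writes hit the buffer
    last = arr[-1]
    last['ema'] = 101.5
    last['time'] = 1_700_000_000.0
    assert arr['ema'][-1] == 101.5
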

@@ -271,17 +402,17 @@ async def cascade(
     ctx: tractor.Context,

     # data feed key
-    fqsn: str,
-
-    src_shm_token: dict,
-    dst_shm_token: tuple[str, np.dtype],
+    fqme: str,
+
+    # flume pair cascaded using an "edge function"
+    src_flume_addr: dict,
+    dst_flume_addr: dict,

     ns_path: NamespacePath,

     shm_registry: dict[str, _Token],

     zero_on_step: bool = False,
-    loglevel: Optional[str] = None,
+    loglevel: str | None = None,

 ) -> None:
     '''

@@ -297,8 +428,14 @@ async def cascade(
     if loglevel:
         get_console_log(loglevel)

-    src = attach_shm_array(token=src_shm_token)
-    dst = attach_shm_array(readonly=False, token=dst_shm_token)
+    src: Flume = Flume.from_msg(src_flume_addr)
+    dst: Flume = Flume.from_msg(
+        dst_flume_addr,
+        readonly=False,
+    )
+
+    # src: ShmArray = attach_shm_array(token=src_shm_token)
+    # dst: ShmArray = attach_shm_array(readonly=False, token=dst_shm_token)

     reg = _load_builtins()
     lines = '\n'.join([f'{key.rpartition(":")[2]} => {key}' for key in reg])

@@ -306,11 +443,11 @@ async def cascade(
         f'Registered FSP set:\n{lines}'
     )

-    # update actorlocal flows table which registers
-    # readonly "instances" of this fsp for symbol/source
-    # so that consumer fsps can look it up by source + fsp.
-    # TODO: ugh i hate this wind/unwind to list over the wire
-    # but not sure how else to do it.
+    # NOTE XXX: update actorlocal flows table which registers
+    # readonly "instances" of this fsp for symbol/source so that
+    # consumer fsps can look it up by source + fsp.
+    # TODO: ugh i hate this wind/unwind to list over the wire but
+    # not sure how else to do it.
     for (token, fsp_name, dst_token) in shm_registry:
         Fsp._flow_registry[(
             _Token.from_msg(token),

@@ -320,16 +457,19 @@ async def cascade(
     fsp: Fsp = reg.get(
         NamespacePath(ns_path)
     )
-    func = fsp.func
+    func: Callable = fsp.func

     if not func:
         # TODO: assume it's a func target path
         raise ValueError(f'Unknown fsp target: {ns_path}')

+    _fqme: str = src.mkt.fqme
+    assert _fqme == fqme
+
     # open a data feed stream with requested broker
+    feed: Feed
     async with data.feed.maybe_open_feed(
-        [fqsn],
+        [fqme],

         # TODO throttle tick outputs from *this* daemon since
         # it'll emit tons of ticks due to the throttle only

@@ -339,177 +479,142 @@ async def cascade(

     ) as feed:

-        flume = feed.flumes[fqsn]
-        symbol = flume.symbol
-        assert src.token == flume.rt_shm.token
+        flume: Flume = feed.flumes[fqme]
+        # XXX: can't do this since flume.feed will be set XD
+        # assert flume == src
+        assert flume.mkt == src.mkt
+        mkt: MktPair = flume.mkt
+
+        # NOTE: FOR NOW, sanity checks around the feed as being
+        # always the src flume (until we get to fancier/lengthier
+        # chains/graphs.
+        assert src.rt_shm.token == flume.rt_shm.token
+
+        # XXX: won't work bc the _hist_shm_token value will be
+        # list[list] after IPC..
+        # assert flume.to_msg() == src_flume_addr

         profiler(f'{func}: feed up')

-        func_name = func.__name__
+        func_name: str = func.__name__
         async with (
-            trio.open_nursery() as n,
+            trio.open_nursery() as tn,
         ):
+            # TODO: might be better to just make a "restart" method where
+            # the target task is spawned implicitly and then the event is
+            # set via some higher level api? At that point we might as well
+            # be writing a one-cancels-one nursery though right?
+            casc = Cascade(
+                src,
+                dst,
+                tn,
+                fsp,
+            )

+            # TODO: this seems like it should be wrapped somewhere?
             fsp_target = partial(

-                fsp_compute,
-                symbol=symbol,
-                flume=flume,
+                connect_streams,
+                casc=casc,
+                mkt=mkt,
                 quote_stream=flume.stream,

-                # shm
+                # flumes and shm passthrough
                 src=src,
                 dst=dst,

-                # target
-                func=func
+                # chain function which takes src flume input(s)
+                # and renders dst flume output(s)
+                edge_func=func
             )
+            async with casc.open_edge(
+                bind_func=fsp_target,
+            ) as index:
+                # casc.bind_func = fsp_target
+                # index = await tn.start(fsp_target)
+                dst_shm: ShmArray = dst.rt_shm
+                src_shm: ShmArray = src.rt_shm

-            tracker, index = await n.start(fsp_target)
-
-            if zero_on_step:
-                last = dst.array[-1:]
-                zeroed = np.zeros(last.shape, dtype=last.dtype)
-
-            profiler(f'{func_name}: fsp up')
-
-            # sync client
-            await ctx.started(index)
-
-            # XXX:  rt stream with client which we MUST
-            # open here (and keep it open) in order to make
-            # incremental "updates" as history prepends take
-            # place.
-            async with ctx.open_stream() as client_stream:
-
-                # TODO: these likely should all become
-                # methods of this ``TaskLifetime`` or wtv
-                # abstraction..
-                async def resync(
-                    tracker: TaskTracker,
-
-                ) -> tuple[TaskTracker, int]:
-                    # TODO: adopt an incremental update engine/approach
-                    # where possible here eventually!
-                    log.info(f're-syncing fsp {func_name} to source')
-                    tracker.cs.cancel()
-                    await tracker.complete.wait()
-                    tracker, index = await n.start(fsp_target)
-
-                    # always trigger UI refresh after history update,
-                    # see ``piker.ui._fsp.FspAdmin.open_chain()`` and
-                    # ``piker.ui._display.trigger_update()``.
-                    await client_stream.send({
-                        'fsp_update': {
-                            'key': dst_shm_token,
-                            'first': dst._first.value,
-                            'last': dst._last.value,
-                        }
-                    })
-                    return tracker, index
-
-                def is_synced(
-                    src: ShmArray,
-                    dst: ShmArray
-                ) -> tuple[bool, int, int]:
-                    '''
-                    Predicate to determine if a destination FSP
-                    output array is aligned to its source array.
-
-                    '''
-                    step_diff = src.index - dst.index
-                    len_diff = abs(len(src.array) - len(dst.array))
-                    return not (
-                        # the source is likely backfilling and we must
-                        # sync history calculations
-                        len_diff > 2
-
-                        # we aren't step synced to the source and may be
-                        # leading/lagging by a step
-                        or step_diff > 1
-                        or step_diff < 0
-                    ), step_diff, len_diff
-
-                async def poll_and_sync_to_step(
-                    tracker: TaskTracker,
-                    src: ShmArray,
-                    dst: ShmArray,
-
-                ) -> tuple[TaskTracker, int]:
-
-                    synced, step_diff, _ = is_synced(src, dst)
-                    while not synced:
-                        tracker, index = await resync(tracker)
-                        synced, step_diff, _ = is_synced(src, dst)
-
-                    return tracker, step_diff
-
-                s, step, ld = is_synced(src, dst)
-
-                # detect sample period step for subscription to increment
-                # signal
-                times = src.array['time']
-                if len(times) > 1:
-                    last_ts = times[-1]
-                    delay_s = float(last_ts - times[times != last_ts][-1])
-                else:
-                    # our default "HFT" sample rate.
-                    delay_s = _default_delay_s
-
-                # sub and increment the underlying shared memory buffer
-                # on every step msg received from the global `samplerd`
-                # service.
-                async with open_sample_stream(float(delay_s)) as istream:
-
-                    profiler(f'{func_name}: sample stream up')
-                    profiler.finish()
-
-                    async for i in istream:
-                        # print(f'FSP incrementing {i}')
-
-                        # respawn the compute task if the source
-                        # array has been updated such that we compute
-                        # new history from the (prepended) source.
-                        synced, step_diff, _ = is_synced(src, dst)
-                        if not synced:
-                            tracker, step_diff = await poll_and_sync_to_step(
-                                tracker,
-                                src,
-                                dst,
-                            )
-
-                            # skip adding a last bar since we should already
-                            # be step aligned
-                            if step_diff == 0:
-                                continue
-
-                        # read out last shm row, copy and write new row
-                        array = dst.array
-
-                        # some metrics like vlm should be reset
-                        # to zero every step.
-                        if zero_on_step:
-                            last = zeroed
-                        else:
-                            last = array[-1:].copy()
-
-                        dst.push(last)
-
-                        # sync with source buffer's time step
-                        src_l2 = src.array[-2:]
-                        src_li, src_lt = src_l2[-1][['index', 'time']]
-                        src_2li, src_2lt = src_l2[-2][['index', 'time']]
-                        dst._array['time'][src_li] = src_lt
-                        dst._array['time'][src_2li] = src_2lt
-
-                        # last2 = dst.array[-2:]
-                        # if (
-                        #     last2[-1]['index'] != src_li
-                        #     or last2[-2]['index'] != src_2li
-                        # ):
-                        #     dstl2 = list(last2)
-                        #     srcl2 = list(src_l2)
-                        #     print(
-                        #         # f'{dst.token}\n'
-                        #         f'src: {srcl2}\n'
-                        #         f'dst: {dstl2}\n'
-                        #     )
+                if zero_on_step:
+                    last = dst.rt_shm.array[-1:]
+                    zeroed = np.zeros(last.shape, dtype=last.dtype)
+
+                profiler(f'{func_name}: fsp up')
+
+                # sync to client-side actor
+                await ctx.started(index)
+
+                # XXX:  rt stream with client which we MUST
+                # open here (and keep it open) in order to make
+                # incremental "updates" as history prepends take
+                # place.
+                async with ctx.open_stream() as client_stream:
+                    casc.client_stream: tractor.MsgStream = client_stream
+
+                    s, step, ld = casc.is_synced()
+
+                    # detect sample period step for subscription to increment
+                    # signal
+                    times = src.rt_shm.array['time']
+                    if len(times) > 1:
+                        last_ts = times[-1]
+                        delay_s: float = float(last_ts - times[times != last_ts][-1])
+                    else:
+                        # our default "HFT" sample rate.
+                        delay_s: float = _default_delay_s
+
+                    # sub and increment the underlying shared memory buffer
+                    # on every step msg received from the global `samplerd`
+                    # service.
+                    async with open_sample_stream(
+                        float(delay_s)
+                    ) as istream:
+
+                        profiler(f'{func_name}: sample stream up')
+                        profiler.finish()
+
+                        async for i in istream:
+                            # print(f'FSP incrementing {i}')
+
+                            # respawn the compute task if the source
+                            # array has been updated such that we compute
+                            # new history from the (prepended) source.
+                            synced, step_diff, _ = casc.is_synced()
+                            if not synced:
+                                step_diff: int = await casc.poll_and_sync_to_step()
+
+                                # skip adding a last bar since we should already
+                                # be step aligned
+                                if step_diff == 0:
+                                    continue
+
+                            # read out last shm row, copy and write new row
+                            array = dst_shm.array
+
+                            # some metrics like vlm should be reset
+                            # to zero every step.
+                            if zero_on_step:
+                                last = zeroed
+                            else:
+                                last = array[-1:].copy()
+
+                            dst.rt_shm.push(last)
+
+                            # sync with source buffer's time step
+                            src_l2 = src_shm.array[-2:]
+                            src_li, src_lt = src_l2[-1][['index', 'time']]
+                            src_2li, src_2lt = src_l2[-2][['index', 'time']]
+                            dst_shm._array['time'][src_li] = src_lt
+                            dst_shm._array['time'][src_2li] = src_2lt
+
+                            # last2 = dst.array[-2:]
+                            # if (
+                            #     last2[-1]['index'] != src_li
+                            #     or last2[-2]['index'] != src_2li
+                            # ):
+                            #     dstl2 = list(last2)
+                            #     srcl2 = list(src_l2)
+                            #     print(
+                            #         # f'{dst.token}\n'
+                            #         f'src: {srcl2}\n'
+                            #         f'dst: {dstl2}\n'
+                            #     )
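
Note: the sample-period detection above diffs the newest timestamp
against the most recent *differing* one, so a live bar whose stamp
repeats at the tail doesn't break the measurement. Standalone sketch
with made-up data:

    import numpy as np

    # time column where the live bar's stamp repeats at the tail
    times = np.array([60.0, 120.0, 180.0, 180.0, 180.0])

    last_ts = times[-1]
    delay_s = float(last_ts - times[times != last_ts][-1])
    assert delay_s == 60.0   # 180.0 - 120.0
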

@@ -24,7 +24,7 @@ import numpy as np
 from numba import jit, float64, optional, int64

 from ._api import fsp
-from ..data._normalize import iterticks
+from ..data import iterticks
 from ..data._sharedmem import ShmArray


@@ -20,7 +20,7 @@ import numpy as np
 from tractor.trionics._broadcast import AsyncReceiver

 from ._api import fsp
-from ..data._normalize import iterticks
+from ..data import iterticks
 from ..data._sharedmem import ShmArray
 from ._momo import _wma
 from ..log import get_logger

@@ -40,7 +40,10 @@ def get_logger(
     Return the package log or a sub-log for `name` if provided.

     '''
-    return tractor.log.get_logger(name=name, _root_name=_proj_name)
+    return tractor.log.get_logger(
+        name=name,
+        _root_name=_proj_name,
+    )


 def get_console_log(

piker/pp.py (1047 lines changed): file diff suppressed because it is too large.
@@ -14,47 +14,45 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program.  If not, see <https://www.gnu.org/licenses/>.

-"""
-Actor-runtime service orchestration machinery.
-
-"""
-from __future__ import annotations
-
-from ._mngr import Services
-from ._registry import (  # noqa
-    _tractor_kwargs,
-    _default_reg_addr,
-    _default_registry_host,
-    _default_registry_port,
-    open_registry,
-    find_service,
-    check_for_service,
-)
-from ._daemon import (  # noqa
-    maybe_spawn_daemon,
-    spawn_brokerd,
-    maybe_spawn_brokerd,
-    spawn_emsd,
-    maybe_open_emsd,
-)
-from ._actor_runtime import (
-    open_piker_runtime,
-    maybe_open_pikerd,
-    open_pikerd,
-    get_tractor_runtime_kwargs,
-)
-
-
-__all__ = [
-    'check_for_service',
-    'Services',
-    'maybe_spawn_daemon',
-    'spawn_brokerd',
-    'maybe_spawn_brokerd',
-    'spawn_emsd',
-    'maybe_open_emsd',
-    'open_piker_runtime',
-    'maybe_open_pikerd',
-    'open_pikerd',
-    'get_tractor_runtime_kwargs',
-]
+'''
+Actor runtime primitives and (distributed) service APIs for,
+
+- daemon-service mgmt: `_daemon` (i.e. low-level spawn and supervise machinery
+  for sub-actors like `brokerd`, `emsd`, `datad`, etc.)
+
+- service-actor supervision (via `trio` tasks) API: `._mngr`
+
+- discovery interface (via light wrapping around `tractor`'s built-in
+  protocol): `._registry`
+
+- `docker` cntr SC supervision for use with `trio`: `_ahab`
+  - wrappers for marketstore and elasticsearch dbs
+  => TODO: maybe to (re)move elsewhere?
+
+'''
+from ._mngr import Services as Services
+from ._registry import (
+    _tractor_kwargs as _tractor_kwargs,
+    _default_reg_addr as _default_reg_addr,
+    _default_registry_host as _default_registry_host,
+    _default_registry_port as _default_registry_port,
+
+    open_registry as open_registry,
+    find_service as find_service,
+    check_for_service as check_for_service,
+)
+from ._daemon import (
+    maybe_spawn_daemon as maybe_spawn_daemon,
+    spawn_emsd as spawn_emsd,
+    maybe_open_emsd as maybe_open_emsd,
+)
+from ._actor_runtime import (
+    open_piker_runtime as open_piker_runtime,
+    maybe_open_pikerd as maybe_open_pikerd,
+    open_pikerd as open_pikerd,
+    get_runtime_vars as get_runtime_vars,
+)
+from ..brokers._daemon import (
+    spawn_brokerd as spawn_brokerd,
+    maybe_spawn_brokerd as maybe_spawn_brokerd,
+)
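
Note: the redundant-looking `from x import y as y` spellings above are
the explicit re-export idiom: strict type checkers (e.g. mypy with
`--no-implicit-reexport`) treat a bare `from x import y` inside an
`__init__.py` as private, while the self-alias marks the name public
without maintaining a separate `__all__`. Sketch with hypothetical
modules:

    # pkg/_impl.py
    def connect() -> None:
        ...

    # pkg/__init__.py
    from ._impl import connect as connect   # explicit public re-export

    # downstream modules may now cleanly do:
    # from pkg import connect
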

@@ -19,8 +19,6 @@

 """
 from __future__ import annotations
-from pprint import pformat
-from functools import partial
 import os
 from typing import (
     Optional,

@@ -34,8 +32,7 @@ from contextlib import (
 import tractor
 import trio

-from ..log import (
-    get_logger,
+from ._util import (
     get_console_log,
 )
 from ._mngr import (

@@ -47,10 +44,8 @@ from ._registry import (  # noqa
     open_registry,
 )

-log = get_logger(__name__)
-

-def get_tractor_runtime_kwargs() -> dict[str, Any]:
+def get_runtime_vars() -> dict[str, Any]:
     '''
     Deliver ``tractor`` related runtime variables in a `dict`.

@@ -61,6 +56,8 @@ def get_runtime_vars() -> dict[str, Any]:
 @acm
 async def open_piker_runtime(
     name: str,
+    registry_addrs: list[tuple[str, int]] = [],
+
     enable_modules: list[str] = [],
     loglevel: Optional[str] = None,

@@ -68,8 +65,6 @@ async def open_piker_runtime(
     # for data daemons when running in production.
     debug_mode: bool = False,

-    registry_addr: None | tuple[str, int] = None,
-
     # TODO: once we have `rsyscall` support we will read a config
     # and spawn the service tree distributed per that.
     start_method: str = 'trio',

@@ -79,7 +74,7 @@ async def open_piker_runtime(

 ) -> tuple[
     tractor.Actor,
-    tuple[str, int],
+    list[tuple[str, int]],
 ]:
     '''
     Start a piker actor whose runtime will automatically sync with
@@ -89,21 +84,31 @@ async def open_piker_runtime(
     a root actor.

     '''
+    # check for existing runtime, boot it
+    # if not already running.
     try:
-        # check for existing runtime
-        actor = tractor.current_actor().uid
+        actor = tractor.current_actor()
     except tractor._exceptions.NoRuntime:
         tractor._state._runtime_vars[
-            'piker_vars'] = tractor_runtime_overrides
+            'piker_vars'
+        ] = tractor_runtime_overrides

-        registry_addr = registry_addr or _default_reg_addr
+        # NOTE: if no registrar list passed use the default of just
+        # setting it as the root actor on localhost.
+        registry_addrs = (
+            registry_addrs
+            or [_default_reg_addr]
+        )
+
+        if ems := tractor_kwargs.pop('enable_modules', None):
+            # import pdbp; pdbp.set_trace()
+            enable_modules.extend(ems)

         async with (
             tractor.open_root_actor(

                 # passed through to ``open_root_actor``
-                arbiter_addr=registry_addr,
+                registry_addrs=registry_addrs,
                 name=name,
                 loglevel=loglevel,
                 debug_mode=debug_mode,

@@ -115,46 +120,50 @@ async def open_piker_runtime(
                 enable_modules=enable_modules,

                 **tractor_kwargs,
-            ) as _,
+            ) as actor,

-            open_registry(registry_addr, ensure_exists=False) as addr,
+            open_registry(
+                registry_addrs,
+                ensure_exists=False,
+            ) as addrs,
         ):
-            yield (
-                tractor.current_actor(),
-                addr,
-            )
-    else:
-        async with open_registry(registry_addr) as addr:
+            assert actor is tractor.current_actor()
             yield (
                 actor,
-                addr,
+                addrs,
             )
+    else:
+        async with open_registry(
+            registry_addrs
+        ) as addrs:
+            yield (
+                actor,
+                addrs,
+            )


-_root_dname = 'pikerd'
-_root_modules = [
+_root_dname: str = 'pikerd'
+_root_modules: list[str] = [
     __name__,
     'piker.service._daemon',
+    'piker.brokers._daemon',
+
     'piker.clearing._ems',
     'piker.clearing._client',
+
     'piker.data._sampling',
 ]


 @acm
 async def open_pikerd(
+    registry_addrs: list[tuple[str, int]],
+
     loglevel: str | None = None,

     # XXX: you should pretty much never want debug mode
     # for data daemons when running in production.
     debug_mode: bool = False,
-    registry_addr: None | tuple[str, int] = None,

     # db init flags
     tsdb: bool = False,
     es: bool = False,
     drop_root_perms_for_ahab: bool = True,

     **kwargs,
			
		||||
 | 
			
		||||
| 
						 | 
				
			
			@ -166,76 +175,45 @@ async def open_pikerd(
 | 
			
		|||
    alive underling services (see below).
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    # NOTE: for the root daemon we always enable the root
 | 
			
		||||
    # mod set and we `list.extend()` it into wtv the
 | 
			
		||||
    # caller requested.
 | 
			
		||||
    # TODO: make this mod set more strict?
 | 
			
		||||
    # -[ ] eventually we should be able to avoid
 | 
			
		||||
    #    having the root have more then permissions to spawn other
 | 
			
		||||
    #    specialized daemons I think?
 | 
			
		||||
    ems: list[str] = kwargs.setdefault('enable_modules', [])
 | 
			
		||||
    ems.extend(_root_modules)
 | 
			
		||||
 | 
			
		||||
    async with (
 | 
			
		||||
        open_piker_runtime(
 | 
			
		||||
 | 
			
		||||
            name=_root_dname,
 | 
			
		||||
            # TODO: eventually we should be able to avoid
 | 
			
		||||
            # having the root have more then permissions to
 | 
			
		||||
            # spawn other specialized daemons I think?
 | 
			
		||||
            enable_modules=_root_modules,
 | 
			
		||||
            loglevel=loglevel,
 | 
			
		||||
            debug_mode=debug_mode,
 | 
			
		||||
            registry_addr=registry_addr,
 | 
			
		||||
            registry_addrs=registry_addrs,
 | 
			
		||||
 | 
			
		||||
            **kwargs,
 | 
			
		||||
 | 
			
		||||
        ) as (root_actor, reg_addr),
 | 
			
		||||
        ) as (
 | 
			
		||||
            root_actor,
 | 
			
		||||
            reg_addrs,
 | 
			
		||||
        ),
 | 
			
		||||
        tractor.open_nursery() as actor_nursery,
 | 
			
		||||
        trio.open_nursery() as service_nursery,
 | 
			
		||||
    ):
 | 
			
		||||
        if root_actor.accept_addr != reg_addr:
 | 
			
		||||
            raise RuntimeError(f'Daemon failed to bind on {reg_addr}!?')
 | 
			
		||||
        for addr in reg_addrs:
 | 
			
		||||
            if addr not in root_actor.accept_addrs:
 | 
			
		||||
                raise RuntimeError(
 | 
			
		||||
                    f'`pikerd` failed to bind on {addr}!\n'
 | 
			
		||||
                    'Maybe you have another daemon already running?'
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
        # assign globally for future daemon/task creation
 | 
			
		||||
        Services.actor_n = actor_nursery
 | 
			
		||||
        Services.service_n = service_nursery
 | 
			
		||||
        Services.debug_mode = debug_mode
 | 
			
		||||
 | 
			
		||||
        if tsdb:
 | 
			
		||||
            from ._ahab import start_ahab
 | 
			
		||||
            from .marketstore import start_marketstore
 | 
			
		||||
 | 
			
		||||
            log.info('Spawning `marketstore` supervisor')
 | 
			
		||||
            ctn_ready, config, (cid, pid) = await service_nursery.start(
 | 
			
		||||
                partial(
 | 
			
		||||
                    start_ahab,
 | 
			
		||||
                    'marketstored',
 | 
			
		||||
                    start_marketstore,
 | 
			
		||||
                    loglevel=loglevel,
 | 
			
		||||
                    drop_root_perms=drop_root_perms_for_ahab,
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
            )
 | 
			
		||||
            log.info(
 | 
			
		||||
                f'`marketstored` up!\n'
 | 
			
		||||
                f'pid: {pid}\n'
 | 
			
		||||
                f'container id: {cid[:12]}\n'
 | 
			
		||||
                f'config: {pformat(config)}'
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
        if es:
 | 
			
		||||
            from ._ahab import start_ahab
 | 
			
		||||
            from .elastic import start_elasticsearch
 | 
			
		||||
 | 
			
		||||
            log.info('Spawning `elasticsearch` supervisor')
 | 
			
		||||
            ctn_ready, config, (cid, pid) = await service_nursery.start(
 | 
			
		||||
                partial(
 | 
			
		||||
                    start_ahab,
 | 
			
		||||
                    'elasticsearch',
 | 
			
		||||
                    start_elasticsearch,
 | 
			
		||||
                    loglevel=loglevel,
 | 
			
		||||
                    drop_root_perms=drop_root_perms_for_ahab,
 | 
			
		||||
                )
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
            log.info(
 | 
			
		||||
                f'`elasticsearch` up!\n'
 | 
			
		||||
                f'pid: {pid}\n'
 | 
			
		||||
                f'container id: {cid[:12]}\n'
 | 
			
		||||
                f'config: {pformat(config)}'
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
        try:
 | 
			
		||||
            yield Services
 | 
			
		||||
 | 
			
		||||
| 
						 | 
				
			
@@ -273,12 +251,9 @@ async def open_pikerd(

@acm
async def maybe_open_pikerd(
    loglevel: Optional[str] = None,
    registry_addr: None | tuple = None,
    tsdb: bool = False,
    es: bool = False,
    drop_root_perms_for_ahab: bool = True,
    registry_addrs: list[tuple[str, int]] | None = None,

    loglevel: str | None = None,
    **kwargs,

) -> tractor._portal.Portal | ClassVar[Services]:
@@ -304,37 +279,51 @@ async def maybe_open_pikerd(
    #     async with open_portal(chan) as arb_portal:
    #         yield arb_portal

    registry_addrs: list[tuple[str, int]] = (
        registry_addrs
        or [_default_reg_addr]
    )

    pikerd_portal: tractor.Portal | None
    async with (
        open_piker_runtime(
            name=query_name,
            registry_addr=registry_addr,
            registry_addrs=registry_addrs,
            loglevel=loglevel,
            **kwargs,
        ) as _,

        tractor.find_actor(
            _root_dname,
            arbiter_sockaddr=registry_addr,
        ) as portal
        ) as (actor, addrs),
    ):
        # connect to any existing daemon presuming
        # its registry socket was selected.
        if (
            portal is not None
        ):
            yield portal
        if _root_dname in actor.uid:
            yield None
            return

        # NOTE: IFF running in disti mode, try to attach to any
        # existing (host-local) `pikerd`.
        else:
            async with tractor.find_actor(
                _root_dname,
                registry_addrs=registry_addrs,
                only_first=True,
                # raise_on_none=True,
            ) as pikerd_portal:

                # connect to any existing remote daemon presuming its
                # registry socket was selected.
                if pikerd_portal is not None:

                    # sanity check that we are actually connecting to
                    # a remote process and not ourselves.
                    assert actor.uid != pikerd_portal.channel.uid
                    assert registry_addrs

                    yield pikerd_portal
                    return

    # presume pikerd role since no daemon could be found at
    # configured address
    async with open_pikerd(
        loglevel=loglevel,
        registry_addr=registry_addr,

        # ahabd (docker super) specific controls
        tsdb=tsdb,
        es=es,
        drop_root_perms_for_ahab=drop_root_perms_for_ahab,
        registry_addrs=registry_addrs,

        # passthrough to ``tractor`` init
        **kwargs,
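Editor's note on the hunk above: post-change, `maybe_open_pikerd` either attaches to an already-registered root daemon or falls through to `open_pikerd` and becomes it. A minimal caller sketch follows; the `piker.service` import path is an assumption not shown in this diff.

import trio
from piker.service import maybe_open_pikerd  # assumed module path

async def main() -> None:
    async with maybe_open_pikerd(
        loglevel='info',
        registry_addrs=[('127.0.0.1', 6116)],
    ) as pikerd_portal:
        if pikerd_portal is None:
            # no daemon was found: this actor *became* the root `pikerd`
            print('running as root pikerd')
        else:
            # attached to a pre-existing (host-local) daemon over IPC
            print(f'found existing pikerd: {pikerd_portal.channel.uid}')

trio.run(main)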
@@ -15,10 +15,11 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
Supervisor for ``docker`` with included async and SC wrapping
to ensure a cancellable container lifetime system.
Supervisor for ``docker`` with included async and SC wrapping to
ensure a cancellable container lifetime system.

'''
from __future__ import annotations
from collections import ChainMap
from functools import partial
import os

@@ -48,14 +49,13 @@ from requests.exceptions import (
    ReadTimeout,
)

from ..log import (
    get_logger,
from ._mngr import Services
from ._util import (
    log,  # sub-sys logger
    get_console_log,
)
from .. import config

log = get_logger(__name__)


class DockerNotStarted(Exception):
    'Prolly you dint start da daemon bruh'
@@ -189,7 +189,11 @@ class Container:
                    and entry not in seen_so_far
                ):
                    seen_so_far.add(entry)
                    getattr(log, level.lower(), log.error)(f'{msg}')
                    getattr(
                        log,
                        level.lower(),
                        log.error
                    )(f'{msg}')

                    if level == 'fatal':
                        raise ApplicationLogError(msg)
@@ -265,8 +269,10 @@ class Container:
        start = time.time()
        for _ in range(6):

            with trio.move_on_after(0.5) as cs:
                log.cancel('polling for CNTR logs...')
            with trio.move_on_after(1) as cs:
                log.cancel(
                    f'polling for CNTR logs for {stop_predicate}..'
                )

                try:
                    await self.process_logs_until(
@@ -330,16 +336,13 @@ class Container:
async def open_ahabd(
    ctx: tractor.Context,
    endpoint: str,  # ns-pointer str-msg-type
    loglevel: str | None = 'cancel',
    loglevel: str | None = None,

    **kwargs,
    **ep_kwargs,

) -> None:

    log = get_console_log(
        loglevel,
        name=__name__,
    )
    log = get_console_log(loglevel or 'cancel')

    async with open_docker() as client:

@@ -352,7 +355,7 @@ async def open_ahabd(
            cntr_config,
            start_pred,
            stop_pred,
        ) = ep_func(client)
        ) = ep_func(client, **ep_kwargs)
        cntr = Container(dcntr)

        conf: ChainMap[str, Any] = ChainMap(
@@ -448,10 +451,17 @@ async def open_ahabd(
                )


async def start_ahab(
@acm
async def start_ahab_service(
    services: Services,
    service_name: str,

    # endpoint config passed as **kwargs
    endpoint: Callable[docker.DockerClient, DockerContainer],
    ep_kwargs: dict,
    loglevel: str | None = 'cancel',

    # supervisor config
    drop_root_perms: bool = True,

    task_status: TaskStatus[

@@ -472,6 +482,9 @@ async def start_ahab(
    is started.

    '''
    # global log
    log = get_console_log(loglevel or 'cancel')

    cn_ready = trio.Event()
    try:
        async with tractor.open_nursery() as an:

@@ -500,21 +513,28 @@ async def start_ahab(
                    )[2]  # named user's uid
                )

            async with portal.open_context(
                open_ahabd,
            cs, first = await services.start_service_task(
                name=service_name,
                portal=portal,

                # rest: endpoint inputs
                target=open_ahabd,
                endpoint=str(NamespacePath.from_ref(endpoint)),
                loglevel='cancel',
            ) as (ctx, first):
                **ep_kwargs,
            )

                cid, pid, cntr_config = first
            cid, pid, cntr_config = first

                task_status.started((
            try:
                yield (
                    cn_ready,
                    cntr_config,
                    (cid, pid),
                ))

                await trio.sleep_forever()
                )
            finally:
                log.info(f'Cancelling ahab service `{service_name}`')
                await services.cancel_service(service_name)

    # since we demoted root perms in this parent
    # we'll get a perms error on proc cleanup in
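One detail worth calling out in this hunk: the endpoint callable is shipped to the `ahabd` sub-actor as a namespace-path string instead of a pickled object. A sketch of that round-trip, assuming `tractor.msg.NamespacePath` exposes `from_ref()`/`load_ref()` as the usage here implies:

from tractor.msg import NamespacePath  # assumed import location

def my_endpoint() -> None:  # hypothetical endpoint callable
    ...

nsp = NamespacePath.from_ref(my_endpoint)
wire_str: str = str(nsp)  # e.g. 'mymod:my_endpoint', safe to msg over IPC
ep_func = NamespacePath(wire_str).load_ref()  # resolve on the far side
assert ep_func is my_endpoint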
@@ -20,7 +20,6 @@ Daemon-actor spawning "endpoint-hooks".
"""
from __future__ import annotations
from typing import (
    Optional,
    Callable,
    Any,
)
@@ -30,41 +29,28 @@ from contextlib import (

import tractor

from ..log import (
    get_logger,
    get_console_log,
from ._util import (
    log,  # sub-sys logger
)
from ..brokers import get_brokermod
from ._mngr import (
    Services,
)
from ._actor_runtime import maybe_open_pikerd
from ._registry import find_service

log = get_logger(__name__)

# `brokerd` enabled modules
# NOTE: keeping this list as small as possible is part of our caps-sec
# model and should be treated with utmost care!
_data_mods = [
    'piker.brokers.core',
    'piker.brokers.data',
    'piker.data',
    'piker.data.feed',
    'piker.data._sampling'
]


@acm
async def maybe_spawn_daemon(

    service_name: str,
    service_task_target: Callable,
    spawn_args: dict[str, Any],
    loglevel: Optional[str] = None,

    spawn_args: dict[str, Any],

    loglevel: str | None = None,
    singleton: bool = False,
    **kwargs,

    **pikerd_kwargs,

) -> tractor.Portal:
    '''
@@ -79,32 +65,31 @@ async def maybe_spawn_daemon(
    clients.

    '''
    if loglevel:
        get_console_log(loglevel)

    # serialize access to this section to avoid
    # 2 or more tasks racing to create a daemon
    lock = Services.locks[service_name]
    await lock.acquire()

    async with find_service(service_name) as portal:
    async with find_service(
        service_name,
        registry_addrs=[('127.0.0.1', 6116)],
    ) as portal:
        if portal is not None:
            lock.release()
            yield portal
            return

    log.warning(f"Couldn't find any existing {service_name}")

    # TODO: really shouldn't the actor spawning be part of the service
    # starting method `Services.start_service()` ?
    log.warning(
        f"Couldn't find any existing {service_name}\n"
        'Attempting to spawn new daemon-service..'
    )

    # ask root ``pikerd`` daemon to spawn the daemon we need; if
    # pikerd is not live we now become the root of the
    # process tree
    async with maybe_open_pikerd(

        loglevel=loglevel,
        **kwargs,
        **pikerd_kwargs,

    ) as pikerd_portal:
@@ -117,108 +102,42 @@ async def maybe_spawn_daemon(
        # service task for that actor.
        started: bool
        if pikerd_portal is None:
            started = await service_task_target(**spawn_args)
            started = await service_task_target(
                loglevel=loglevel,
                **spawn_args,
            )

        else:
            # tell the remote `pikerd` to start the target,
            # the target can't return a non-serializable value
            # since it is expected that service starting is
            # non-blocking and the target task will persist running
            # on `pikerd` after the client requesting its start
            # disconnects.
            # request a remote `pikerd` (service manager) to start the
            # target daemon-task, the target can't return
            # a non-serializable value since it is expected that service
            # starting is non-blocking and the target task will persist
            # running "under" or "within" the `pikerd` actor tree after
            # the requesting client disconnects. in other words this
            # spawns a persistent daemon actor that continues to live
            # for the lifespan of whatever the service manager inside
            # `pikerd` says it should.
            started = await pikerd_portal.run(
                service_task_target,
                loglevel=loglevel,
                **spawn_args,
            )

        if started:
            log.info(f'Service {service_name} started!')

        # block until we can discover (by IPC connection) to the newly
        # spawned daemon-actor and then deliver the portal to the
        # caller.
        async with tractor.wait_for_actor(service_name) as portal:
            lock.release()
            yield portal
            await portal.cancel_actor()


async def spawn_brokerd(

    brokername: str,
    loglevel: Optional[str] = None,
    **tractor_kwargs,

) -> bool:

    log.info(f'Spawning {brokername} broker daemon')

    brokermod = get_brokermod(brokername)
    dname = f'brokerd.{brokername}'

    extra_tractor_kwargs = getattr(brokermod, '_spawn_kwargs', {})
    tractor_kwargs.update(extra_tractor_kwargs)

    # ask `pikerd` to spawn a new sub-actor and manage it under its
    # actor nursery
    modpath = brokermod.__name__
    broker_enable = [modpath]
    for submodname in getattr(
        brokermod,
        '__enable_modules__',
        [],
    ):
        subpath = f'{modpath}.{submodname}'
        broker_enable.append(subpath)

    portal = await Services.actor_n.start_actor(
        dname,
        enable_modules=_data_mods + broker_enable,
        loglevel=loglevel,
        debug_mode=Services.debug_mode,
        **tractor_kwargs
    )

    # non-blocking setup of brokerd service nursery
    from ..data import _setup_persistent_brokerd

    await Services.start_service_task(
        dname,
        portal,
        _setup_persistent_brokerd,
        brokername=brokername,
    )
    return True


@acm
async def maybe_spawn_brokerd(

    brokername: str,
    loglevel: Optional[str] = None,
    **kwargs,

) -> tractor.Portal:
    '''
    Helper to spawn a brokerd service *from* a client
    who wishes to use the sub-actor-daemon.

    '''
    async with maybe_spawn_daemon(

        f'brokerd.{brokername}',
        service_task_target=spawn_brokerd,
        spawn_args={
            'brokername': brokername,
            'loglevel': loglevel,
        },
        loglevel=loglevel,
        **kwargs,

    ) as portal:
        yield portal


async def spawn_emsd(

    loglevel: Optional[str] = None,
    loglevel: str | None = None,
    **extra_tractor_kwargs

) -> bool:

@@ -245,7 +164,10 @@ async def spawn_emsd(
    await Services.start_service_task(
        'emsd',
        portal,

        # signature of target root-task endpoint
        _setup_persistent_emsd,
        loglevel=loglevel,
    )
    return True

@@ -254,18 +176,18 @@ async def spawn_emsd(
async def maybe_open_emsd(

    brokername: str,
    loglevel: Optional[str] = None,
    **kwargs,
    loglevel: str | None = None,

) -> tractor._portal.Portal:  # noqa
    **pikerd_kwargs,

) -> tractor.Portal:  # noqa

    async with maybe_spawn_daemon(

        'emsd',
        service_task_target=spawn_emsd,
        spawn_args={'loglevel': loglevel},
        spawn_args={},
        loglevel=loglevel,
        **kwargs,
        **pikerd_kwargs,

    ) as portal:
        yield portal
@@ -27,16 +27,25 @@ from typing import (
import trio
from trio_typing import TaskStatus
import tractor

from ..log import (
    get_logger,
from tractor import (
    current_actor,
    ContextCancelled,
    Context,
    Portal,
)

log = get_logger(__name__)
from ._util import (
    log,  # sub-sys logger
)


# TODO: factor this into a ``tractor.highlevel`` extension
# pack for the library.
# TODO: we need remote wrapping and a general soln:
# - factor this into a ``tractor.highlevel`` extension pack for the
#   library.
# - wrap a "remote api" wherein you can get a method proxy
#   to the pikerd actor for starting services remotely!
# - prolly rename this to ActorServicesNursery since it spawns
#   new actors and supervises them to completion?
class Services:

    actor_n: tractor._supervise.ActorNursery
@@ -46,7 +55,7 @@ class Services:
        str,
        tuple[
            trio.CancelScope,
            tractor.Portal,
            Portal,
            trio.Event,
        ]
    ] = {}

@@ -56,11 +65,12 @@ class Services:
    async def start_service_task(
        self,
        name: str,
        portal: tractor.Portal,
        portal: Portal,
        target: Callable,
        **kwargs,
        allow_overruns: bool = False,
        **ctx_kwargs,

    ) -> (trio.CancelScope, tractor.Context):
    ) -> (trio.CancelScope, Context):
        '''
        Open a context in a service sub-actor, add to a stack
        that gets unwound at ``pikerd`` teardown.

@@ -81,9 +91,11 @@ class Services:
        ) -> Any:

            with trio.CancelScope() as cs:

                async with portal.open_context(
                    target,
                    **kwargs,
                    allow_overruns=allow_overruns,
                    **ctx_kwargs,

                ) as (ctx, first):

@@ -97,13 +109,30 @@ class Services:
                        # wait on any context's return value
                        # and any final portal result from the
                        # sub-actor.
                        ctx_res = await ctx.result()
                        ctx_res: Any = await ctx.result()

                        # NOTE: blocks indefinitely until cancelled
                        # either by error from the target context
                        # function or by being cancelled here by the
                        # surrounding cancel scope.
                        return (await portal.result(), ctx_res)
                    except ContextCancelled as ctxe:
                        canceller: tuple[str, str] = ctxe.canceller
                        our_uid: tuple[str, str] = current_actor().uid
                        if (
                            canceller != portal.channel.uid
                            and
                            canceller != our_uid
                        ):
                            log.cancel(
                                f'Actor-service {name} was remotely cancelled?\n'
                                f'remote canceller: {canceller}\n'
                                f'Keeping {our_uid} alive, ignoring sub-actor cancel..\n'
                            )
                        else:
                            raise

                    finally:
                        await portal.cancel_actor()
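To make the supervision flow above concrete, a small usage sketch of the post-patch api; the endpoint below is hypothetical and `cancel_service()` is assumed from its use in `_ahab.py` earlier in this set:

import trio
import tractor

@tractor.context
async def my_service_ep(ctx: tractor.Context) -> None:
    # hypothetical service endpoint: handshake then persist
    await ctx.started('ready')  # delivered to the caller as `first`
    await trio.sleep_forever()  # runs until cancelled by the mngr

# ...inside some root-daemon task with a `portal` to the sub-actor:
cs, first = await Services.start_service_task(
    'my-svc',
    portal,  # portal to an already-spawned service sub-actor
    my_service_ep,
)
assert first == 'ready'
await Services.cancel_service('my-svc')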
@@ -27,14 +27,12 @@ from typing import (
)

import tractor
from tractor import Portal


from ..log import (
    get_logger,
from ._util import (
    log,  # sub-sys logger
)

log = get_logger(__name__)

_default_registry_host: str = '127.0.0.1'
_default_registry_port: int = 6116
_default_reg_addr: tuple[str, int] = (
@@ -49,7 +47,9 @@ _registry: Registry | None = None


class Registry:
    addr: None | tuple[str, int] = None
    # TODO: should this be a set or should we complain
    # on duplicates?
    addrs: list[tuple[str, int]] = []

    # TODO: table of uids to sockaddrs
    peers: dict[

@@ -63,69 +63,115 @@ _tractor_kwargs: dict[str, Any] = {}

@acm
async def open_registry(
    addr: None | tuple[str, int] = None,
    addrs: list[tuple[str, int]],
    ensure_exists: bool = True,

) -> tuple[str, int]:
) -> list[tuple[str, int]]:
    '''
    Open the service-actor-discovery registry by returning a set of
    transport socket-addrs to registrar actors which may be
    contacted and queried for similar addresses for other
    non-registrar actors.

    '''
    global _tractor_kwargs
    actor = tractor.current_actor()
    uid = actor.uid
    preset_reg_addrs: list[tuple[str, int]] = Registry.addrs
    if (
        Registry.addr is not None
        and addr
        preset_reg_addrs
        and addrs
    ):
        raise RuntimeError(
            f'`{uid}` registry addr already bound @ {_registry.sockaddr}'
        )
        if preset_reg_addrs != addrs:
            # if any(addr in preset_reg_addrs for addr in addrs):
            diff: set[tuple[str, int]] = set(preset_reg_addrs) - set(addrs)
            if diff:
                log.warning(
                    f'`{uid}` requested only subset of registrars: {addrs}\n'
                    f'However there are more @{diff}'
                )
            else:
                raise RuntimeError(
                    f'`{uid}` has non-matching registrar addresses?\n'
                    f'request: {addrs}\n'
                    f'already set: {preset_reg_addrs}'
                )

    was_set: bool = False

    if (
        not tractor.is_root_process()
        and Registry.addr is None
        and not Registry.addrs
    ):
        Registry.addr = actor._arb_addr
        Registry.addrs.extend(actor.reg_addrs)

    if (
        ensure_exists
        and Registry.addr is None
        and not Registry.addrs
    ):
        raise RuntimeError(
            f"`{uid}` registry should already exist bug doesn't?"
            f"`{uid}` registry should already exist but doesn't?"
        )

    if (
        Registry.addr is None
        not Registry.addrs
    ):
        was_set = True
        Registry.addr = addr or _default_reg_addr
        Registry.addrs = addrs or [_default_reg_addr]

    _tractor_kwargs['arbiter_addr'] = Registry.addr
    # NOTE: only spot this seems currently used is inside
    # `.ui._exec` which is the (eventual qtloops) bootstrapping
    # with guest mode.
    _tractor_kwargs['registry_addrs'] = Registry.addrs

    try:
        yield Registry.addr
        yield Registry.addrs
    finally:
        # XXX: always clear the global addr if we set it so that the
        # next (set of) calls will apply whatever new one is passed
        # in.
        if was_set:
            Registry.addr = None
            Registry.addrs = None
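A quick sketch of the reworked call pattern: the first `open_registry()` in an actor pins the registrar address set, and later entries can omit `addrs` and inherit it (the import path is assumed):

from piker.service._registry import open_registry  # assumed path

async def bootstrap() -> None:
    async with open_registry(
        addrs=[('127.0.0.1', 6116)],
    ) as reg_addrs:
        # discovery helpers in this actor now default to this set
        assert reg_addrs == [('127.0.0.1', 6116)]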
@acm
async def find_service(
    service_name: str,
) -> tractor.Portal | None:
    registry_addrs: list[tuple[str, int]] | None = None,

    async with open_registry() as reg_addr:
    first_only: bool = True,

) -> (
    Portal
    | list[Portal]
    | None
):

    reg_addrs: list[tuple[str, int]]
    async with open_registry(
        addrs=(
            registry_addrs
            # NOTE: if no addr set is passed assume the registry has
            # already been opened and use the previously applied
            # startup set.
            or Registry.addrs
        ),
    ) as reg_addrs:
        log.info(f'Scanning for service `{service_name}`')

        maybe_portals: list[Portal] | Portal | None

        # attach to existing daemon by name if possible
        async with tractor.find_actor(
            service_name,
            arbiter_sockaddr=reg_addr,
        ) as maybe_portal:
            yield maybe_portal
            registry_addrs=reg_addrs,
            only_first=first_only,  # if set only returns single ref
        ) as maybe_portals:
            if not maybe_portals:
                yield None
                return

            yield maybe_portals
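And the discovery side in sketch form: `find_service` now yields a single portal, a list of portals, or `None` depending on `first_only` (the service name below is illustrative):

from piker.service._registry import find_service  # assumed path

async def lookup() -> None:
    async with find_service(
        'brokerd.kraken',  # hypothetical daemon name
        first_only=True,   # yield `Portal | None` instead of a list
    ) as portal:
        if portal is None:
            print('no such service registered')
        else:
            print(f'found daemon: {portal.channel.uid}')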
async def check_for_service(

@@ -136,9 +182,11 @@ async def check_for_service(
    Service daemon "liveness" predicate.

    '''
    async with open_registry(ensure_exists=False) as reg_addr:
        async with tractor.query_actor(
    async with (
        open_registry(ensure_exists=False) as reg_addr,
        tractor.query_actor(
            service_name,
            arbiter_sockaddr=reg_addr,
        ) as sockaddr:
            return sockaddr
        ) as sockaddr,
    ):
        return sockaddr
@@ -0,0 +1,33 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
"""
Sub-sys module commons.

"""
from functools import partial

from ..log import (
    get_logger,
    get_console_log,
)
subsys: str = 'piker.service'

log = get_logger(subsys)

get_console_log = partial(
    get_console_log,
    name=subsys,
)
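The `partial` rebind above means any sub-module doing `from ._util import get_console_log` gets console logging pre-named for the subsystem; roughly:

# sketch: the two calls below are equivalent after the rebind.
from piker.service._util import get_console_log  # assumed path

log = get_console_log('info')
# ...which expands to the original helper with the name baked in:
# piker.log.get_console_log('info', name='piker.service')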
@@ -15,26 +15,23 @@
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

from __future__ import annotations
from contextlib import asynccontextmanager as acm
from typing import (
    Any,
    TYPE_CHECKING,
)

import asks

if TYPE_CHECKING:
    import docker
    from ._ahab import DockerContainer

from piker.log import (
    get_logger,
    get_console_log
from ._util import log  # sub-sys logger
from ._util import (
    get_console_log,
)

import asks


log = get_logger(__name__)


# container level config
_config = {

@@ -92,7 +89,7 @@ def start_elasticsearch(
                'http://localhost:19200/_cat/health',
                params={'format': 'json'}
            )).json()
            kog.info(
            log.info(
                'ElasticSearch cntr health:\n'
                f'{health}'
            )
@@ -126,3 +123,47 @@ def start_elasticsearch(
        health_query,
        chk_for_closed_msg,
    )


@acm
async def start_ahab_daemon(
    service_mngr: Services,
    user_config: dict | None = None,
    loglevel: str | None = None,

) -> tuple[str, dict]:
    '''
    Task entrypoint to start the elasticsearch docker container using
    the service manager.

    '''
    from ._ahab import start_ahab_service

    # dict-merge any user settings
    conf: dict = _config.copy()
    if user_config:
        conf = conf | user_config

    dname: str = 'esd'
    log.info(f'Spawning `{dname}` supervisor')
    async with start_ahab_service(
        service_mngr,
        dname,

        # NOTE: docker-py client is passed at runtime
        start_elasticsearch,
        ep_kwargs={'user_config': conf},
        loglevel=loglevel,

    ) as (
        ctn_ready,
        config,
        (cid, pid),
    ):
        log.info(
            f'`{dname}` up!\n'
            f'pid: {pid}\n'
            f'container id: {cid[:12]}\n'
            f'config: {pformat(config)}'
        )
        yield dname, conf
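For orientation, a sketch of driving this entrypoint from a root-daemon task; the override key shown is hypothetical:

from piker.service.elastic import start_ahab_daemon  # assumed path

async def run_es(services) -> None:  # `services`: the Services mngr
    async with start_ahab_daemon(
        services,
        user_config={'startup_timeout': 5},  # hypothetical override
        loglevel='info',
    ) as (dname, conf):
        assert dname == 'esd'
        ...  # container stays supervised until this scope exits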
@@ -1,5 +1,5 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for piker0)
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by

@@ -25,11 +25,9 @@
'''
from __future__ import annotations
from contextlib import asynccontextmanager as acm
from datetime import datetime
from pprint import pformat
from typing import (
    Any,
    Optional,
    Union,
    TYPE_CHECKING,
)
import time

@@ -37,29 +35,34 @@ from math import isnan
from pathlib import Path

from bidict import bidict
from msgspec.msgpack import encode, decode
from msgspec.msgpack import (
    encode,
    decode,
)
# import pyqtgraph as pg
import numpy as np
import tractor
from trio_websocket import open_websocket_url
from anyio_marketstore import (
from anyio_marketstore import (  # noqa
    open_marketstore_client,
    MarketstoreClient,
    Params,
)
import pendulum
import purerpc
# TODO: import this for specific error set expected by mkts client
# import purerpc

from ..data.feed import maybe_open_feed
from . import Services
from ._util import (
    log,  # sub-sys logger
    get_console_log,
)

if TYPE_CHECKING:
    import docker
    from ._ahab import DockerContainer

from ..data.feed import maybe_open_feed
from ..log import get_logger, get_console_log
from .._profile import Profiler


log = get_logger(__name__)


# ahabd-supervisor and container level config

@@ -70,7 +73,7 @@ _config = {
    'startup_timeout': 2,
}

_yaml_config = '''
_yaml_config_str: str = '''
# piker's ``marketstore`` config.

# mount this config using:

@@ -89,6 +92,12 @@ stale_threshold: 5
enable_add: true
enable_remove: false

# SUPER DUPER CRITICAL to address a super weird issue:
# https://github.com/pikers/piker/issues/443
# seems like "variable compression" is possibly borked
# or snappy compression somehow breaks easily?
disable_variable_compression: true

triggers:
  - module: ondiskagg.so
    on: "*/1Sec/OHLCV"

@@ -106,18 +115,18 @@ triggers:
    # config:
    #     filter: "nasdaq"

'''.format(**_config)
'''
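Note the template change above: the `.format()` call moves from import time (`'''.format(**_config)`) to render time inside `start_marketstore()`, so user overrides can reach the yaml. In sketch form, with the port keys taken from the hunks below and values illustrative:

# sketch: render the (now unformatted) template with a merged config.
conf = {
    'grpc_listen_port': 5995,
    'ws_listen_port': 5993,
    'startup_timeout': 2,
}
rendered: str = _yaml_config_str.format(**conf)
# any `{grpc_listen_port}`-style placeholder in the template is filled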
def start_marketstore(
    client: docker.DockerClient,

    user_config: dict,
    **kwargs,

) -> tuple[DockerContainer, dict[str, Any]]:
    '''
    Start and supervise a marketstore instance with its config bind-mounted
    in from the piker config directory on the system.
    Start and supervise a marketstore instance with its config
    bind-mounted in from the piker config directory on the system.

    The equivalent cli cmd to this code is:

@@ -141,14 +150,16 @@ def start_marketstore(
        os.mkdir(mktsdir)

    yml_file = os.path.join(mktsdir, 'mkts.yml')
    yaml_config = _yaml_config_str.format(**user_config)

    if not os.path.isfile(yml_file):
        log.warning(
            f'No `marketstore` config exists?: {yml_file}\n'
            'Generating new file from template:\n'
            f'{_yaml_config}\n'
            f'{yaml_config}\n'
        )
        with open(yml_file, 'w') as yf:
            yf.write(_yaml_config)
            yf.write(yaml_config)

    # create a mount from user's local piker config dir into container
    config_dir_mnt = docker.types.Mount(

@@ -171,6 +182,9 @@ def start_marketstore(
        type='bind',
    )

    grpc_listen_port = int(user_config['grpc_listen_port'])
    ws_listen_port = int(user_config['ws_listen_port'])

    dcntr: DockerContainer = client.containers.run(
        'alpacamarkets/marketstore:latest',
        # do we need this for cmds?

@@ -178,8 +192,8 @@ def start_marketstore(

        # '-p 5993:5993',
        ports={
            '5993/tcp': 5993,  # jsonrpc / ws?
            '5995/tcp': 5995,  # grpc
            f'{ws_listen_port}/tcp': ws_listen_port,
            f'{grpc_listen_port}/tcp': grpc_listen_port,
        },
        mounts=[
            config_dir_mnt,

@@ -199,7 +213,13 @@ def start_marketstore(
        return "launching tcp listener for all services..." in msg

    async def stop_matcher(msg: str):
        return "exiting..." in msg
        return (
            # not sure when this happens, some kinda stop condition
            "exiting..." in msg

            # after we send SIGINT..
            or "initiating graceful shutdown due to 'interrupt' request" in msg
        )

    return (
        dcntr,

@@ -211,6 +231,49 @@ def start_marketstore(
    )

@acm
async def start_ahab_daemon(
    service_mngr: Services,
    user_config: dict | None = None,
    loglevel: str | None = None,

) -> tuple[str, dict]:
    '''
    Task entrypoint to start the marketstore docker container using the
    service manager.

    '''
    from ._ahab import start_ahab_service

    # dict-merge any user settings
    conf: dict = _config.copy()
    if user_config:
        conf: dict = conf | user_config

    dname: str = 'marketstored'
    log.info(f'Spawning `{dname}` supervisor')
    async with start_ahab_service(
        service_mngr,
        dname,

        # NOTE: docker-py client is passed at runtime
        start_marketstore,
        ep_kwargs={'user_config': conf},
        loglevel=loglevel,
    ) as (
        _,
        config,
        (cid, pid),
    ):
        log.info(
            f'`{dname}` up!\n'
            f'pid: {pid}\n'
            f'container id: {cid[:12]}\n'
            f'config: {pformat(config)}'
        )
        yield dname, conf

_tick_tbk_ids: tuple[str, str] = ('1Sec', 'TICK')
_tick_tbk: str = '{}/' + '/'.join(_tick_tbk_ids)

@@ -264,16 +327,6 @@ _ohlcv_dt = [
]


ohlc_key_map = bidict({
    'Epoch': 'time',
    'Open': 'open',
    'High': 'high',
    'Low': 'low',
    'Close': 'close',
    'Volume': 'volume',
})


def mk_tbk(keys: tuple[str, str, str]) -> str:
    '''
    Generate a marketstore table key from a tuple.

@@ -286,7 +339,7 @@ def mk_tbk(keys: tuple[str, str, str]) -> str:

def quote_to_marketstore_structarray(
    quote: dict[str, Any],
    last_fill: Optional[float]
    last_fill: float | None,

) -> np.array:
    '''

@@ -325,24 +378,6 @@ def quote_to_marketstore_structarray(
    return np.array([tuple(array_input)], dtype=_quote_dt)
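Since the struct-array packing here is easy to get wrong, a standalone numpy sketch of the same idea (the dtype fields are illustrative stand-ins for `_quote_dt`):

import numpy as np

quote_dt = np.dtype([  # illustrative, not the real `_quote_dt`
    ('Epoch', 'i8'),
    ('Last', 'f4'),
    ('Volume', 'f4'),
])
quote = {'Epoch': 1_684_000_000, 'Last': 101.25, 'Volume': 3.0}
# pack the dict into a single-row structured array; field order matters
row = np.array(
    [tuple(quote[name] for name in quote_dt.names)],
    dtype=quote_dt,
)
assert row['Last'][0] == np.float32(101.25)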


@acm
async def get_client(
    host: str = 'localhost',
    port: int = _config['grpc_listen_port'],

) -> MarketstoreClient:
    '''
    Load a ``anyio_marketstore`` grpc client connected
    to an existing ``marketstore`` server.

    '''
    async with open_marketstore_client(
        host,
        port
    ) as client:
        yield client


class MarketStoreError(Exception):
    "Generic marketstore client error"

@@ -370,356 +405,6 @@ tf_in_1s = bidict({
})
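The timeframe table is a `bidict` precisely so `read_ohlcv()`/`delete_ts()` below can map in both directions; a sketch with a subset of entries (string values follow the mkts naming scheme but are illustrative here):

from bidict import bidict

tf_in_1s = bidict({
    1: '1Sec',
    60: '1Min',  # illustrative subset of the real table
})
assert tf_in_1s[60] == '1Min'          # seconds -> mkts tf string
assert tf_in_1s.inverse['1Sec'] == 1   # tf string -> seconds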
 | 
			
		||||
 | 
			
		||||
 | 
			
		||||
class Storage:
 | 
			
		||||
    '''
 | 
			
		||||
    High level storage api for both real-time and historical ingest.
 | 
			
		||||
 | 
			
		||||
    '''
 | 
			
		||||
    def __init__(
 | 
			
		||||
        self,
 | 
			
		||||
        client: MarketstoreClient,
 | 
			
		||||
 | 
			
		||||
    ) -> None:
 | 
			
		||||
        # TODO: eventually this should be an api/interface type that
 | 
			
		||||
        # ensures we can support multiple tsdb backends.
 | 
			
		||||
        self.client = client
 | 
			
		||||
 | 
			
		||||
        # series' cache from tsdb reads
 | 
			
		||||
        self._arrays: dict[str, np.ndarray] = {}
 | 
			
		||||
 | 
			
		||||
    async def list_keys(self) -> list[str]:
 | 
			
		||||
        return await self.client.list_symbols()
 | 
			
		||||
 | 
			
		||||
    async def search_keys(self, pattern: str) -> list[str]:
 | 
			
		||||
        '''
 | 
			
		||||
        Search for time series key in the storage backend.
 | 
			
		||||
 | 
			
		||||
        '''
 | 
			
		||||
        ...
 | 
			
		||||
 | 
			
		||||
    async def write_ticks(self, ticks: list) -> None:
 | 
			
		||||
        ...
 | 
			
		||||
 | 
			
		||||
    async def load(
 | 
			
		||||
        self,
 | 
			
		||||
        fqsn: str,
 | 
			
		||||
        timeframe: int,
 | 
			
		||||
 | 
			
		||||
    ) -> tuple[
 | 
			
		||||
        np.ndarray,  # timeframe sampled array-series
 | 
			
		||||
        Optional[datetime],  # first dt
 | 
			
		||||
        Optional[datetime],  # last dt
 | 
			
		||||
    ]:
 | 
			
		||||
 | 
			
		||||
        first_tsdb_dt, last_tsdb_dt = None, None
 | 
			
		||||
        hist = await self.read_ohlcv(
 | 
			
		||||
            fqsn,
 | 
			
		||||
            # on first load we don't need to pull the max
 | 
			
		||||
            # history per request size worth.
 | 
			
		||||
            limit=3000,
 | 
			
		||||
            timeframe=timeframe,
 | 
			
		||||
        )
 | 
			
		||||
        log.info(f'Loaded tsdb history {hist}')
 | 
			
		||||
 | 
			
		||||
        if len(hist):
 | 
			
		||||
            times = hist['Epoch']
 | 
			
		||||
            first, last = times[0], times[-1]
 | 
			
		||||
            first_tsdb_dt, last_tsdb_dt = map(
 | 
			
		||||
                pendulum.from_timestamp, [first, last]
 | 
			
		||||
            )
 | 
			
		||||
 | 
			
		||||
        return (
 | 
			
		||||
            hist,  # array-data
 | 
			
		||||
            first_tsdb_dt,  # start of query-frame
 | 
			
		||||
            last_tsdb_dt,  # most recent
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
    async def read_ohlcv(
 | 
			
		||||
        self,
 | 
			
		||||
        fqsn: str,
 | 
			
		||||
        timeframe: int | str,
 | 
			
		||||
        end: Optional[int] = None,
 | 
			
		||||
        limit: int = int(800e3),
 | 
			
		||||
 | 
			
		||||
    ) -> np.ndarray:
 | 
			
		||||
 | 
			
		||||
        client = self.client
 | 
			
		||||
        syms = await client.list_symbols()
 | 
			
		||||
 | 
			
		||||
        if fqsn not in syms:
 | 
			
		||||
            return {}
 | 
			
		||||
 | 
			
		||||
        # use the provided timeframe or 1s by default
 | 
			
		||||
        tfstr = tf_in_1s.get(timeframe, tf_in_1s[1])
 | 
			
		||||
 | 
			
		||||
        params = Params(
 | 
			
		||||
            symbols=fqsn,
 | 
			
		||||
            timeframe=tfstr,
 | 
			
		||||
            attrgroup='OHLCV',
 | 
			
		||||
            end=end,
 | 
			
		||||
            # limit_from_start=True,
 | 
			
		||||
 | 
			
		||||
            # TODO: figure the max limit here given the
 | 
			
		||||
            # ``purepc`` msg size limit of purerpc: 33554432
 | 
			
		||||
            limit=limit,
 | 
			
		||||
        )
 | 
			
		||||
 | 
			
		||||
        try:
 | 
			
		||||
            result = await client.query(params)
 | 
			
		||||
        except purerpc.grpclib.exceptions.UnknownError as err:
 | 
			
		||||
            # indicate there is no history for this timeframe
 | 
			
		||||
            log.exception(
 | 
			
		||||
                f'Unknown mkts QUERY error: {params}\n'
 | 
			
		||||
                f'{err.args}'
 | 
			
		||||
            )
 | 
			
		||||
            return {}
 | 
			
		||||
 | 
			
		||||
        # TODO: it turns out column access on recarrays is actually slower:
 | 
			
		||||
        # https://jakevdp.github.io/PythonDataScienceHandbook/02.09-structured-data-numpy.html#RecordArrays:-Structured-Arrays-with-a-Twist
 | 
			
		||||
        # it might make sense to make these structured arrays?
 | 
			
		||||
        data_set = result.by_symbols()[fqsn]
 | 
			
		||||
        array = data_set.array
 | 
			
		||||
 | 
			
		||||
        # XXX: ensure sample rate is as expected
 | 
			
		||||
        time = data_set.array['Epoch']
 | 
			
		||||
        if len(time) > 1:
 | 
			
		||||
            time_step = time[-1] - time[-2]
 | 
			
		||||
            ts = tf_in_1s.inverse[data_set.timeframe]
 | 
			
		||||
 | 
			
		||||
            if time_step != ts:
 | 
			
		||||
                log.warning(
 | 
			
		||||
                    f'MKTS BUG: wrong timeframe loaded: {time_step}'
 | 
			
		||||
                    'YOUR DATABASE LIKELY CONTAINS BAD DATA FROM AN OLD BUG'
 | 
			
		||||
                    f'WIPING HISTORY FOR {ts}s'
 | 
			
		||||
                )
 | 
			
		||||
                await self.delete_ts(fqsn, timeframe)
 | 
			
		||||
 | 
			
		||||
                # try reading again..
 | 
			
		||||
                return await self.read_ohlcv(
 | 
			
		||||
                    fqsn,
 | 
			
		||||
                    timeframe,
 | 
			
		||||
                    end,
 | 
			
		||||
                    limit,
 | 
			
		||||
                )
 | 
			
		||||
 | 
			
		||||
        return array
 | 
			
		||||
 | 
			
		||||
    async def delete_ts(
 | 
			
		||||
        self,
 | 
			
		||||
        key: str,
 | 
			
		||||
        timeframe: Optional[Union[int, str]] = None,
 | 
			
		||||
        fmt: str = 'OHLCV',
 | 
			
		||||
 | 
			
		||||
    ) -> bool:
 | 
			
		||||
 | 
			
		||||
        client = self.client
 | 
			
		||||
        syms = await client.list_symbols()
 | 
			
		||||
        if key not in syms:
 | 
			
		||||
            raise KeyError(f'`{key}` table key not found in\n{syms}?')
 | 
			
		||||
 | 
			
		||||
        tbk = mk_tbk((
 | 
			
		||||
            key,
 | 
			
		||||
            tf_in_1s.get(timeframe, tf_in_1s[60]),
 | 
			
		||||
            fmt,
 | 
			
		||||
        ))
 | 
			
		||||
        return await client.destroy(tbk=tbk)
 | 
			
		||||
 | 
			
		||||
    async def write_ohlcv(
        self,
        fqsn: str,
        ohlcv: np.ndarray,
        timeframe: int,
        append_and_duplicate: bool = True,
        limit: int = int(800e3),

    ) -> None:
        # build mkts schema compat array for writing
        mkts_dt = np.dtype(_ohlcv_dt)
        mkts_array = np.zeros(
            len(ohlcv),
            dtype=mkts_dt,
        )
        # copy from shm array (yes it's this easy):
        # https://numpy.org/doc/stable/user/basics.rec.html#assignment-from-other-structured-arrays
        mkts_array[:] = ohlcv[[
            'time',
            'open',
            'high',
            'low',
            'close',
            'volume',
        ]]

        m, r = divmod(len(mkts_array), limit)

        tfkey = tf_in_1s[timeframe]
        # push in ``limit``-sized chunks
        for i in range(1, m + 1):
            to_push = mkts_array[(i-1)*limit:i*limit]

            # write to db
            resp = await self.client.write(
                to_push,
                tbk=f'{fqsn}/{tfkey}/OHLCV',

                # NOTE: will append duplicates
                # for the same timestamp-index.
                # TODO: pre-deduplicate?
                isvariablelength=append_and_duplicate,
            )

            log.info(
                f'Wrote {to_push.size} datums to tsdb\n'
            )

            for resp in resp.responses:
                err = resp.error
                if err:
                    raise MarketStoreError(err)

        if r:
            to_push = mkts_array[m*limit:]

            # write to db
            resp = await self.client.write(
                to_push,
                tbk=f'{fqsn}/{tfkey}/OHLCV',

                # NOTE: will append duplicates
                # for the same timestamp-index.
                # TODO: pre-deduplicate?
                isvariablelength=append_and_duplicate,
            )

            log.info(
                f'Wrote {to_push.size} datums to tsdb\n'
            )

            for resp in resp.responses:
                err = resp.error
                if err:
                    raise MarketStoreError(err)

    # XXX: currently the only way to do this is through the CLI:

    # sudo ./marketstore connect --dir ~/.config/piker/data
    # >> \show mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15
    # and this seems to block and use up mem..
    # >> \trim mnq.globex.20220617.ib/1Sec/OHLCV 2022-05-15

    # relevant source code for this is here:
    # https://github.com/alpacahq/marketstore/blob/master/cmd/connect/session/trim.go#L14
    # def delete_range(self, start_dt, end_dt) -> None:
    #     ...

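# NOTE (editor's sketch): the ``divmod`` chunking inside
# ``write_ohlcv()`` above can be sanity-checked in isolation; this
# helper is NOT part of the original module, just an illustration of
# the slice bounds the loop should produce.
def _demo_write_chunks(
    n_rows: int,
    limit: int,
) -> list[tuple[int, int]]:
    '''
    Return the (start, stop) slice bounds covering ``n_rows`` in
    ``limit``-sized chunks plus a final remainder chunk.

    >>> _demo_write_chunks(5, 2)
    [(0, 2), (2, 4), (4, 5)]

    '''
    m, r = divmod(n_rows, limit)
    bounds = [
        ((i - 1) * limit, i * limit)
        for i in range(1, m + 1)
    ]
    if r:
        bounds.append((m * limit, n_rows))
    return bounds
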
@acm
async def open_storage_client(
    fqsn: str,
    period: Optional[Union[int, str]] = None,  # in seconds

) -> tuple[Storage, dict[str, np.ndarray]]:
    '''
    Load a series by key and deliver in ``numpy`` struct array format.

    '''
    async with (
        # eventually a storage backend endpoint
        get_client() as client,
    ):
        # slap on our wrapper api
        yield Storage(client)

@acm
async def open_tsdb_client(
    fqsn: str,
) -> Storage:

    # TODO: real-time dedicated task for ensuring
    # history consistency between the tsdb, shm and real-time feed..

    # update sequence design notes:

    # - load existing highest frequency data from mkts
    #   * how do we want to offer this to the UI?
    #    - lazy loading?
    #    - try to load it all and expect graphics caching/diffing
    #      to hide extra bits that aren't in view?

    # - compute the diff between latest data from broker and shm
    #   * use sql api in mkts to determine where the backend should
    #     start querying for data?
    #   * append any diff with new shm length
    #   * determine missing (gapped) history by scanning
    #   * how far back do we look?

    # - begin rt update ingest and aggregation
    #   * could start by always writing ticks to mkts instead of
    #     worrying about a shm queue for now.
    #   * we have a short list of shm queues worth grokking:
    #     - https://github.com/pikers/piker/issues/107
    #   * the original data feed arch blurb:
    #     - https://github.com/pikers/piker/issues/98
    #
    profiler = Profiler(
        disabled=True,  # not pg_profile_enabled(),
        delayed=False,
    )

    async with (
        open_storage_client(fqsn) as storage,

        maybe_open_feed(
            [fqsn],
            start_stream=False,

        ) as feed,
    ):
        profiler(f'opened feed for {fqsn}')

        # to_append = feed.hist_shm.array
        # to_prepend = None

        if fqsn:
            flume = feed.flumes[fqsn]
            symbol = flume.symbol
            if symbol:
                fqsn = symbol.fqsn

            # diff db history with shm and only write the missing portions
            # ohlcv = flume.hist_shm.array

            # TODO: use pg profiler
            # for secs in (1, 60):
            #     tsdb_array = await storage.read_ohlcv(
            #         fqsn,
            #         timeframe=timeframe,
            #     )
            #     # hist diffing:
            #     # these aren't currently used but can be referenced from
            #     # within the embedded ipython shell below.
            #     to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
            #     to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]

            # profiler('Finished db arrays diffs')

            syms = await storage.client.list_symbols()
            # log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
            # profiler(f'listed symbols {syms}')
            yield storage

        # for array in [to_append, to_prepend]:
        #     if array is None:
        #         continue

        #     log.info(
        #         f'Writing datums {array.size} -> to tsdb from shm\n'
        #     )
        #     await storage.write_ohlcv(fqsn, array)

        # profiler('Finished db writes')

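# NOTE (editor's sketch): the commented "hist diffing" logic above
# boils down to epoch-range comparisons between the shm buffer and
# the tsdb series; a standalone (hypothetical) version might look
# like the following, assuming a non-empty tsdb array with an
# 'Epoch' field and a shm array with a 'time' field.
def _diff_history(
    shm_ohlcv: np.ndarray,   # struct array with a 'time' field
    tsdb_ohlcv: np.ndarray,  # struct array with an 'Epoch' field
) -> tuple[np.ndarray, np.ndarray]:
    '''
    Return the ``(to_prepend, to_append)`` slices of ``shm_ohlcv``
    which fall outside the epoch range already stored in the tsdb.

    '''
    first: float = tsdb_ohlcv['Epoch'][0]
    last: float = tsdb_ohlcv['Epoch'][-1]
    return (
        shm_ohlcv[shm_ohlcv['time'] < first],
        shm_ohlcv[shm_ohlcv['time'] > last],
    )
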
async def ingest_quote_stream(
    symbols: list[str],
    brokername: str,

@ -731,6 +416,7 @@ async def ingest_quote_stream(
    Ingest a broker quote stream into a ``marketstore`` tsdb.

    '''
    from piker.storage.marketstore import get_client
    async with (
        maybe_open_feed(brokername, symbols, loglevel=loglevel) as feed,
        get_client() as ms_client,

@ -0,0 +1,320 @@
# piker: trading gear for hackers
# Copyright (C) Tyler Goodlet (in stewardship for pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

'''
(time-series) database middleware layer.

- APIs for read, write, delete, replicate over multiple
  db systems.
- backend agnostic tick msg ingest machinery.
- broadcast systems for fan out of real-time ingested
  data to live consumers.
- test harness utilities for data-processing verification.

'''
from abc import abstractmethod
from contextlib import asynccontextmanager as acm
from functools import partial
from importlib import import_module
from datetime import datetime
from types import ModuleType
from typing import (
    # Callable,
    # Awaitable,
    # Any,
    # AsyncIterator,
    Protocol,
    # Generic,
    # TypeVar,
)

import numpy as np


from .. import config
from ..service import (
    check_for_service,
)
from ..log import (
    get_logger,
    get_console_log,
)
subsys: str = 'piker.storage'

log = get_logger(subsys)
get_console_log = partial(
    get_console_log,
    name=subsys,
)


__tsdbs__: list[str] = [
    'nativedb',
    # 'marketstore',
]


class StorageClient(
    Protocol,
):
    '''
    Api description that all storage backends must implement
    in order to satisfy the historical data mgmt layer.

    '''
    name: str

    @abstractmethod
    async def list_keys(self) -> list[str]:
        ...

    @abstractmethod
    def search_keys(self) -> list[str]:
        ...

    # @abstractmethod
    # async def write_ticks(
    #     self,
    #     ticks: list,
    # ) -> ReceiveType:
    #     ...

    # ``trio.abc.AsyncResource`` methods
    @abstractmethod
    async def load(
        self,
        fqme: str,
        timeframe: int,

    ) -> tuple[
        np.ndarray,  # timeframe sampled array-series
        datetime | None,  # first dt
        datetime | None,  # last dt
    ]:
        ...

    @abstractmethod
    async def delete_ts(
        self,
        key: str,
        timeframe: int | str | None = None,
        fmt: str = 'OHLCV',

    ) -> bool:
        ...

    @abstractmethod
    async def read_ohlcv(
        self,
        fqme: str,
        timeframe: int | str,
        end: int | None = None,
        limit: int = int(800e3),

    ) -> np.ndarray:
        ...

    async def write_ohlcv(
        self,
        fqme: str,
        ohlcv: np.ndarray,
        timeframe: int,
        append_and_duplicate: bool = True,
        limit: int = int(800e3),

    ) -> None:
        ...

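# NOTE (editor's sketch): a toy, in-memory backend conforming to the
# protocol above; purely illustrative (it is NOT one of the shipped
# backends) but handy for seeing the expected call surface. It keeps
# each (fqme, timeframe) series as an `np.ndarray` with a 'time'
# field, and raises `TimeseriesNotFound` (defined just below) on a
# missing key.
class _DictStorageClient:

    name: str = 'dictdb'

    def __init__(self) -> None:
        self._tables: dict[tuple[str, int], np.ndarray] = {}

    async def list_keys(self) -> list[str]:
        return [fqme for fqme, _tf in self._tables]

    def search_keys(self) -> list[str]:
        return [fqme for fqme, _tf in self._tables]

    async def load(
        self,
        fqme: str,
        timeframe: int,
    ) -> tuple[
        np.ndarray,
        datetime | None,
        datetime | None,
    ]:
        array: np.ndarray = await self.read_ohlcv(fqme, timeframe)
        if not array.size:
            return array, None, None
        times = array['time']
        return (
            array,
            datetime.fromtimestamp(times[0]),
            datetime.fromtimestamp(times[-1]),
        )

    async def delete_ts(
        self,
        key: str,
        timeframe: int | str | None = None,
        fmt: str = 'OHLCV',
    ) -> bool:
        return self._tables.pop((key, timeframe), None) is not None

    async def read_ohlcv(
        self,
        fqme: str,
        timeframe: int | str,
        end: int | None = None,
        limit: int = int(800e3),
    ) -> np.ndarray:
        array: np.ndarray | None = self._tables.get((fqme, timeframe))
        if array is None:
            raise TimeseriesNotFound(fqme)
        if end is not None:
            array = array[array['time'] <= end]
        return array[-limit:]

    async def write_ohlcv(
        self,
        fqme: str,
        ohlcv: np.ndarray,
        timeframe: int,
        append_and_duplicate: bool = True,
        limit: int = int(800e3),
    ) -> None:
        self._tables[(fqme, timeframe)] = ohlcv
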
class TimeseriesNotFound(Exception):
    '''
    No timeseries entry can be found for this backend.

    '''


class StorageConnectionError(ConnectionError):
    '''
    Can't connect to the desired tsdb subsys/service.

    '''


def get_storagemod(name: str) -> ModuleType:
    mod: ModuleType = import_module(
        '.' + name,
        'piker.storage',
    )

    # we only allow monkeying because it's for internal keying
    mod.name = mod.__name__.split('.')[-1]
    return mod

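# NOTE (editor's sketch): example usage; `get_storagemod('nativedb')`
# imports `piker.storage.nativedb` and tags it so that `mod.name`
# can be used as the backend's lookup/config key:
#
#   mod = get_storagemod('nativedb')
#   assert mod.name == 'nativedb'
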
@acm
async def open_storage_client(
    backend: str | None = None,

) -> tuple[ModuleType, StorageClient]:
    '''
    Load the ``StorageClient`` for named backend.

    '''
    def_backend: str = 'nativedb'
    tsdb_host: str = 'localhost'

    # load root config and any tsdb user defined settings
    conf, path = config.load(
        conf_name='conf',
        touch_if_dne=True,
    )

    # TODO: maybe not under a "network" section.. since
    # no more chitty `marketstore`..
    tsdbconf: dict = {}
    service_section = conf.get('service')
    if (
        not backend
        and service_section
    ):
        tsdbconf = service_section.get('tsdb')

        # lookup backend tsdb module by name and load any user service
        # settings for connecting to the tsdb service.
        backend: str = tsdbconf.pop(
            'name',
            def_backend,
        )
        tsdb_host: str | list = tsdbconf.get('maddrs', [])

    if backend is None:
        backend: str = def_backend

    # import and load storagemod by name
    mod: ModuleType = get_storagemod(backend)
    get_client = mod.get_client

    log.info(f'Scanning for existing `{backend}`')
    if backend != def_backend:
        tsdb_is_up: bool = await check_for_service(f'{backend}d')
        if (
            tsdb_host == 'localhost'
            or tsdb_is_up
        ):
            log.info(f'Connecting to local: {backend}@{tsdbconf}')
        else:
            log.info(f'Attempting to connect to remote: {backend}@{tsdbconf}')
    else:
        log.info(f'Connecting to default storage: {backend}@{tsdbconf}')

    async with (
        get_client(**tsdbconf) as client,
    ):
        # slap on our wrapper api
        yield mod, client

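# NOTE (editor's sketch): the config lookup above implies a user
# `conf.toml` section of roughly this shape (keys inferred from the
# `.pop('name', ...)` / `.get('maddrs', ...)` calls, so treat it as
# an assumption):
#
#   [service.tsdb]
#   name = 'nativedb'
#   # maddrs = ['localhost']
#
# and a minimal consumer looks like:
async def _demo_storage_client() -> None:
    async with open_storage_client() as (mod, client):
        keys: list[str] = await client.list_keys()
        log.info(f'`{mod.name}` holds {len(keys)} key(s)')
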
# NOTE: pretty sure right now this is only being
# called by a CLI entrypoint?
@acm
async def open_tsdb_client(
    fqme: str,
) -> StorageClient:

    # TODO: real-time dedicated task for ensuring
    # history consistency between the tsdb, shm and real-time feed..

    # update sequence design notes:

    # - load existing highest frequency data from mkts
    #   * how do we want to offer this to the UI?
    #    - lazy loading?
    #    - try to load it all and expect graphics caching/diffing
    #      to hide extra bits that aren't in view?

    # - compute the diff between latest data from broker and shm
    #   * use sql api in mkts to determine where the backend should
    #     start querying for data?
    #   * append any diff with new shm length
    #   * determine missing (gapped) history by scanning
    #   * how far back do we look?

    # - begin rt update ingest and aggregation
    #   * could start by always writing ticks to mkts instead of
    #     worrying about a shm queue for now.
    #   * we have a short list of shm queues worth grokking:
    #     - https://github.com/pikers/piker/issues/107
    #   * the original data feed arch blurb:
    #     - https://github.com/pikers/piker/issues/98
    #
    from ..toolz import Profiler
    profiler = Profiler(
        disabled=True,  # not pg_profile_enabled(),
        delayed=False,
    )
    from ..data.feed import maybe_open_feed

    async with (
        open_storage_client() as (_, storage),

        maybe_open_feed(
            [fqme],
            start_stream=False,

        ) as feed,
    ):
        profiler(f'opened feed for {fqme}')

        # to_append = feed.hist_shm.array
        # to_prepend = None

        if fqme:
            flume = feed.flumes[fqme]
            symbol = flume.mkt
            if symbol:
                fqme = symbol.fqme

            # diff db history with shm and only write the missing portions
            # ohlcv = flume.hist_shm.array

            # TODO: use pg profiler
            # for secs in (1, 60):
            #     tsdb_array = await storage.read_ohlcv(
            #         fqme,
            #         timeframe=timeframe,
            #     )
            #     # hist diffing:
            #     # these aren't currently used but can be referenced from
            #     # within the embedded ipython shell below.
            #     to_append = ohlcv[ohlcv['time'] > ts['Epoch'][-1]]
            #     to_prepend = ohlcv[ohlcv['time'] < ts['Epoch'][0]]

            # profiler('Finished db arrays diffs')

            _ = await storage.client.list_symbols()
            # log.info(f'Existing tsdb symbol set:\n{pformat(syms)}')
            # profiler(f'listed symbols {syms}')
            yield storage

        # for array in [to_append, to_prepend]:
        #     if array is None:
        #         continue

        #     log.info(
        #         f'Writing datums {array.size} -> to tsdb from shm\n'
        #     )
        #     await storage.write_ohlcv(fqme, array)

        # profiler('Finished db writes')

@ -0,0 +1,553 @@
# piker: trading gear for hackers
# Copyright (C) 2018-present  Tyler Goodlet (in stewardship of pikers)

# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.

# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU Affero General Public License for more details.

# You should have received a copy of the GNU Affero General Public License
# along with this program.  If not, see <https://www.gnu.org/licenses/>.

"""
Storage middle-ware CLIs.

"""
from __future__ import annotations
# from datetime import datetime
# from contextlib import (
#     AsyncExitStack,
# )
from pathlib import Path
from math import copysign
import time
from types import ModuleType
from typing import (
    Any,
    TYPE_CHECKING,
)

import polars as pl
import numpy as np
import tractor
# import pendulum
from rich.console import Console
import trio
# from rich.markdown import Markdown
import typer

from piker.service import open_piker_runtime
from piker.cli import cli
from piker.data import (
    ShmArray,
)
from piker import tsp
from piker.data._formatters import BGM
from . import log
from . import (
    __tsdbs__,
    open_storage_client,
    StorageClient,
)

if TYPE_CHECKING:
    from piker.ui._remote_ctl import AnnotCtl


store = typer.Typer()


@store.command()
def ls(
    backends: list[str] = typer.Argument(
        default=None,
        help='Storage backends to query, default is all.'
    ),
):
    from rich.table import Table

    if not backends:
        backends: list[str] = __tsdbs__

    console = Console()

    async def query_all():
        nonlocal backends

        async with (
            open_piker_runtime(
                'tsdb_storage',
            ),
        ):
            for i, backend in enumerate(backends):
                table = Table()
                try:
                    async with open_storage_client(backend=backend) as (
                        mod,
                        client,
                    ):
                        table.add_column(f'{mod.name}@{client.address}')
                        keys: list[str] = await client.list_keys()
                        for key in keys:
                            table.add_row(key)

                    console.print(table)
                except Exception:
                    log.error(f'Unable to connect to storage engine: `{backend}`')

    trio.run(query_all)

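# NOTE (editor's sketch): assuming the standard `piker` entrypoint
# (to which this typer app is attached as the `store` sub-command
# below), the above renders one key table per backend, e.g.:
#
#   piker store ls            # query all known backends
#   piker store ls nativedb   # query just one
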
# TODO: like ls but takes in a pattern and matches
# @store.command()
# def search(
#     patt: str,
#     backends: list[str] = typer.Argument(
#         default=None,
#         help='Storage backends to query, default is all.'
#     ),
# ):
#     ...


@store.command()
def delete(
    symbols: list[str],

    backend: str = typer.Option(
        default=None,
        help='Storage backend to update'
    ),
    # TODO: expose this as flagged multi-option?
    timeframes: list[int] = [1, 60],
):
    '''
    Delete a storage backend's time series for (table) keys provided as
    ``symbols``.

    '''
    from . import open_storage_client

    async def main(symbols: list[str]):
        async with (
            open_piker_runtime(
                'tsdb_storage',
            ),
            open_storage_client(backend) as (_, client),
            trio.open_nursery() as n,
        ):
            # spawn queries as tasks for max conc!
            for fqme in symbols:
                for tf in timeframes:
                    n.start_soon(
                        client.delete_ts,
                        fqme,
                        tf,
                    )

    trio.run(main, symbols)

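# NOTE (editor's sketch): the deletes above are spawned concurrently,
# one nursery task per (fqme, timeframe) pair; invocation is assumed
# to look like (hypothetical fqme):
#
#   piker store delete mnq.cme.ib --backend nativedb
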
@store.command()
def anal(
    fqme: str,
    period: int = 60,
    pdb: bool = False,

) -> None:
    '''
    Anal-ysis is when you take the data and do stuff to it.

    NOTE: This ONLY loads the offline timeseries data (by default
    from a parquet file) NOT the in-shm version you might be seeing
    in a chart.

    '''
    async def main():
        async with (
            open_piker_runtime(
                # are you a bear or boi?
                'tsdb_polars_anal',
                debug_mode=pdb,
            ),
            open_storage_client() as (
                mod,
                client,
            ),
        ):
            syms: list[str] = await client.list_keys()
            log.info(f'{len(syms)} FOUND for {mod.name}')

            history: ShmArray  # np buffer format
            (
                history,
                first_dt,
                last_dt,
            ) = await client.load(
                fqme,
                period,
            )
            assert first_dt < last_dt

            null_segs: tuple = tsp.get_null_segs(
                frame=history,
                period=period,
            )
            # TODO: do tsp queries to backend to fill in missing
            # history and then prolly write it to tsdb!

            shm_df: pl.DataFrame = await client.as_df(
                fqme,
                period,
            )

            df: pl.DataFrame  # with dts
            deduped: pl.DataFrame  # deduplicated dts
            (
                df,
                deduped,
                diff,
            ) = tsp.dedupe(
                shm_df,
                period=period,
            )

            write_edits: bool = True
            if (
                write_edits
                and (
                    diff
                    or null_segs
                )
            ):
                await tractor.pause()
                await client.write_ohlcv(
                    fqme,
                    ohlcv=deduped,
                    timeframe=period,
                )

            else:
                # TODO: something better with tab completion..
                # is there something more minimal but nearly as
                # functional as ipython?
                await tractor.pause()
                assert not null_segs

    trio.run(main)

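# NOTE (editor's sketch): the core of a dedupe step like
# `tsp.dedupe()` used above, simplified to its essence: drop rows
# sharing an epoch stamp and report how many were removed.
def _dedupe_by_time(
    df: pl.DataFrame,
) -> tuple[pl.DataFrame, int]:
    deduped: pl.DataFrame = df.unique(
        subset=['time'],
        maintain_order=True,
    )
    return deduped, df.height - deduped.height
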
async def markup_gaps(
    fqme: str,
    timeframe: float,
    actl: AnnotCtl,
    wdts: pl.DataFrame,
    gaps: pl.DataFrame,

) -> dict[int, dict]:
    '''
    Remote annotate time-gaps in a dt-fielded ts (normally OHLC)
    with rectangles.

    '''
    aids: dict[int, dict] = {}
    for i in range(gaps.height):

        row: pl.DataFrame = gaps[i]

        # the gap's RIGHT-most bar's OPEN value
        # at that time (sample) step.
        iend: int = row['index'][0]
        # dt: datetime = row['dt'][0]
        # dt_prev: datetime = row['dt_prev'][0]
        # dt_end_t: float = dt.timestamp()


        # TODO: can we eventually remove this
        # once we figure out why the epoch cols
        # don't match?
        # TODO: FIX HOW/WHY these aren't matching
        # and are instead off by 4hours (EST
        # vs. UTC?!?!)
        # end_t: float = row['time']
        # assert (
        #     dt.timestamp()
        #     ==
        #     end_t
        # )

        # the gap's LEFT-most bar's CLOSE value
        # at that time (sample) step.
        prev_r: pl.DataFrame = wdts.filter(
            pl.col('index') == iend - 1
        )
        # XXX: probably a gap in the (newly sorted or de-duplicated)
        # dt-df, so we might need to re-index first..
        if prev_r.is_empty():
            await tractor.pause()

        istart: int = prev_r['index'][0]
        # dt_start_t: float = dt_prev.timestamp()

        # start_t: float = prev_r['time']
        # assert (
        #     dt_start_t
        #     ==
        #     start_t
        # )

        # TODO: implement px-col width measure
        # and ensure at least as many px-cols
        # shown per rect as configured by user.
        # gap_w: float = abs((iend - istart))
        # if gap_w < 6:
        #     margin: float = 6
        #     iend += margin
        #     istart -= margin

        rect_gap: float = BGM*3/8
        opn: float = row['open'][0]
        ro: tuple[float, float] = (
            # dt_end_t,
            iend + rect_gap + 1,
            opn,
        )
        cls: float = prev_r['close'][0]
        lc: tuple[float, float] = (
            # dt_start_t,
            istart - rect_gap,  # + 1,
            cls,
        )

        # color by gap direction: up-gaps (open > prev close)
        # get green, down-gaps get red.
        diff: float = cls - opn
        sgn: float = copysign(1, diff)
        color: str = {
            -1: 'buy_green',
            1: 'sell_red',
        }[sgn]

        rect_kwargs: dict[str, Any] = dict(
            fqme=fqme,
            timeframe=timeframe,
            start_pos=lc,
            end_pos=ro,
            color=color,
        )

        aid: int = await actl.add_rect(**rect_kwargs)
        assert aid
        aids[aid] = rect_kwargs

    # tell chart to redraw all its
    # graphics view layers Bo
    await actl.redraw(
        fqme=fqme,
        timeframe=timeframe,
    )
    return aids

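# NOTE (editor's sketch): a minimal version of the step-gap scan that
# `tsp.detect_time_gaps()` is presumed to do: flag any row whose
# epoch-diff from the prior row exceeds the expected sample period.
def _detect_step_gaps(
    df: pl.DataFrame,
    expect_period: float,
) -> pl.DataFrame:
    return (
        df
        .with_columns(
            pl.col('time').diff().alias('step'),
        )
        .filter(pl.col('step') > expect_period)
    )
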
@store.command()
def ldshm(
    fqme: str,
    write_parquet: bool = True,
    reload_parquet_to_shm: bool = True,

) -> None:
    '''
    Linux ONLY: load any shm buffer matching the given fqme file
    name from /dev/shm/ into an OHLCV numpy array and polars
    DataFrame, optionally writing to offline storage via a
    `.parquet` file.

    '''
    async def main():
        from piker.ui._remote_ctl import (
            open_annot_ctl,
        )
        actl: AnnotCtl
        mod: ModuleType
        client: StorageClient
        async with (
            open_piker_runtime(
                'polars_boi',
                enable_modules=['piker.data._sharedmem'],
                debug_mode=True,
            ),
            open_storage_client() as (
                mod,
                client,
            ),
            open_annot_ctl() as actl,
        ):
            shm_df: pl.DataFrame | None = None
            tf2aids: dict[float, dict] = {}

            for (
                shmfile,
                shm,
                # parquet_path,
                shm_df,
            ) in tsp.iter_dfs_from_shms(fqme):

                times: np.ndarray = shm.array['time']
                d1: float = float(times[-1] - times[-2])
                d2: float = float(times[-2] - times[-3])
                med: float = np.median(np.diff(times))
                if (
                    d1 < 1.
                    and d2 < 1.
                    and med < 1.
                ):
                    raise ValueError(
                        f'Something is wrong with time period for {shm}:\n{times}'
                    )

                period_s: float = float(max(d1, d2, med))

                null_segs: tuple = tsp.get_null_segs(
                    frame=shm.array,
                    period=period_s,
                )

                # TODO: call null-seg fixer somehow?
                if null_segs:
                    await tractor.pause()
                #     async with (
                #         trio.open_nursery() as tn,
                #         mod.open_history_client(
                #             mkt,
                #         ) as (get_hist, config),
                #     ):
                #         nulls_detected: trio.Event = await tn.start(partial(
                #             tsp.maybe_fill_null_segments,

                #             shm=shm,
                #             timeframe=timeframe,
                #             get_hist=get_hist,
                #             sampler_stream=sampler_stream,
                #             mkt=mkt,
                #         ))

                # over-write back to shm?
                wdts: pl.DataFrame  # with dts
                deduped: pl.DataFrame  # deduplicated dts
                (
                    wdts,
                    deduped,
                    diff,
                ) = tsp.dedupe(
                    shm_df,
                    period=period_s,
                )

                # detect gaps in the expected (uniform OHLC) sample period
                step_gaps: pl.DataFrame = tsp.detect_time_gaps(
                    deduped,
                    expect_period=period_s,
                )

                # TODO: by default we always want to mark these up
                # with rects showing up/down gaps Bo
                venue_gaps: pl.DataFrame = tsp.detect_time_gaps(
                    deduped,
                    expect_period=period_s,

                    # TODO: actually pull the exact duration
                    # expected for each venue operational period?
                    gap_dt_unit='days',
                    gap_thresh=1,
                )

                # TODO: find the disjoint set of step gaps from
                # venue (closure) set!
                # -[ ] do a set diff by checking for the unique
                #    gap set only in the step_gaps?
                if (
                    not venue_gaps.is_empty()
                    or (
                        period_s < 60
                        and not step_gaps.is_empty()
                    )
                ):
                    # np-ify the deduped frame once up front; it's
                    # needed by both the (optional) shm reload and
                    # the gap markup below.
                    new = tsp.pl2np(
                        deduped,
                        dtype=shm.array.dtype,
                    )

                    # write repaired ts to parquet-file?
                    if write_parquet:
                        start: float = time.time()
                        path: Path = await client.write_ohlcv(
                            fqme,
                            ohlcv=deduped,
                            timeframe=period_s,
                        )
                        write_delay: float = round(
                            time.time() - start,
                            ndigits=6,
                        )

                        # read back from fs
                        start: float = time.time()
                        read_df: pl.DataFrame = pl.read_parquet(path)
                        read_delay: float = round(
                            time.time() - start,
                            ndigits=6,
                        )
                        log.info(
                            f'parquet write took {write_delay} secs\n'
                            f'file path: {path}\n'
                            f'parquet read took {read_delay} secs\n'
                            f'polars df: {read_df}'
                        )

                        if reload_parquet_to_shm:
                            # since normally readonly
                            shm._array.setflags(
                                write=int(1),
                            )
                            shm.push(
                                new,
                                prepend=True,
                                start=new['index'][-1],
                                update_first=False,  # don't update ._first
                            )

                    do_markup_gaps: bool = True
                    if do_markup_gaps:
                        new_df: pl.DataFrame = tsp.np2pl(new)
                        aids: dict = await markup_gaps(
                            fqme,
                            period_s,
                            actl,
                            new_df,
                            step_gaps,
                        )
                        # last chance manual overwrites in REPL
                        # await tractor.pause()
                        assert aids
                        tf2aids[period_s] = aids

                else:
                    # allow interaction even when no ts problems.
                    assert not diff

            await tractor.pause()
            log.info('Exiting TSP shm anal-izer!')

            if shm_df is None:
                log.error(
                    f'No matching shm buffers for {fqme} ?'
                )

    trio.run(main)


typer_click_object = typer.main.get_command(store)
cli.add_command(typer_click_object, 'store')