Use `tractor.to_asyncio.open_channel_from()` in ib backend
parent 980a6dde05 · commit af0503956a
					
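Editor's note: throughout this diff the older `tractor.to_asyncio.run_task()` plumbing is swapped for `open_channel_from()`, which pairs a trio task with an asyncio task over a two-way channel. A minimal sketch of the calling convention as used below (everything except the `tractor`/`trio` APIs themselves is illustrative):

```python
import asyncio

import tractor
import trio


async def aio_producer(
    from_trio: asyncio.Queue,
    to_trio: trio.abc.SendChannel,
) -> None:
    # runs on the asyncio side; the first value sent is the sync
    # handshake and arrives as ``first`` on the trio side
    to_trio.send_nowait(None)
    for i in range(3):
        to_trio.send_nowait(i)
        await asyncio.sleep(0.1)


async def consume() -> None:
    # must run inside an actor spawned with ``infected_aio==True``
    # (see the module docstring in the first hunk below)
    async with tractor.to_asyncio.open_channel_from(
        aio_producer,
    ) as (first, chan):
        assert first is None
        async for msg in chan:
            print(msg)
```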
@@ -21,7 +21,7 @@ Note the client runs under an ``asyncio`` loop (since ``ib_insync`` is
 built on it) and thus actor aware API calls must be spawned with
 ``infected_aio==True``.
 """
-from contextlib import asynccontextmanager
+from contextlib import asynccontextmanager as acm
 from dataclasses import asdict
 from datetime import datetime
 from functools import partial
@@ -570,66 +570,6 @@ class Client:
             )
         )
 
-    async def recv_trade_updates(
-        self,
-        to_trio: trio.abc.SendChannel,
-
-    ) -> None:
-        """Stream a ticker using the std L1 api.
-        """
-        self.inline_errors(to_trio)
-
-        def push_tradesies(eventkit_obj, obj, fill=None):
-            """Push events to trio task.
-
-            """
-            if fill is not None:
-                # execution details event
-                item = ('fill', (obj, fill))
-
-            elif eventkit_obj.name() == 'positionEvent':
-                item = ('position', obj)
-
-            else:
-                item = ('status', obj)
-
-            log.info(f'eventkit event ->\n{pformat(item)}')
-
-            try:
-                to_trio.send_nowait(item)
-            except trio.BrokenResourceError:
-                log.exception(f'Disconnected from {eventkit_obj} updates')
-                eventkit_obj.disconnect(push_tradesies)
-
-        # hook up to the weird eventkit object - event stream api
-        for ev_name in [
-            'orderStatusEvent',  # all order updates
-            'execDetailsEvent',  # all "fill" updates
-            'positionEvent',  # avg price updates per symbol per account
-
-            # 'commissionReportEvent',
-            # XXX: ugh, it is a separate event from IB and it's
-            # emitted as follows:
-            # self.ib.commissionReportEvent.emit(trade, fill, report)
-
-            # XXX: not sure yet if we need these
-            # 'updatePortfolioEvent',
-
-            # XXX: these all seem to be weird ib_insync intrernal
-            # events that we probably don't care that much about
-            # given the internal design is wonky af..
-            # 'newOrderEvent',
-            # 'orderModifyEvent',
-            # 'cancelOrderEvent',
-            # 'openOrderEvent',
-        ]:
-            eventkit_obj = getattr(self.ib, ev_name)
-            handler = partial(push_tradesies, eventkit_obj)
-            eventkit_obj.connect(handler)
-
-        # let the engine run and stream
-        await self.ib.disconnectedEvent
-
     def inline_errors(
         self,
         to_trio: trio.abc.SendChannel,
@@ -675,6 +615,71 @@ class Client:
         return self.ib.positions(account=account)
 
 
+async def recv_trade_updates(
+
+    client: Client,
+    to_trio: trio.abc.SendChannel,
+
+) -> None:
+    """Stream a ticker using the std L1 api.
+    """
+    client.inline_errors(to_trio)
+
+    # sync with trio task
+    to_trio.send_nowait(None)
+
+    def push_tradesies(eventkit_obj, obj, fill=None):
+        """Push events to trio task.
+
+        """
+        if fill is not None:
+            # execution details event
+            item = ('fill', (obj, fill))
+
+        elif eventkit_obj.name() == 'positionEvent':
+            item = ('position', obj)
+
+        else:
+            item = ('status', obj)
+
+        log.info(f'eventkit event ->\n{pformat(item)}')
+
+        try:
+            to_trio.send_nowait(item)
+        except trio.BrokenResourceError:
+            log.exception(f'Disconnected from {eventkit_obj} updates')
+            eventkit_obj.disconnect(push_tradesies)
+
+    # hook up to the weird eventkit object - event stream api
+    for ev_name in [
+        'orderStatusEvent',  # all order updates
+        'execDetailsEvent',  # all "fill" updates
+        'positionEvent',  # avg price updates per symbol per account
+
+        # 'commissionReportEvent',
+        # XXX: ugh, it is a separate event from IB and it's
+        # emitted as follows:
+        # self.ib.commissionReportEvent.emit(trade, fill, report)
+
+        # XXX: not sure yet if we need these
+        # 'updatePortfolioEvent',
+
+        # XXX: these all seem to be weird ib_insync internal
+        # events that we probably don't care that much about
+        # given the internal design is wonky af..
+        # 'newOrderEvent',
+        # 'orderModifyEvent',
+        # 'cancelOrderEvent',
+        # 'openOrderEvent',
+    ]:
+        eventkit_obj = getattr(client.ib, ev_name)
+        handler = partial(push_tradesies, eventkit_obj)
+        eventkit_obj.connect(handler)
+
+    # let the engine run and stream
+    await client.ib.disconnectedEvent
+
+
 # default config ports
 _tws_port: int = 7497
 _gw_port: int = 4002
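Note that `recv_trade_updates()` is hoisted out of `Client` (deleted above) to module scope so it can be handed directly to `open_channel_from()` with the client passed as a kwarg, and it now sends an initial `None` as the sync handshake. The trio-side pairing, which appears verbatim in the `trades_dialogue()` hunk further down, looks roughly like:

```python
# excerpt-style sketch; assumes a surrounding trio task and a loaded
# ``client`` instance
async with tractor.to_asyncio.open_channel_from(
    recv_trade_updates,
    client=client,
) as (first, trade_event_stream):
    assert first is None  # the handshake value sent above
    async for item in trade_event_stream:
        kind, data = item  # e.g. ('fill', (trade, fill))
```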
@@ -705,7 +710,7 @@ def get_config() -> dict[str, Any]:
 _accounts2clients: dict[str, Client] = {}
 
 
-@asynccontextmanager
+@acm
 async def load_aio_clients(
 
     host: str = '127.0.0.1',
@@ -756,119 +761,136 @@ async def load_aio_clients(
     accounts_def = config.load_accounts(['ib'])
     try_ports = list(ports.values())
     ports = try_ports if port is None else [port]
-    we_connected = []
+    # we_connected = []
     connect_timeout = 0.5 if platform.system() != 'Windows' else 1
     combos = list(itertools.product(hosts, ports))
 
     # allocate new and/or reload disconnected but cached clients
-    try:
-        # TODO: support multiple clients allowing for execution on
-        # multiple accounts (including a paper instance running on the
-        # same machine) and switching between accounts in the EMs
+    # try:
+    # TODO: support multiple clients allowing for execution on
+    # multiple accounts (including a paper instance running on the
+    # same machine) and switching between accounts in the EMs
 
-        _err = None
+    _err = None
 
-        # (re)load any and all clients that can be found
-        # from connection details in ``brokers.toml``.
-        for host, port in combos:
+    # (re)load any and all clients that can be found
+    # from connection details in ``brokers.toml``.
+    for host, port in combos:
 
-            sockaddr = (host, port)
-            client = _client_cache.get(sockaddr)
-            accounts_found: dict[str, Client] = {}
+        sockaddr = (host, port)
+        client = _client_cache.get(sockaddr)
+        accounts_found: dict[str, Client] = {}
 
-            if (
-                client and client.ib.isConnected() or
-                sockaddr in _scan_ignore
-            ):
-                continue
+        if (
+            client and client.ib.isConnected() or
+            sockaddr in _scan_ignore
+        ):
+            continue
 
-            try:
-                ib = NonShittyIB()
+        try:
+            ib = NonShittyIB()
 
-                # if this is a persistent brokerd, try to allocate
-                # a new id for each client
-                client_id = next(_client_ids)
+            # XXX: not sure if we ever really need to increment the
+            # client id if teardown is successful.
+            client_id = 616
 
-                await ib.connectAsync(
-                    host,
-                    port,
-                    clientId=client_id,
+            await ib.connectAsync(
+                host,
+                port,
+                clientId=client_id,
 
-                    # this timeout is sensative on windows and will
-                    # fail without a good "timeout error" so be
-                    # careful.
-                    timeout=connect_timeout,
-                )
+                # this timeout is sensitive on windows and will
+                # fail without a good "timeout error" so be
+                # careful.
+                timeout=connect_timeout,
+            )
 
-                # create and cache client
-                client = Client(ib)
+            # create and cache client
+            client = Client(ib)
 
-                # Pre-collect all accounts available for this
-                # connection and map account names to this client
-                # instance.
-                pps = ib.positions()
-                if pps:
-                    for pp in pps:
-                        accounts_found[
-                            accounts_def.inverse[pp.account]
-                        ] = client
+            # Pre-collect all accounts available for this
+            # connection and map account names to this client
+            # instance.
+            pps = ib.positions()
+            if pps:
+                for pp in pps:
+                    accounts_found[
+                        accounts_def.inverse[pp.account]
+                    ] = client
 
-                # if there are no positions or accounts
-                # without positions we should still register
-                # them for this client
-                for value in ib.accountValues():
-                    acct = value.account
-                    if acct not in accounts_found:
-                        accounts_found[
-                            accounts_def.inverse[acct]
-                        ] = client
+            # if there are no positions or accounts
+            # without positions we should still register
+            # them for this client
+            for value in ib.accountValues():
+                acct = value.account
+                if acct not in accounts_found:
+                    accounts_found[
+                        accounts_def.inverse[acct]
+                    ] = client
 
-                log.info(
-                    f'Loaded accounts for client @ {host}:{port}\n'
-                    f'{pformat(accounts_found)}'
-                )
+            log.info(
+                f'Loaded accounts for client @ {host}:{port}\n'
+                f'{pformat(accounts_found)}'
+            )
 
-                # update all actor-global caches
-                log.info(f"Caching client for {(host, port)}")
-                _client_cache[(host, port)] = client
-                we_connected.append(client)
-                _accounts2clients.update(accounts_found)
+            # update all actor-global caches
+            log.info(f"Caching client for {(host, port)}")
+            _client_cache[(host, port)] = client
 
-            except (
-                ConnectionRefusedError,
+            # we_connected.append((host, port, client))
 
-                # TODO: if trying to scan for remote api clients
-                # pretty sure we need to catch this, though it
-                # definitely needs a shorter timeout since it hangs
-                # for like 5s..
-                asyncio.exceptions.TimeoutError,
-                OSError,
-            ) as ce:
-                _err = ce
-                log.warning(f'Failed to connect on {port}')
+            # TODO: don't do it this way, get a gud to_asyncio
+            # context / .start() system goin..
+            def pop_and_discon():
+                log.info(f'Disconnecting client {client}')
+                client.ib.disconnect()
+                _client_cache.pop((host, port), None)
 
-                # cache logic to avoid rescanning if we already have all
-                # clients loaded.
-                _scan_ignore.add(sockaddr)
-        else:
-            if not _client_cache:
-                raise ConnectionError(
-                    'No ib APIs could be found scanning @:\n'
-                    f'{pformat(combos)}\n'
-                    'Check your `brokers.toml` and/or network'
-                ) from _err
+            # NOTE: the above callback **CAN'T FAIL** or shm won't get
+            # torn down correctly ...
+            tractor._actor._lifetime_stack.callback(pop_and_discon)
 
-        # retreive first loaded client
-        clients = list(_client_cache.values())
-        if clients:
-            client = clients[0]
+            _accounts2clients.update(accounts_found)
 
-        yield client, _client_cache, _accounts2clients
+        except (
+            ConnectionRefusedError,
 
-    except BaseException:
-        for client in we_connected:
-            client.ib.disconnect()
-        raise
+            # TODO: if trying to scan for remote api clients
+            # pretty sure we need to catch this, though it
+            # definitely needs a shorter timeout since it hangs
+            # for like 5s..
+            asyncio.exceptions.TimeoutError,
+            OSError,
+        ) as ce:
+            _err = ce
+            log.warning(f'Failed to connect on {port}')
+
+            # cache logic to avoid rescanning if we already have all
+            # clients loaded.
+            _scan_ignore.add(sockaddr)
+    else:
+        if not _client_cache:
+            raise ConnectionError(
+                'No ib APIs could be found scanning @:\n'
+                f'{pformat(combos)}\n'
+                'Check your `brokers.toml` and/or network'
+            ) from _err
+
+    # retrieve first loaded client
+    clients = list(_client_cache.values())
+    if clients:
+        client = clients[0]
+
+    yield client, _client_cache, _accounts2clients
+
+    # TODO: this in a way that works xD
+    # finally:
+    #     pass
+    #     # async with trio.CancelScope(shield=True):
+    #     for host, port, client in we_connected:
+    #         client.ib.disconnect()
+    #         _client_cache.pop((host, port))
+    #     raise
 
 
 async def _aio_run_client_method(
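The rewritten connect loop above registers `pop_and_discon()` on tractor's actor lifetime stack instead of tracking a `we_connected` list. Judging by the `.callback()` usage, `_lifetime_stack` behaves like a stdlib `contextlib.ExitStack` (an assumption; it is a private tractor attribute):

```python
# illustration of the assumed teardown-hook semantics
from contextlib import ExitStack

stack = ExitStack()

def pop_and_discon() -> None:
    # keep this infallible: per the NOTE in the hunk above, other
    # teardown registered on the same stack (e.g. shm) must still run
    print('disconnecting client')

stack.callback(pop_and_discon)
stack.close()  # runs registered callbacks in LIFO order
```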
@@ -909,16 +931,16 @@ async def _trio_run_client_method(
     assert ca.is_infected_aio()
 
     # if the method is an *async gen* stream for it
-    meth = getattr(Client, method)
+    # meth = getattr(Client, method)
 
-    args = tuple(inspect.getfullargspec(meth).args)
+    # args = tuple(inspect.getfullargspec(meth).args)
 
-    if inspect.isasyncgenfunction(meth) or (
-        # if the method is an *async func* but manually
-        # streams back results, make sure to also stream it
-        'to_trio' in args
-    ):
-        kwargs['_treat_as_stream'] = True
+    # if inspect.isasyncgenfunction(meth) or (
+    #     # if the method is an *async func* but manually
+    #     # streams back results, make sure to also stream it
+    #     'to_trio' in args
+    # ):
+    #     kwargs['_treat_as_stream'] = True
 
     return await tractor.to_asyncio.run_task(
         _aio_run_client_method,
@@ -968,7 +990,7 @@ def get_client_proxy(
     return proxy
 
 
-@asynccontextmanager
+@acm
 async def get_client(
     **kwargs,
 ) -> Client:
@@ -1037,8 +1059,10 @@ def normalize(
 
 
 async def get_bars(
+
     sym: str,
     end_dt: str = "",
+
 ) -> (dict, np.ndarray):
 
     _err: Optional[Exception] = None
@@ -1066,10 +1090,20 @@ async def get_bars(
             # TODO: retrieve underlying ``ib_insync`` error?
             if err.code == 162:
 
+                # TODO: so this error is normally raised (it seems) if
+                # we try to retrieve history for a time range for which
+                # there is none. in that case we should not only report
+                # the "empty range" but also do an iteration on the time
+                # step for ``next_dt`` to see if we can pull older
+                # history.
                 if 'HMDS query returned no data' in err.message:
-                    # means we hit some kind of historical "dead zone"
-                    # and further requests seem to always cause
-                    # throttling despite the rps being low
+                    # means we hit some kind of historical "empty space"
+                    # and further requests will need to decrement the
+                    # start time dt in order to not receive a further
+                    # error?
+                    # OLDER: seem to always cause throttling despite low rps
+
+                    # raise err
                     break
 
                 elif 'No market data permissions for' in err.message:
@@ -1092,8 +1126,7 @@ async def get_bars(
                     fails += 1
                     continue
 
-    return (None, None)
-
+    return None, None
     # else:  # throttle wasn't fixed so error out immediately
     #     raise _err
 
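The new TODO above suggests walking the request window back when IB reports an empty range instead of just breaking out. A hypothetical shape for that retry loop, reusing `get_bars()`'s `(out, fails)` return convention (the one-day step and helper name are illustrative, not part of this commit):

```python
from datetime import datetime, timedelta

async def probe_older_history(sym: str):
    # hypothetical helper sketching the TODO; not in this commit
    end = datetime.now()
    while True:
        out, fails = await get_bars(sym, end_dt=end.isoformat())
        if out is not None:
            return out
        # empty range: request a window ending one period earlier
        end -= timedelta(days=1)
```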
@@ -1108,7 +1141,7 @@ async def backfill_bars(
     # on that until we have the `marketstore` daemon in place in which
     # case the shm size will be driven by user config and available sys
     # memory.
-    count: int = 24,
+    count: int = 16,
 
     task_status: TaskStatus[trio.CancelScope] = trio.TASK_STATUS_IGNORED,
 
@@ -1119,7 +1152,13 @@ async def backfill_bars(
     https://github.com/pikers/piker/issues/128
 
     """
-    (first_bars, bars_array, next_dt), fails = await get_bars(sym)
+    out, fails = await get_bars(sym)
+    if out is None:
+        raise RuntimeError("Could not pull current history?!")
+
+    (first_bars, bars_array, next_dt) = out
+    vlm = bars_array['volume']
+    vlm[vlm < 0] = 0
 
     # write historical data to buffer
     shm.push(bars_array)
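The added `vlm[vlm < 0] = 0` guard zeroes the negative placeholder volumes IB can report before the array is pushed to shm. It works in place because field access on a numpy structured array returns a view:

```python
import numpy as np

bars = np.array(
    [(1.0, -1), (2.0, 5)],
    dtype=[('close', 'f8'), ('volume', 'i8')],
)
vlm = bars['volume']   # a view, not a copy
vlm[vlm < 0] = 0       # mask-assign mutates ``bars`` directly
assert (bars['volume'] >= 0).all()
```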
@@ -1182,14 +1221,21 @@ _quote_streams: dict[str, trio.abc.ReceiveStream] = {}
 
 
 async def _setup_quote_stream(
+
+    from_trio: asyncio.Queue,
+    to_trio: trio.abc.SendChannel,
+
     symbol: str,
     opts: tuple[int] = ('375', '233', '236'),
     contract: Optional[Contract] = None,
-) -> None:
+
+) -> trio.abc.ReceiveChannel:
     """Stream a ticker using the std L1 api.
     """
     global _quote_streams
 
+    to_trio.send_nowait(None)
+
     async with load_aio_clients() as (
         client,
         clients,
@@ -1198,29 +1244,40 @@ async def _setup_quote_stream(
         contract = contract or (await client.find_contract(symbol))
         ticker: Ticker = client.ib.reqMktData(contract, ','.join(opts))
 
-        # define a simple queue push routine that streams quote packets
-        # to trio over the ``to_trio`` memory channel.
-        to_trio, from_aio = trio.open_memory_channel(2**8)  # type: ignore
+        # # define a simple queue push routine that streams quote packets
+        # # to trio over the ``to_trio`` memory channel.
+        # to_trio, from_aio = trio.open_memory_channel(2**8)  # type: ignore
+        def teardown():
+            ticker.updateEvent.disconnect(push)
+            log.error(f"Disconnected stream for `{symbol}`")
+            client.ib.cancelMktData(contract)
 
-        def push(t):
-            """Push quotes to trio task.
+            # decouple broadcast mem chan
+            _quote_streams.pop(symbol, None)
+
+        def push(t: Ticker) -> None:
+            """
+            Push quotes to trio task.
 
             """
             # log.debug(t)
             try:
                 to_trio.send_nowait(t)
 
-            except trio.BrokenResourceError:
+            except (
+                trio.BrokenResourceError,
+
+                # XXX: HACK, not sure why this gets left stale (probably
+                # due to our terrible ``tractor.to_asyncio``
+                # implementation for streams.. but if the mem chan
+                # gets left here and starts blocking just kill the feed?
+                # trio.WouldBlock,
+            ):
                 # XXX: eventkit's ``Event.emit()`` for whatever redic
                 # reason will catch and ignore regular exceptions
                 # resulting in tracebacks spammed to console..
                 # Manually do the dereg ourselves.
-                ticker.updateEvent.disconnect(push)
-                log.error(f"Disconnected stream for `{symbol}`")
-                client.ib.cancelMktData(contract)
-
-                # decouple broadcast mem chan
-                _quote_streams.pop(symbol, None)
+                teardown()
 
             # except trio.WouldBlock:
             #     # for slow debugging purposes to avoid clobbering prompt
@@ -1228,35 +1285,43 @@ async def _setup_quote_stream(
             #     pass
 
         ticker.updateEvent.connect(push)
+        try:
+            await asyncio.sleep(float('inf'))
+        finally:
+            teardown()
 
-        return from_aio
+        # return from_aio
 
 
-async def start_aio_quote_stream(
+@acm
+async def open_aio_quote_stream(
+
     symbol: str,
     contract: Optional[Contract] = None,
+
 ) -> trio.abc.ReceiveStream:
 
+    from tractor.trionics import broadcast_receiver
     global _quote_streams
 
     from_aio = _quote_streams.get(symbol)
     if from_aio:
 
         # if we already have a cached feed deliver a rx side clone to consumer
-        return from_aio.clone()
+        async with broadcast_receiver(from_aio) as from_aio:
+            yield from_aio
+            return
 
-    else:
-
-        from_aio = await tractor.to_asyncio.run_task(
-            _setup_quote_stream,
-            symbol=symbol,
-            contract=contract,
-        )
+    async with tractor.to_asyncio.open_channel_from(
+        _setup_quote_stream,
+        symbol=symbol,
+        contract=contract,
+    ) as (first, from_aio):
 
         # cache feed for later consumers
         _quote_streams[symbol] = from_aio
 
-        return from_aio
+        yield from_aio
 
 
 async def stream_quotes(
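`open_aio_quote_stream()` now caches the first channel per symbol and hands later callers a `broadcast_receiver` wrapper, so a single upstream IB subscription can feed any number of local consumers. The fan-out idea in plain trio terms (the real tractor wrapper adds its own buffering semantics):

```python
import trio

async def fan_out(
    source: trio.abc.ReceiveChannel,
    sinks: list[trio.abc.SendChannel],
) -> None:
    # copy every item from the single upstream subscription to each
    # registered consumer channel
    async for item in source:
        for tx in sinks:
            await tx.send(item)
```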
@@ -1285,116 +1350,120 @@ async def stream_quotes(
         symbol=sym,
     )
 
-    stream = await start_aio_quote_stream(symbol=sym, contract=contract)
+    # stream = await start_aio_quote_stream(symbol=sym, contract=contract)
+    async with open_aio_quote_stream(
+        symbol=sym, contract=contract
+    ) as stream:
 
-    # pass back some symbol info like min_tick, trading_hours, etc.
-    syminfo = asdict(details)
-    syminfo.update(syminfo['contract'])
+        # pass back some symbol info like min_tick, trading_hours, etc.
+        syminfo = asdict(details)
+        syminfo.update(syminfo['contract'])
 
-    # nested dataclass we probably don't need and that won't IPC serialize
-    syminfo.pop('secIdList')
+        # nested dataclass we probably don't need and that won't IPC serialize
+        syminfo.pop('secIdList')
 
-    # TODO: more consistent field translation
-    atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]
+        # TODO: more consistent field translation
+        atype = syminfo['asset_type'] = asset_type_map[syminfo['secType']]
 
-    # for stocks it seems TWS reports too small a tick size
-    # such that you can't submit orders with that granularity?
-    min_tick = 0.01 if atype == 'stock' else 0
+        # for stocks it seems TWS reports too small a tick size
+        # such that you can't submit orders with that granularity?
+        min_tick = 0.01 if atype == 'stock' else 0
 
-    syminfo['price_tick_size'] = max(syminfo['minTick'], min_tick)
+        syminfo['price_tick_size'] = max(syminfo['minTick'], min_tick)
 
-    # for "traditional" assets, volume is normally discreet, not a float
-    syminfo['lot_tick_size'] = 0.0
+        # for "traditional" assets, volume is normally discrete, not a float
+        syminfo['lot_tick_size'] = 0.0
 
-    # TODO: for loop through all symbols passed in
-    init_msgs = {
-        # pass back token, and bool, signalling if we're the writer
-        # and that history has been written
-        sym: {
-            'symbol_info': syminfo,
+        # TODO: for loop through all symbols passed in
+        init_msgs = {
+            # pass back token, and bool, signalling if we're the writer
+            # and that history has been written
+            sym: {
+                'symbol_info': syminfo,
+            }
         }
-    }
 
-    con = first_ticker.contract
+        con = first_ticker.contract
 
-    # should be real volume for this contract by default
-    calc_price = False
+        # should be real volume for this contract by default
+        calc_price = False
 
-    # check for special contract types
-    if type(first_ticker.contract) not in (ibis.Commodity, ibis.Forex):
+        # check for special contract types
+        if type(first_ticker.contract) not in (ibis.Commodity, ibis.Forex):
 
-        suffix = con.primaryExchange
-        if not suffix:
-            suffix = con.exchange
+            suffix = con.primaryExchange
+            if not suffix:
+                suffix = con.exchange
 
-    else:
-        # commodities and forex don't have an exchange name and
-        # no real volume so we have to calculate the price
-        suffix = con.secType
-        # no real volume on this tract
-        calc_price = True
+        else:
+            # commodities and forex don't have an exchange name and
+            # no real volume so we have to calculate the price
+            suffix = con.secType
+            # no real volume on this tract
+            calc_price = True
 
-    quote = normalize(first_ticker, calc_price=calc_price)
-    con = quote['contract']
-    topic = '.'.join((con['symbol'], suffix)).lower()
-    quote['symbol'] = topic
+        quote = normalize(first_ticker, calc_price=calc_price)
+        con = quote['contract']
+        topic = '.'.join((con['symbol'], suffix)).lower()
+        quote['symbol'] = topic
 
-    # pass first quote asap
-    first_quote = {topic: quote}
+        # pass first quote asap
+        first_quote = {topic: quote}
 
-    # ugh, clear ticks since we've consumed them
-    # (ahem, ib_insync is stateful trash)
-    first_ticker.ticks = []
+        # ugh, clear ticks since we've consumed them
+        # (ahem, ib_insync is stateful trash)
+        first_ticker.ticks = []
 
-    log.debug(f"First ticker received {quote}")
+        log.debug(f"First ticker received {quote}")
 
-    task_status.started((init_msgs,  first_quote))
+        task_status.started((init_msgs,  first_quote))
 
-    if type(first_ticker.contract) not in (ibis.Commodity, ibis.Forex):
-        # suffix = 'exchange'
-        # calc_price = False  # should be real volume for contract
+        async with aclosing(stream):
+            if type(first_ticker.contract) not in (ibis.Commodity, ibis.Forex):
+                # suffix = 'exchange'
+                # calc_price = False  # should be real volume for contract
 
-        # wait for real volume on feed (trading might be closed)
-        while True:
+                # wait for real volume on feed (trading might be closed)
+                while True:
 
-            ticker = await stream.receive()
+                    ticker = await stream.receive()
 
-            # for a real volume contract we rait for the first
-            # "real" trade to take place
-            if not calc_price and not ticker.rtTime:
-                # spin consuming tickers until we get a real market datum
-                log.debug(f"New unsent ticker: {ticker}")
-                continue
-            else:
-                log.debug("Received first real volume tick")
-                # ugh, clear ticks since we've consumed them
-                # (ahem, ib_insync is truly stateful trash)
-                ticker.ticks = []
+                    # for a real volume contract we wait for the first
+                    # "real" trade to take place
+                    if not calc_price and not ticker.rtTime:
+                        # spin consuming tickers until we get a real
+                        # market datum
+                        log.debug(f"New unsent ticker: {ticker}")
+                        continue
+                    else:
+                        log.debug("Received first real volume tick")
+                        # ugh, clear ticks since we've consumed them
+                        # (ahem, ib_insync is truly stateful trash)
+                        ticker.ticks = []
 
-                # XXX: this works because we don't use
-                # ``aclosing()`` above?
-                break
+                        # XXX: this works because we don't use
+                        # ``aclosing()`` above?
+                        break
 
-    # tell caller quotes are now coming in live
-    feed_is_live.set()
+            # tell caller quotes are now coming in live
+            feed_is_live.set()
 
-    # last = time.time()
-    async with aclosing(stream):
-        async for ticker in stream:
-            # print(f'ticker rate: {1/(time.time() - last)}')
-
-            # print(ticker.vwap)
-            quote = normalize(
-                ticker,
-                calc_price=calc_price
-            )
-
-            quote['symbol'] = topic
-            await send_chan.send({topic: quote})
-
-            # ugh, clear ticks since we've consumed them
-            ticker.ticks = []
             # last = time.time()
+            async for ticker in stream:
+                # print(f'ticker rate: {1/(time.time() - last)}')
+
+                # print(ticker.vwap)
+                quote = normalize(
+                    ticker,
+                    calc_price=calc_price
+                )
+
+                quote['symbol'] = topic
+                await send_chan.send({topic: quote})
+
+                # ugh, clear ticks since we've consumed them
+                ticker.ticks = []
+                # last = time.time()
 
 
 def pack_position(
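The quote loop above is also re-indented under a single `async with aclosing(stream)` so the stream's `aclose()` is guaranteed to run on cancellation, letting the asyncio side's `finally: teardown()` fire. The guard in isolation (here taken from `contextlib` on py3.10+; the module's actual import isn't shown in this diff):

```python
from contextlib import aclosing  # assumption: py3.10+

async def consume(agen) -> None:
    async with aclosing(agen) as stream:
        async for item in stream:
            print(item)
    # ``stream.aclose()`` has run here, even on cancellation
```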
@@ -1536,14 +1605,25 @@ async def trades_dialogue(
     accounts = set()
 
     clients: list[tuple[Client, trio.MemoryReceiveChannel]] = []
-    for account, client in _accounts2clients.items():
+    async with trio.open_nursery() as nurse:
+        for account, client in _accounts2clients.items():
 
-        # each client to an api endpoint will have it's own event stream
-        trade_event_stream = await _trio_run_client_method(
-            method='recv_trade_updates',
-            client=client,
-        )
-        clients.append((client, trade_event_stream))
+            async def open_stream(
+                task_status: TaskStatus[
+                    trio.abc.ReceiveChannel
+                ] = trio.TASK_STATUS_IGNORED,
+            ):
+                # each api client has a unique event stream
+                async with tractor.to_asyncio.open_channel_from(
+                    recv_trade_updates,
+                    client=client,
+                ) as (first, trade_event_stream):
+                    task_status.started(trade_event_stream)
+                    await trio.sleep_forever()
+
+            trade_event_stream = await nurse.start(open_stream)
+
+            clients.append((client, trade_event_stream))
 
         for client in _client_cache.values():
             for pos in client.positions():
@@ -1552,26 +1632,29 @@ async def trades_dialogue(
                 accounts.add(msg.account)
                 all_positions.append(msg.dict())
 
-    await ctx.started((all_positions, accounts))
+        await ctx.started((
+            all_positions,
+            tuple(name for name in accounts_def if name in accounts),
+        ))
 
-    async with (
-        ctx.open_stream() as ems_stream,
-        trio.open_nursery() as n,
-    ):
-        # start order request handler **before** local trades event loop
-        n.start_soon(handle_order_requests, ems_stream, accounts_def)
+        async with (
+            ctx.open_stream() as ems_stream,
+            trio.open_nursery() as n,
+        ):
+            # start order request handler **before** local trades event loop
+            n.start_soon(handle_order_requests, ems_stream, accounts_def)
 
-        # allocate event relay tasks for each client connection
-        for client, stream in clients:
-            n.start_soon(
-                deliver_trade_events,
-                stream,
-                ems_stream,
-                accounts_def
-            )
+            # allocate event relay tasks for each client connection
+            for client, stream in clients:
+                n.start_soon(
+                    deliver_trade_events,
+                    stream,
+                    ems_stream,
+                    accounts_def
+                )
 
-        # block until cancelled
-        await trio.sleep_forever()
+            # block until cancelled
+            await trio.sleep_forever()
 
 
 async def deliver_trade_events(
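`trades_dialogue()` now opens one `open_channel_from()` per client inside a nursery task: since the channel is a context manager, the child enters it, hands the receive side back via `task_status.started()`, and then sleeps so the channel stays open. The same pattern in miniature (names are illustrative):

```python
import trio

async def open_resource(
    task_status=trio.TASK_STATUS_IGNORED,
) -> None:
    tx, rx = trio.open_memory_channel(8)
    async with tx:
        # parent's ``await nurse.start()`` resumes with ``rx``
        task_status.started(rx)
        # keep the channel open until we're cancelled
        await trio.sleep_forever()

async def main() -> None:
    async with trio.open_nursery() as nurse:
        rx = await nurse.start(open_resource)
        assert isinstance(rx, trio.MemoryReceiveChannel)
        nurse.cancel_scope.cancel()

trio.run(main)
```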