forked from goodboy/tractor

Update all examples to new streaming API

stream_contexts
Tyler Goodlet 2021-04-28 11:54:56 -04:00
parent f59346d854
commit 5a5e6baad1
5 changed files with 33 additions and 22 deletions
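In every file below the change is the same migration: the old streaming API returned an async iterator directly from await portal.run(<async_gen_fn>, **kwargs), while the new one hands the stream back through an async context manager, portal.open_stream_from(<async_gen_fn>, **kwargs), tying the stream's lifetime to the enclosing async with block. A minimal before/after sketch of the pattern follows; the stream_squares generator, the 'squarer' actor name and the limit argument are invented for illustration, while start_actor(), open_stream_from(), cancel_actor() and tractor.run() are the calls actually exercised in the diffs.

import tractor
import trio


async def stream_squares(limit=5):
    # hypothetical async generator served from a subactor
    for i in range(limit):
        yield i ** 2
        await trio.sleep(0.1)


async def main():
    async with tractor.open_nursery() as n:
        portal = await n.start_actor(
            'squarer',  # hypothetical actor name
            enable_modules=[__name__],
        )

        # old API (what this commit removes):
        #   async for sq in await portal.run(stream_squares, limit=5):
        #       print(sq)

        # new API: the stream is an explicit async context manager
        async with portal.open_stream_from(stream_squares, limit=5) as stream:
            async for sq in stream:
                print(sq)

        # explicitly spawned actors need an explicit cancel
        await portal.cancel_actor()


if __name__ == '__main__':
    tractor.run(main)

The context-manager form gives each stream a well-defined scope, which is presumably why the aggregator example below switches from run_in_actor() to start_actor() and calls cancel_actor() only after its async with block exits.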

View File

@@ -24,8 +24,9 @@ async def main():
             # this async for loop streams values from the above
             # async generator running in a separate process
-            async for letter in await portal.run(stream_forever):
-                print(letter)
+            async with portal.open_stream_from(stream_forever) as stream:
+                async for letter in stream:
+                    print(letter)

     # we support trio's cancellation system
     assert cancel_scope.cancelled_caught

View File

@@ -26,8 +26,8 @@ async def main():
         p1 = await n.start_actor('name_error', enable_modules=[__name__])

         # retrieve results
-        stream = await p0.run(breakpoint_forever)
-        await p1.run(name_error)
+        async with p0.open_stream_from(breakpoint_forever) as stream:
+            await p1.run(name_error)


 if __name__ == '__main__':

View File

@@ -21,4 +21,4 @@ async def main():
 if __name__ == '__main__':
-    tractor.run(main, debug_mode=True)
+    tractor.run(main, debug_mode=True, loglevel='debug')

View File

@@ -21,7 +21,7 @@ async def aggregate(seed):
             # fork point
             portal = await nursery.start_actor(
                 name=f'streamer_{i}',
-                rpc_module_paths=[__name__],
+                enable_modules=[__name__],
             )

             portals.append(portal)
@@ -29,8 +29,11 @@ async def aggregate(seed):
         send_chan, recv_chan = trio.open_memory_channel(500)

         async def push_to_chan(portal, send_chan):
-            async with send_chan:
-                async for value in await portal.run(stream_data, seed=seed):
+            async with (
+                send_chan,
+                portal.open_stream_from(stream_data, seed=seed) as stream,
+            ):
+                async for value in stream:

                     # leverage trio's built-in backpressure
                     await send_chan.send(value)
@@ -71,18 +74,24 @@ async def main():
         import time
         pre_start = time.time()

-        portal = await nursery.run_in_actor(
-            aggregate,
+        portal = await nursery.start_actor(
             name='aggregator',
-            seed=seed,
+            enable_modules=[__name__],
         )

-        start = time.time()
-        # the portal call returns exactly what you'd expect
-        # as if the remote "aggregate" function was called locally
-        result_stream = []
-        async for value in await portal.result():
-            result_stream.append(value)
+        async with portal.open_stream_from(
+            aggregate,
+            seed=seed,
+        ) as stream:
+
+            start = time.time()
+            # the portal call returns exactly what you'd expect
+            # as if the remote "aggregate" function was called locally
+            result_stream = []
+            async for value in stream:
+                result_stream.append(value)
+
+        await portal.cancel_actor()

         print(f"STREAM TIME = {time.time() - start}")
         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}")

View File

@@ -15,11 +15,12 @@ async def stream_data(seed=10):
 async def stream_from_portal(p, consumed):

-    async for item in await p.run(stream_data):
-        if item in consumed:
-            consumed.remove(item)
-        else:
-            consumed.append(item)
+    async with p.open_stream_from(stream_data) as stream:
+        async for item in stream:
+            if item in consumed:
+                consumed.remove(item)
+            else:
+                consumed.append(item)


 async def main():