Update all examples to new streaming API

parent f59346d854
commit 5a5e6baad1
@@ -24,7 +24,8 @@ async def main():
             # this async for loop streams values from the above
             # async generator running in a separate process
-            async for letter in await portal.run(stream_forever):
+            async with portal.open_stream_from(stream_forever) as stream:
+                async for letter in stream:
                     print(letter)
 
     # we support trio's cancellation system
 
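This hunk is the core of the migration: `await portal.run(fn)` used to hand back something you could `async for` over directly, while the new API wraps the stream in an `async with portal.open_stream_from(fn) as stream:` block. Below is a minimal, self-contained sketch of the new pattern built only from the tractor calls that appear in this diff; `stream_letters` and the actor name are illustrative stand-ins, not the actual example file:

import tractor


async def stream_letters():
    # hypothetical stand-in for the example's remote async generator
    for letter in 'abcdef':
        yield letter


async def main():
    async with tractor.open_nursery() as n:
        portal = await n.start_actor('streamer', enable_modules=[__name__])

        # new API: `open_stream_from()` is an async context manager that
        # yields the stream; the old `await portal.run()` form is gone
        async with portal.open_stream_from(stream_letters) as stream:
            async for letter in stream:
                print(letter)

        # the subactor stays alive after the stream block exits
        await portal.cancel_actor()


if __name__ == '__main__':
    tractor.run(main)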
@@ -26,7 +26,7 @@ async def main():
         p1 = await n.start_actor('name_error', enable_modules=[__name__])
 
         # retrieve results
-        stream = await p0.run(breakpoint_forever)
+        async with p0.open_stream_from(breakpoint_forever) as stream:
             await p1.run(name_error)
 
@@ -21,4 +21,4 @@ async def main():
 
 
 if __name__ == '__main__':
-    tractor.run(main, debug_mode=True)
+    tractor.run(main, debug_mode=True, loglevel='debug')
 
@@ -21,7 +21,7 @@ async def aggregate(seed):
         # fork point
         portal = await nursery.start_actor(
             name=f'streamer_{i}',
-            rpc_module_paths=[__name__],
+            enable_modules=[__name__],
         )
 
         portals.append(portal)
 
@@ -29,8 +29,11 @@ async def aggregate(seed):
     send_chan, recv_chan = trio.open_memory_channel(500)
 
     async def push_to_chan(portal, send_chan):
-        async with send_chan:
-            async for value in await portal.run(stream_data, seed=seed):
+        async with (
+            send_chan,
+            portal.open_stream_from(stream_data, seed=seed) as stream,
+        ):
+            async for value in stream:
                 # leverage trio's built-in backpressure
                 await send_chan.send(value)
 
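The new `push_to_chan()` body combines the memory-channel and stream context managers in one parenthesized `async with`, a form that only became officially supported syntax in Python 3.10. An equivalent sketch using plain nested blocks, with `seed` passed explicitly here rather than closed over as in the example:

async def push_to_chan(portal, send_chan, seed):
    # same behaviour as the parenthesized form above, just nested explicitly
    async with send_chan:
        async with portal.open_stream_from(stream_data, seed=seed) as stream:
            async for value in stream:
                # the bounded memory channel blocks here when full, which is
                # the backpressure the comment in the diff refers to
                await send_chan.send(value)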
@@ -71,19 +74,25 @@ async def main():
         import time
         pre_start = time.time()
 
-        portal = await nursery.run_in_actor(
-            aggregate,
+        portal = await nursery.start_actor(
             name='aggregator',
-            seed=seed,
+            enable_modules=[__name__],
         )
 
+        async with portal.open_stream_from(
+            aggregate,
+            seed=seed,
+        ) as stream:
+
             start = time.time()
             # the portal call returns exactly what you'd expect
             # as if the remote "aggregate" function was called locally
             result_stream = []
-            async for value in await portal.result():
+            async for value in stream:
                 result_stream.append(value)
 
+        await portal.cancel_actor()
+
         print(f"STREAM TIME = {time.time() - start}")
         print(f"STREAM + SPAWN TIME = {time.time() - pre_start}")
         assert result_stream == list(range(seed))
 
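This hunk swaps `run_in_actor()`, which tied the subactor's lifetime to a single remote call whose result came back via `portal.result()`, for the more explicit `start_actor()` / `open_stream_from()` / `cancel_actor()` sequence. A condensed sketch of that lifecycle, assuming an `aggregate(seed)` async generator like the one in this example (the wrapper function name is hypothetical):

async def consume_aggregate(nursery, seed):
    # spawn a long-lived subactor; nothing runs in it yet
    portal = await nursery.start_actor(
        name='aggregator',
        enable_modules=[__name__],
    )

    results = []
    async with portal.open_stream_from(aggregate, seed=seed) as stream:
        async for value in stream:
            results.append(value)

    # unlike `run_in_actor()`, `start_actor()` leaves the subactor running
    # after the stream closes, so it has to be cancelled explicitly
    await portal.cancel_actor()
    return results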
@@ -15,7 +15,8 @@ async def stream_data(seed=10):
 
 async def stream_from_portal(p, consumed):
 
-    async for item in await p.run(stream_data):
+    async with p.open_stream_from(stream_data) as stream:
+        async for item in stream:
             if item in consumed:
                 consumed.remove(item)
             else: