"""
Streaming via async gen api
"""
import time
from functools import partial
import platform

import trio
import tractor
import pytest

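
# A rough sketch of the pattern under test: a daemon subactor runs one of
# the stream funcs below and the parent pulls values through a portal,
# along the lines of:
#
#     portal = await nursery.start_actor('streamerd', rpc_module_paths=[__name__])
#     stream = await portal.run(__name__, 'async_gen_stream', sequence=[1, 2, 3])
#     async for item in stream:
#         ...
#     await stream.aclose()  # cancels the far end task
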
def test_must_define_ctx():
    with pytest.raises(TypeError) as err:
        @tractor.stream
        async def no_ctx():
            pass

    assert "no_ctx must be `ctx: tractor.Context" in str(err.value)

    @tractor.stream
    async def has_ctx(ctx):
        pass


async def async_gen_stream(sequence):
    for i in sequence:
        yield i
        await trio.sleep(0.1)

    # block indefinitely waiting to be cancelled by ``aclose()`` call
    with trio.CancelScope() as cs:
        await trio.sleep(float('inf'))
        assert 0
    assert cs.cancelled_caught


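# the same stream written against the explicit context API: a task wrapped
# with ``@tractor.stream`` receives a ``ctx`` and pushes values via
# ``ctx.send_yield()`` instead of ``yield``-ing them directly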
@tractor.stream
async def context_stream(ctx, sequence):
    for i in sequence:
        await ctx.send_yield(i)
        await trio.sleep(0.1)

    # block indefinitely waiting to be cancelled by ``aclose()`` call
    with trio.CancelScope() as cs:
        await trio.sleep(float('inf'))
        assert 0
    assert cs.cancelled_caught


async def stream_from_single_subactor(stream_func_name):
    """Verify we can spawn a daemon actor and retrieve streamed data.
    """
    async with tractor.find_actor('streamerd') as portals:
        if not portals:
            # only one per host address, spawns an actor if None
            async with tractor.open_nursery() as nursery:
                # no streamerd actor found
                portal = await nursery.start_actor(
                    'streamerd',
                    rpc_module_paths=[__name__],
                    statespace={'global_dict': {}},
                )

                seq = range(10)

                stream = await portal.run(
                    __name__,
                    stream_func_name,  # one of the funcs above
                    sequence=list(seq),  # has to be msgpack serializable
                )
                # it'd sure be nice to have an asyncitertools here...
                iseq = iter(seq)
                ival = next(iseq)
                async for val in stream:
                    assert val == ival
                    try:
                        ival = next(iseq)
                    except StopIteration:
                        # should cancel the far end task; the cancellation
                        # is caught there and no error is raised
                        await stream.aclose()

                await trio.sleep(0.3)
                try:
                    await stream.__anext__()
                except StopAsyncIteration:
                    # stop all spawned subactors
                    await portal.cancel_actor()
                # await nursery.cancel()


@pytest.mark.parametrize(
    'stream_func', ['async_gen_stream', 'context_stream']
)
def test_stream_from_single_subactor(arb_addr, start_method, stream_func):
    """Verify streaming from a spawned async generator.
    """
    tractor.run(
        partial(
            stream_from_single_subactor,
            stream_func_name=stream_func,
        ),
        arbiter_addr=arb_addr,
        start_method=start_method,
    )


# these are the first 2 actors, streamer_1 and streamer_2
async def stream_data(seed):
    for i in range(seed):
        yield i
        # trigger scheduler to simulate practical usage
        await trio.sleep(0)


# this is the third actor; the aggregator
async def aggregate(seed):
    """Ensure that the two streams we receive match but only stream
    a single set of values to the parent.
    """
    async with tractor.open_nursery() as nursery:
        portals = []
        for i in range(1, 3):
            # fork point
            portal = await nursery.start_actor(
                name=f'streamer_{i}',
                rpc_module_paths=[__name__],
            )

            portals.append(portal)

        send_chan, recv_chan = trio.open_memory_channel(500)

        async def push_to_chan(portal, send_chan):
            async with send_chan:
                async for value in await portal.run(
                    __name__, 'stream_data', seed=seed
                ):
                    # leverage trio's built-in backpressure
                    await send_chan.send(value)

            print(f"FINISHED ITERATING {portal.channel.uid}")

        # spawn 2 trio tasks to collect streams and push to a local queue
        async with trio.open_nursery() as n:

            for portal in portals:
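                # one clone per task: the receive side only sees EOF once
                # every clone of the send side has been closed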
                n.start_soon(push_to_chan, portal, send_chan.clone())

            # close this local task's reference to send side
            await send_chan.aclose()

            unique_vals = set()
            async with recv_chan:
                async for value in recv_chan:
                    if value not in unique_vals:
                        unique_vals.add(value)
                        # yield upwards to the spawning parent actor
                        yield value

                        assert value in unique_vals

                print("FINISHED ITERATING in aggregator")

        await nursery.cancel()
        print("WAITING on `ActorNursery` to finish")

    print("AGGREGATOR COMPLETE!")


# this is the main actor and *arbiter*
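# (the "quadruple": 4 actors in total - this main actor, the aggregator,
# and the aggregator's 2 streamers)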
async def a_quadruple_example():
    # a nursery which spawns "actors"
    async with tractor.open_nursery() as nursery:

        seed = int(1e3)
        pre_start = time.time()

        portal = await nursery.run_in_actor(
            'aggregator',
            aggregate,
            seed=seed,
        )

        start = time.time()
        # the portal call returns exactly what you'd expect
        # as if the remote "aggregate" function was called locally
        result_stream = []
        async for value in await portal.result():
            result_stream.append(value)

        print(f"STREAM TIME = {time.time() - start}")
        print(f"STREAM + SPAWN TIME = {time.time() - pre_start}")
        assert result_stream == list(range(seed))
        return result_stream


async def cancel_after(wait):
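    # if the deadline fires before the example completes, the ``return``
    # never runs and this implicitly returns ``None``; the tests below
    # rely on that to detect a mid-stream cancellation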
    with trio.move_on_after(wait):
        return await a_quadruple_example()


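# run the full quad example once per module, uncancelled, and record its
# duration as the baseline the cancellation tests measure against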
@pytest.fixture(scope='module')
def time_quad_ex(arb_addr, travis, spawn_backend):
    if travis and spawn_backend == 'mp' and (platform.system() != 'Windows'):
        # no idea why, but the travis mp + linux runs flake out here often
        pytest.skip("Test is too flaky on mp in CI")

    timeout = 7 if platform.system() == 'Windows' else 4
    start = time.time()
    results = tractor.run(cancel_after, timeout, arbiter_addr=arb_addr)
    diff = time.time() - start
    assert results
    return results, diff


def test_a_quadruple_example(time_quad_ex, travis, spawn_backend):
    """This also serves as a kind of "we'd like to be this fast" test."""

    results, diff = time_quad_ex
    assert results
    this_fast = 6 if platform.system() == 'Windows' else 2.5
    assert diff < this_fast


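# parametrize delays shorter than the measured full runtime so each run
# gets cancelled mid-stream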
@pytest.mark.parametrize(
    'cancel_delay',
    list(map(lambda i: i / 10, range(3, 9)))
)
def test_not_fast_enough_quad(
    arb_addr, time_quad_ex, cancel_delay, travis, spawn_backend
):
    """Verify we can cancel midway through the quad example and have all
    actors cancel gracefully.
    """
    results, diff = time_quad_ex
    delay = max(diff - cancel_delay, 0)
    results = tractor.run(cancel_after, delay, arbiter_addr=arb_addr)
    if platform.system() == 'Windows' and results is not None:
        # on Windows CI later runs can be quicker than the first,
        # so just ignore those
        print("Whoa there windows, caught your breath eh?")
    else:
        # should be cancelled mid-streaming
        assert results is None