tractor/examples/parallelism/concurrent_actors_primes.py

"""
Demonstration of the prime number detector example from the
``concurrent.futures`` docs:
https://docs.python.org/3/library/concurrent.futures.html#processpoolexecutor-example
2021-02-24 18:39:14 +00:00
This uses no extra threads, fancy semaphores or futures; all we need
2021-01-17 20:41:35 +00:00
is ``tractor``'s channels.
"""
from contextlib import asynccontextmanager
from typing import AsyncIterator, Callable
import itertools
import math
import time

import tractor
import trio
from async_generator import aclosing
PRIMES = [
    112272535095293,
    112582705942171,
    112272535095293,
    115280095190773,
    115797848077099,
    1099726899285419,
]
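

# NOTE: these are the same sample inputs used in the stdlib docs example.
# ``is_prime`` below is the same CPU-bound trial-division check; it is
# declared ``async`` so it can be run inside a worker sub-actor's trio
# runtime via ``portal.run()``.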
async def is_prime(n):
    if n < 2:
        return False
    if n == 2:
        return True
    if n % 2 == 0:
        return False

    sqrt_n = int(math.floor(math.sqrt(n)))
    for i in range(3, sqrt_n + 1, 2):
        if n % i == 0:
            return False
    return True


@asynccontextmanager
async def worker_pool(workers=4):
"""Though it's a trivial special case for ``tractor``, the well
2021-01-17 20:41:35 +00:00
known "worker pool" seems to be the defacto "but, I want this
process pattern!" for most parallelism pilgrims.
2021-01-17 20:41:35 +00:00
Yes, the workers stay alive (and ready for work) until you close
the context.
"""
    async with tractor.open_nursery() as tn:

        portals = []
        snd_chan, recv_chan = trio.open_memory_channel(len(PRIMES))

        for i in range(workers):

            # this starts a new sub-actor (process + trio runtime) and
            # stores its "portal" for later use to "submit jobs" (ugh).
            portals.append(
                await tn.start_actor(
                    f'worker_{i}',
                    enable_modules=[__name__],
                )
            )
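
        # ``_map`` fans ``sequence`` out to the workers round-robin (via
        # ``itertools.cycle``) and yields ``(input, result)`` pairs back
        # in completion order, not input order.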
        async def _map(
            worker_func: Callable[[int], bool],
            sequence: list[int]
        ) -> AsyncIterator[tuple[int, bool]]:

            # define an async (local) task to collect results from workers
            async def send_result(func, value, portal):
                await snd_chan.send((value, await portal.run(func, n=value)))

            async with trio.open_nursery() as n:

                for value, portal in zip(sequence, itertools.cycle(portals)):
                    n.start_soon(
                        send_result,
                        worker_func,
                        value,
                        portal
                    )

                # deliver results as they arrive
                for _ in range(len(sequence)):
                    yield await recv_chan.receive()
        # deliver the parallel "worker mapper" to user code
        yield _map

        # tear down all "workers" on pool close
        await tn.cancel()

async def main():

    async with worker_pool() as actor_map:

        start = time.time()
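        # ``aclosing()`` ensures the async generator returned by
        # ``actor_map()`` is explicitly closed when this block exits,
        # rather than being left to garbage collection.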
        async with aclosing(actor_map(is_prime, PRIMES)) as results:
            async for number, prime in results:
                print(f'{number} is prime: {prime}')

        print(f'processing took {time.time() - start} seconds')
if __name__ == '__main__':
    start = time.time()
    trio.run(main)
    print(f'script took {time.time() - start} seconds')